diff --git a/p4a/pythonforandroid/__init__.py b/p4a/pythonforandroid/__init__.py index 27f4493..f39a847 100644 --- a/p4a/pythonforandroid/__init__.py +++ b/p4a/pythonforandroid/__init__.py @@ -1,2 +1 @@ - -__version__ = '0.5' +__version__ = '2022.09.04' diff --git a/p4a/pythonforandroid/androidndk.py b/p4a/pythonforandroid/androidndk.py new file mode 100644 index 0000000..83cb355 --- /dev/null +++ b/p4a/pythonforandroid/androidndk.py @@ -0,0 +1,83 @@ +import sys +import os + + +class AndroidNDK: + """ + This class is used to get the current NDK information. + """ + + ndk_dir = "" + + def __init__(self, ndk_dir): + self.ndk_dir = ndk_dir + + @property + def host_tag(self): + """ + Returns the host tag for the current system. + Note: The host tag is ``darwin-x86_64`` even on Apple Silicon Macs. + """ + return f"{sys.platform}-x86_64" + + @property + def llvm_prebuilt_dir(self): + return os.path.join( + self.ndk_dir, "toolchains", "llvm", "prebuilt", self.host_tag + ) + + @property + def llvm_bin_dir(self): + return os.path.join(self.llvm_prebuilt_dir, "bin") + + @property + def clang(self): + return os.path.join(self.llvm_bin_dir, "clang") + + @property + def clang_cxx(self): + return os.path.join(self.llvm_bin_dir, "clang++") + + @property + def llvm_binutils_prefix(self): + return os.path.join(self.llvm_bin_dir, "llvm-") + + @property + def llvm_ar(self): + return f"{self.llvm_binutils_prefix}ar" + + @property + def llvm_ranlib(self): + return f"{self.llvm_binutils_prefix}ranlib" + + @property + def llvm_objcopy(self): + return f"{self.llvm_binutils_prefix}objcopy" + + @property + def llvm_objdump(self): + return f"{self.llvm_binutils_prefix}objdump" + + @property + def llvm_readelf(self): + return f"{self.llvm_binutils_prefix}readelf" + + @property + def llvm_strip(self): + return f"{self.llvm_binutils_prefix}strip" + + @property + def sysroot(self): + return os.path.join(self.llvm_prebuilt_dir, "sysroot") + + @property + def sysroot_include_dir(self): + return os.path.join(self.sysroot, "usr", "include") + + @property + def sysroot_lib_dir(self): + return os.path.join(self.sysroot, "usr", "lib") + + @property + def libcxx_include_dir(self): + return os.path.join(self.sysroot_include_dir, "c++", "v1") diff --git a/p4a/pythonforandroid/archs.py b/p4a/pythonforandroid/archs.py index 09ebba4..b960ca6 100644 --- a/p4a/pythonforandroid/archs.py +++ b/p4a/pythonforandroid/archs.py @@ -1,22 +1,46 @@ from distutils.spawn import find_executable from os import environ -from os.path import (exists, join, dirname, split) -from glob import glob +from os.path import join +from multiprocessing import cpu_count from pythonforandroid.recipe import Recipe from pythonforandroid.util import BuildInterruptingException, build_platform -class Arch(object): - - toolchain_prefix = None - '''The prefix for the toolchain dir in the NDK.''' +class Arch: command_prefix = None '''The prefix for NDK commands such as gcc.''' + arch = "" + '''Name of the arch such as: `armeabi-v7a`, `arm64-v8a`, `x86`...''' + + arch_cflags = [] + '''Specific arch `cflags`, expected to be overridden in a subclass if needed.''' + + common_cflags = [ + '-target {target}', + '-fomit-frame-pointer' + ] + + common_cppflags = [ + '-DANDROID', + '-I{ctx.ndk.sysroot_include_dir}', + '-I{python_includes}', + ] + + common_ldflags = ['-L{ctx_libs_dir}'] + + common_ldlibs = ['-lm'] + + common_ldshared = [ + '-pthread', + '-shared', + '-Wl,-O1', + '-Wl,-Bsymbolic-functions', + ] + def __init__(self, ctx): - super(Arch, self).__init__() self.ctx = ctx # 
Allows injecting additional linker paths used by any recipe. @@ -28,6 +52,14 @@ class Arch(object): def __str__(self): return self.arch + @property + def ndk_lib_dir(self): + return join(self.ctx.ndk.sysroot_lib_dir, self.command_prefix) + + @property + def ndk_lib_dir_versioned(self): + return join(self.ndk_lib_dir, str(self.ctx.ndk_api)) + @property def include_dirs(self): return [ @@ -38,216 +70,235 @@ class Arch(object): @property def target(self): - target_data = self.command_prefix.split('-') - return '-'.join( - [target_data[0], 'none', target_data[1], target_data[2]]) + # As of NDK r19, the toolchains installed by default with the + # NDK may be used in-place. The make_standalone_toolchain.py script + # is no longer needed for interfacing with arbitrary build systems. + # See: https://developer.android.com/ndk/guides/other_build_systems + return '{triplet}{ndk_api}'.format( + triplet=self.command_prefix, ndk_api=self.ctx.ndk_api + ) - def get_env(self, with_flags_in_cc=True, clang=False): + @property + def clang_exe(self): + """Full path of the clang compiler, depending on the Android NDK + version used.""" + return self.get_clang_exe() + + @property + def clang_exe_cxx(self): + """Full path of the clang++ compiler, depending on the Android NDK + version used.""" + return self.get_clang_exe(plus_plus=True) + + def get_clang_exe(self, with_target=False, plus_plus=False): + """Returns the full path of the clang/clang++ compiler, supports two + kwargs: + + - `with_target`: prepend `target` to clang + - `plus_plus`: will return the clang++ compiler (defaults to `False`) + """ + compiler = 'clang' + if with_target: + compiler = '{target}-{compiler}'.format( + target=self.target, compiler=compiler + ) + if plus_plus: + compiler += '++' + return join(self.ctx.ndk.llvm_bin_dir, compiler) + + def get_env(self, with_flags_in_cc=True): env = {} - cflags = [ - '-DANDROID', - '-fomit-frame-pointer', - '-D__ANDROID_API__={}'.format(self.ctx.ndk_api)] - if not clang: - cflags.append('-mandroid') - else: - cflags.append('-target ' + self.target) - toolchain = '{android_host}-{toolchain_version}'.format( - android_host=self.ctx.toolchain_prefix, - toolchain_version=self.ctx.toolchain_version) - toolchain = join(self.ctx.ndk_dir, 'toolchains', toolchain, - 'prebuilt', build_platform) - cflags.append('-gcc-toolchain {}'.format(toolchain)) + # HOME: User's home directory + # + # Many tools including p4a store outputs in the user's home + # directory. This is found from the HOME environment variable + # and falls back to the system account database. Setting HOME + # can be used to globally divert these tools to use a different + # path. Furthermore, in containerized environments the user may + # not exist in the account database, so if HOME isn't set then + # these tools will fail. 
+ if 'HOME' in environ: + env['HOME'] = environ['HOME'] - env['CFLAGS'] = ' '.join(cflags) + # CFLAGS/CXXFLAGS: the processor flags + env['CFLAGS'] = ' '.join(self.common_cflags).format(target=self.target) + if self.arch_cflags: + # each architecture may have its own CFLAGS + env['CFLAGS'] += ' ' + ' '.join(self.arch_cflags) + env['CXXFLAGS'] = env['CFLAGS'] - # Link the extra global link paths first before anything else + # CPPFLAGS (for macros and includes) + env['CPPFLAGS'] = ' '.join(self.common_cppflags).format( + ctx=self.ctx, + command_prefix=self.command_prefix, + python_includes=join( + self.ctx.get_python_install_dir(self.arch), + 'include/python{}'.format(self.ctx.python_recipe.version[0:3]), + ), + ) + + # LDFLAGS: Link the extra global link paths first before anything else # (such that overriding system libraries with them is possible) - env['LDFLAGS'] = ' ' + " ".join([ - "-L'" + l.replace("'", "'\"'\"'") + "'" # no shlex.quote in py2 - for l in self.extra_global_link_paths - ]) + ' ' + env['LDFLAGS'] = ( + ' ' + + " ".join( + [ + "-L'" + + link_path.replace("'", "'\"'\"'") + + "'" # no shlex.quote in py2 + for link_path in self.extra_global_link_paths + ] + ) + + ' ' + ' '.join(self.common_ldflags).format( + ctx_libs_dir=self.ctx.get_libs_dir(self.arch) + ) + ) - sysroot = join(self.ctx._ndk_dir, 'sysroot') - if exists(sysroot): - # post-15 NDK per - # https://android.googlesource.com/platform/ndk/+/ndk-r15-release/docs/UnifiedHeaders.md - env['CFLAGS'] += ' -isystem {}/sysroot/usr/include/{}'.format( - self.ctx.ndk_dir, self.ctx.toolchain_prefix) - env['CFLAGS'] += ' -I{}/sysroot/usr/include/{}'.format( - self.ctx.ndk_dir, self.command_prefix) - else: - sysroot = self.ctx.ndk_platform - env['CFLAGS'] += ' -I{}'.format(self.ctx.ndk_platform) - env['CFLAGS'] += ' -isysroot {} '.format(sysroot) - env['CFLAGS'] += '-I' + join(self.ctx.get_python_install_dir(), - 'include/python{}'.format( - self.ctx.python_recipe.version[0:3]) - ) - - env['LDFLAGS'] += '--sysroot={} '.format(self.ctx.ndk_platform) - - env["CXXFLAGS"] = env["CFLAGS"] - - env["LDFLAGS"] += " ".join(['-lm', '-L' + self.ctx.get_libs_dir(self.arch)]) - - if self.ctx.ndk == 'crystax': - env['LDFLAGS'] += ' -L{}/sources/crystax/libs/{} -lcrystax'.format(self.ctx.ndk_dir, self.arch) - - toolchain_prefix = self.ctx.toolchain_prefix - toolchain_version = self.ctx.toolchain_version - command_prefix = self.command_prefix - - env['TOOLCHAIN_PREFIX'] = toolchain_prefix - env['TOOLCHAIN_VERSION'] = toolchain_version + # LDLIBS: Library flags or names given to compilers when they are + # supposed to invoke the linker. 
+ env['LDLIBS'] = ' '.join(self.common_ldlibs) + # CCACHE ccache = '' if self.ctx.ccache and bool(int(environ.get('USE_CCACHE', '1'))): # print('ccache found, will optimize builds') ccache = self.ctx.ccache + ' ' env['USE_CCACHE'] = '1' env['NDK_CCACHE'] = self.ctx.ccache - env.update({k: v for k, v in environ.items() if k.startswith('CCACHE_')}) + env.update( + {k: v for k, v in environ.items() if k.startswith('CCACHE_')} + ) - if clang: - llvm_dirname = split( - glob(join(self.ctx.ndk_dir, 'toolchains', 'llvm*'))[-1])[-1] - clang_path = join(self.ctx.ndk_dir, 'toolchains', llvm_dirname, - 'prebuilt', build_platform, 'bin') - environ['PATH'] = '{clang_path}:{path}'.format( - clang_path=clang_path, path=environ['PATH']) - exe = join(clang_path, 'clang') - execxx = join(clang_path, 'clang++') - else: - exe = '{command_prefix}-gcc'.format(command_prefix=command_prefix) - execxx = '{command_prefix}-g++'.format(command_prefix=command_prefix) - - cc = find_executable(exe, path=environ['PATH']) + # Compiler: `CC` and `CXX` (and make sure that the compiler exists) + env['PATH'] = self.ctx.env['PATH'] + cc = find_executable(self.clang_exe, path=env['PATH']) if cc is None: - print('Searching path are: {!r}'.format(environ['PATH'])) + print('Search paths are: {!r}'.format(env['PATH'])) raise BuildInterruptingException( 'Couldn\'t find executable for CC. This indicates a ' 'problem locating the {} executable in the Android ' 'NDK, not that you don\'t have a normal compiler ' - 'installed. Exiting.'.format(exe)) + 'installed. Exiting.'.format(self.clang_exe)) if with_flags_in_cc: env['CC'] = '{ccache}{exe} {cflags}'.format( - exe=exe, + exe=self.clang_exe, ccache=ccache, cflags=env['CFLAGS']) env['CXX'] = '{ccache}{execxx} {cxxflags}'.format( - execxx=execxx, + execxx=self.clang_exe_cxx, ccache=ccache, cxxflags=env['CXXFLAGS']) else: env['CC'] = '{ccache}{exe}'.format( - exe=exe, + exe=self.clang_exe, ccache=ccache) env['CXX'] = '{ccache}{execxx}'.format( - execxx=execxx, + execxx=self.clang_exe_cxx, ccache=ccache) - env['AR'] = '{}-ar'.format(command_prefix) - env['RANLIB'] = '{}-ranlib'.format(command_prefix) - env['LD'] = '{}-ld'.format(command_prefix) - env['LDSHARED'] = env["CC"] + " -pthread -shared " +\ - "-Wl,-O1 -Wl,-Bsymbolic-functions " - if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax: - # For crystax python, we can't use the host python headers: - env["CFLAGS"] += ' -I{}/sources/python/{}/include/python/'.\ - format(self.ctx.ndk_dir, self.ctx.python_recipe.version[0:3]) - env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix) - env['MAKE'] = 'make -j5' - env['READELF'] = '{}-readelf'.format(command_prefix) - env['NM'] = '{}-nm'.format(command_prefix) + # Android's LLVM binutils + env['AR'] = self.ctx.ndk.llvm_ar + env['RANLIB'] = self.ctx.ndk.llvm_ranlib + env['STRIP'] = f'{self.ctx.ndk.llvm_strip} --strip-unneeded' + env['READELF'] = self.ctx.ndk.llvm_readelf + env['OBJCOPY'] = self.ctx.ndk.llvm_objcopy + env['MAKE'] = 'make -j{}'.format(str(cpu_count())) + + # Android's arch/toolchain + env['ARCH'] = self.arch + env['NDK_API'] = 'android-{}'.format(str(self.ctx.ndk_api)) + + # Custom linker options + env['LDSHARED'] = env['CC'] + ' ' + ' '.join(self.common_ldshared) + + # Host python (used by some recipes) hostpython_recipe = Recipe.get_recipe( 'host' + self.ctx.python_recipe.name, self.ctx) env['BUILDLIB_PATH'] = join( hostpython_recipe.get_build_dir(self.arch), - 'build', 'lib.{}-{}'.format( - build_platform, 
self.ctx.python_recipe.major_minor_version_string) + 'native-build', + 'build', + 'lib.{}-{}'.format( + build_platform, + self.ctx.python_recipe.major_minor_version_string, + ), ) - env['PATH'] = environ['PATH'] - - env['ARCH'] = self.arch - env['NDK_API'] = 'android-{}'.format(str(self.ctx.ndk_api)) - - if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax: - env['CRYSTAX_PYTHON_VERSION'] = self.ctx.python_recipe.version + # for reproducible builds + if 'SOURCE_DATE_EPOCH' in environ: + for k in 'LC_ALL TZ SOURCE_DATE_EPOCH PYTHONHASHSEED BUILD_DATE BUILD_TIME'.split(): + if k in environ: + env[k] = environ[k] return env class ArchARM(Arch): arch = "armeabi" - toolchain_prefix = 'arm-linux-androideabi' command_prefix = 'arm-linux-androideabi' - platform_dir = 'arch-arm' @property def target(self): target_data = self.command_prefix.split('-') - return '-'.join( - ['armv7a', 'none', target_data[1], target_data[2]]) + return '{triplet}{ndk_api}'.format( + triplet='-'.join(['armv7a', target_data[1], target_data[2]]), + ndk_api=self.ctx.ndk_api, + ) class ArchARMv7_a(ArchARM): arch = 'armeabi-v7a' - - def get_env(self, with_flags_in_cc=True, clang=False): - env = super(ArchARMv7_a, self).get_env(with_flags_in_cc, clang=clang) - env['CFLAGS'] = (env['CFLAGS'] + - (' -march=armv7-a -mfloat-abi=softfp ' - '-mfpu=vfp -mthumb')) - env['CXXFLAGS'] = env['CFLAGS'] - return env + arch_cflags = [ + '-march=armv7-a', + '-mfloat-abi=softfp', + '-mfpu=vfp', + '-mthumb', + '-fPIC', + ] class Archx86(Arch): arch = 'x86' - toolchain_prefix = 'x86' command_prefix = 'i686-linux-android' - platform_dir = 'arch-x86' - - def get_env(self, with_flags_in_cc=True, clang=False): - env = super(Archx86, self).get_env(with_flags_in_cc, clang=clang) - env['CFLAGS'] = (env['CFLAGS'] + - ' -march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32') - env['CXXFLAGS'] = env['CFLAGS'] - return env + arch_cflags = [ + '-march=i686', + '-mssse3', + '-mfpmath=sse', + '-m32', + '-fPIC', + ] class Archx86_64(Arch): arch = 'x86_64' - toolchain_prefix = 'x86_64' command_prefix = 'x86_64-linux-android' - platform_dir = 'arch-x86_64' - - def get_env(self, with_flags_in_cc=True, clang=False): - env = super(Archx86_64, self).get_env(with_flags_in_cc, clang=clang) - env['CFLAGS'] = (env['CFLAGS'] + - ' -march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel') - env['CXXFLAGS'] = env['CFLAGS'] - return env + arch_cflags = [ + '-march=x86-64', + '-msse4.2', + '-mpopcnt', + '-m64', + '-fPIC', + ] class ArchAarch_64(Arch): arch = 'arm64-v8a' - toolchain_prefix = 'aarch64-linux-android' command_prefix = 'aarch64-linux-android' - platform_dir = 'arch-arm64' + arch_cflags = [ + '-march=armv8-a', + '-fPIC' + # '-I' + join(dirname(__file__), 'includes', 'arm64-v8a'), + ] - def get_env(self, with_flags_in_cc=True, clang=False): - env = super(ArchAarch_64, self).get_env(with_flags_in_cc, clang=clang) - incpath = ' -I' + join(dirname(__file__), 'includes', 'arm64-v8a') - env['EXTRA_CFLAGS'] = incpath - env['CFLAGS'] += incpath - env['CXXFLAGS'] += incpath - if with_flags_in_cc: - env['CC'] += incpath - env['CXX'] += incpath - return env + # Note: The `EXTRA_CFLAGS` below should match the commented-out include + # above in `arch_cflags`. The original lines were added when the SDL2 + # bootstrap was created, and were commented out during the migration to the + # NDK r19 build system because they no longer seem to be needed, + # but do we still need them? 
+ # def get_env(self, with_flags_in_cc=True): + # env = super().get_env(with_flags_in_cc) + # env['EXTRA_CFLAGS'] = self.arch_cflags[-1] + # return env diff --git a/p4a/pythonforandroid/bdistapk.py b/p4a/pythonforandroid/bdistapk.py index a27f4d1..bcf77cd 100644 --- a/p4a/pythonforandroid/bdistapk.py +++ b/p4a/pythonforandroid/bdistapk.py @@ -1,6 +1,4 @@ -from __future__ import print_function from setuptools import Command -from pythonforandroid import toolchain import sys from os.path import realpath, join, exists, dirname, curdir, basename, split @@ -16,16 +14,16 @@ def argv_contains(t): return False -class BdistAPK(Command): - description = 'Create an APK with python-for-android' +class Bdist(Command): user_options = [] + package_type = None def initialize_options(self): for option in self.user_options: setattr(self, option[0].strip('=').replace('-', '_'), None) - option_dict = self.distribution.get_option_dict('apk') + option_dict = self.distribution.get_option_dict(self.package_type) # This is a hack, we probably aren't supposed to loop through # the option_dict so early because distutils does exactly the @@ -34,10 +32,9 @@ class BdistAPK(Command): for (option, (source, value)) in option_dict.items(): setattr(self, option, str(value)) - def finalize_options(self): - setup_options = self.distribution.get_option_dict('apk') + setup_options = self.distribution.get_option_dict(self.package_type) for (option, (source, value)) in setup_options.items(): if source == 'command line': continue @@ -70,16 +67,15 @@ class BdistAPK(Command): sys.argv.append('--version={}'.format(version)) if not argv_contains('--arch'): - arch = 'arm64-v8a' + arch = 'armeabi-v7a' self.arch = arch sys.argv.append('--arch={}'.format(arch)) def run(self): - self.prepare_build_dir() - from pythonforandroid.toolchain import main - sys.argv[1] = 'apk' + from pythonforandroid.entrypoints import main + sys.argv[1] = self.package_type main() def prepare_build_dir(self): @@ -112,7 +108,7 @@ class BdistAPK(Command): makedirs(new_dir) print('Including {}'.format(filen)) copyfile(filen, join(bdist_dir, filen)) - if basename(filen) in ('main.py', 'main.pyo'): + if basename(filen) in ('main.py', 'main.pyc'): main_py_dirs.append(filen) # This feels ridiculous, but how else to define the main.py dir? 
@@ -123,7 +119,7 @@ class BdistAPK(Command): exit(1) if len(main_py_dirs) > 1: print('WARNING: Multiple main.py dirs found, using the shortest path') - main_py_dirs.sort(key=lambda j: len(split(j))) + main_py_dirs = sorted(main_py_dirs, key=lambda j: len(split(j))) if not argv_contains('--launcher'): sys.argv.append('--private={}'.format( @@ -131,18 +127,39 @@ class BdistAPK(Command): ) +class BdistAPK(Bdist): + """distutils command handler for 'apk'.""" + description = 'Create an APK with python-for-android' + package_type = 'apk' + + +class BdistAAR(Bdist): + """distutils command handler for 'aar'.""" + description = 'Create an AAR with python-for-android' + package_type = 'aar' + + +class BdistAAB(Bdist): + """distutils command handler for 'aab'.""" + description = 'Create an AAB with python-for-android' + package_type = 'aab' + + def _set_user_options(): # This seems like a silly way to do things, but not sure if there's a # better way to pass arbitrary options onwards to p4a - user_options = [('requirements=', None, None),] + user_options = [('requirements=', None, None), ] for i, arg in enumerate(sys.argv): if arg.startswith('--'): if ('=' in arg or - (i < (len(sys.argv) - 1) and not sys.argv[i+1].startswith('-'))): + (i < (len(sys.argv) - 1) and not sys.argv[i+1].startswith('-'))): user_options.append((arg[2:].split('=')[0] + '=', None, None)) else: user_options.append((arg[2:], None, None)) BdistAPK.user_options = user_options + BdistAAB.user_options = user_options + BdistAAR.user_options = user_options + _set_user_options() diff --git a/p4a/pythonforandroid/bootstrap.py b/p4a/pythonforandroid/bootstrap.py old mode 100644 new mode 100755 index b4a9a9e..0a5225e --- a/p4a/pythonforandroid/bootstrap.py +++ b/p4a/pythonforandroid/bootstrap.py @@ -1,20 +1,20 @@ +import functools +import glob +import importlib +import os from os.path import (join, dirname, isdir, normpath, splitext, basename) from os import listdir, walk, sep import sh import shlex -import glob -import importlib -import os import shutil -from pythonforandroid.logger import (warning, shprint, info, logger, - debug) -from pythonforandroid.util import (current_directory, ensure_dir, - temp_directory) +from pythonforandroid.logger import (shprint, info, logger, debug) +from pythonforandroid.util import ( + current_directory, ensure_dir, temp_directory, BuildInterruptingException) from pythonforandroid.recipe import Recipe -def copy_files(src_root, dest_root, override=True): +def copy_files(src_root, dest_root, override=True, symlink=False): for root, dirnames, filenames in walk(src_root): for filename in filenames: subdir = normpath(root.replace(src_root, "")) @@ -29,12 +29,44 @@ def copy_files(src_root, dest_root, override=True): if override and os.path.exists(dest_file): os.unlink(dest_file) if not os.path.exists(dest_file): - shutil.copy(src_file, dest_file) + if symlink: + os.symlink(src_file, dest_file) + else: + shutil.copy(src_file, dest_file) else: os.makedirs(dest_file) -class Bootstrap(object): +default_recipe_priorities = [ + "webview", "sdl2", "service_only" # last is highest +] +# ^^ NOTE: these are just the default priorities if no special rules +# apply (which you can find in the code below), so basically if no +# known graphical lib or web lib is used - in which case service_only +# is the most reasonable guess. + + +def _cmp_bootstraps_by_priority(a, b): + def rank_bootstrap(bootstrap): + """ Returns a ranking index for each bootstrap, + with higher priority ranked with higher number. 
""" + if bootstrap.name in default_recipe_priorities: + return default_recipe_priorities.index(bootstrap.name) + 1 + return 0 + + # Rank bootstraps in order: + rank_a = rank_bootstrap(a) + rank_b = rank_bootstrap(b) + if rank_a != rank_b: + return (rank_b - rank_a) + else: + if a.name < b.name: # alphabetic sort for determinism + return -1 + else: + return 1 + + +class Bootstrap: '''An Android project template, containing recipe stuff for compilation and templated fields for APK info. ''' @@ -45,15 +77,11 @@ class Bootstrap(object): bootstrap_dir = None build_dir = None - dist_dir = None dist_name = None distribution = None # All bootstraps should include Python in some way: - recipe_depends = [ - ("python2", "python2legacy", "python3", "python3crystax"), - 'android', - ] + recipe_depends = ['python3', 'android'] can_be_chosen_automatically = True '''Determines whether the bootstrap can be chosen as one that @@ -70,9 +98,9 @@ class Bootstrap(object): def dist_dir(self): '''The dist dir at which to place the finished distribution.''' if self.distribution is None: - warning('Tried to access {}.dist_dir, but {}.distribution ' - 'is None'.format(self, self)) - exit(1) + raise BuildInterruptingException( + 'Internal error: tried to access {}.dist_dir, but {}.distribution ' + 'is None'.format(self, self)) return self.distribution.dist_dir @property @@ -84,7 +112,7 @@ class Bootstrap(object): and optional dependencies are being used, and returns a list of these.''' recipes = [] - built_recipes = self.ctx.recipe_build_order + built_recipes = self.ctx.recipe_build_order or [] for recipe in self.recipe_depends: if isinstance(recipe, (tuple, list)): for alternative in recipe: @@ -104,70 +132,102 @@ class Bootstrap(object): def get_dist_dir(self, name): return join(self.ctx.dist_dir, name) - def get_common_dir(self): - return os.path.abspath(join(self.bootstrap_dir, "..", 'common')) - @property def name(self): modname = self.__class__.__module__ return modname.split(".", 2)[-1] + def get_bootstrap_dirs(self): + """get all bootstrap directories, following the MRO path""" + + # get all bootstrap names along the __mro__, cutting off Bootstrap and object + classes = self.__class__.__mro__[:-2] + bootstrap_names = [cls.name for cls in classes] + ['common'] + bootstrap_dirs = [ + join(self.ctx.root_dir, 'bootstraps', bootstrap_name) + for bootstrap_name in reversed(bootstrap_names) + ] + return bootstrap_dirs + + def _copy_in_final_files(self): + if self.name == "sdl2": + # Get the paths for copying SDL2's java source code: + sdl2_recipe = Recipe.get_recipe("sdl2", self.ctx) + sdl2_build_dir = sdl2_recipe.get_jni_dir() + src_dir = join(sdl2_build_dir, "SDL", "android-project", + "app", "src", "main", "java", + "org", "libsdl", "app") + target_dir = join(self.dist_dir, 'src', 'main', 'java', 'org', + 'libsdl', 'app') + + # Do actual copying: + info('Copying in SDL2 .java files from: ' + str(src_dir)) + if not os.path.exists(target_dir): + os.makedirs(target_dir) + copy_files(src_dir, target_dir, override=True) + def prepare_build_dir(self): - '''Ensure that a build dir exists for the recipe. This same single - dir will be used for building all different archs.''' + """Ensure that a build dir exists for the recipe. 
This same single + dir will be used for building all different archs.""" + bootstrap_dirs = self.get_bootstrap_dirs() + # now do a cumulative copy of all bootstrap dirs self.build_dir = self.get_build_dir() - self.common_dir = self.get_common_dir() - copy_files(join(self.bootstrap_dir, 'build'), self.build_dir) - copy_files(join(self.common_dir, 'build'), self.build_dir, - override=False) - if self.ctx.symlink_java_src: - info('Symlinking java src instead of copying') - shprint(sh.rm, '-r', join(self.build_dir, 'src')) - shprint(sh.mkdir, join(self.build_dir, 'src')) - for dirn in listdir(join(self.bootstrap_dir, 'build', 'src')): - shprint(sh.ln, '-s', join(self.bootstrap_dir, 'build', 'src', dirn), - join(self.build_dir, 'src')) + for bootstrap_dir in bootstrap_dirs: + copy_files(join(bootstrap_dir, 'build'), self.build_dir, symlink=self.ctx.symlink_bootstrap_files) + with current_directory(self.build_dir): with open('project.properties', 'w') as fileh: fileh.write('target=android-{}'.format(self.ctx.android_api)) - def prepare_dist_dir(self, name): + def prepare_dist_dir(self): ensure_dir(self.dist_dir) - def run_distribute(self): + def assemble_distribution(self): + ''' Copies all the files into the distribution (this function is + overridden by the specific bootstrap classes to do this) + and adds in the distribution info. + ''' + self._copy_in_final_files() self.distribution.save_info(self.dist_dir) @classmethod - def list_bootstraps(cls): + def all_bootstraps(cls): '''Find all the available bootstraps and return them.''' forbidden_dirs = ('__pycache__', 'common') bootstraps_dir = join(dirname(__file__), 'bootstraps') + result = set() for name in listdir(bootstraps_dir): if name in forbidden_dirs: continue filen = join(bootstraps_dir, name) if isdir(filen): - yield name + result.add(name) + return result @classmethod - def get_bootstrap_from_recipes(cls, recipes, ctx): - '''Returns a bootstrap whose recipe requirements do not conflict with - the given recipes.''' + def get_usable_bootstraps_for_recipes(cls, recipes, ctx): + '''Returns all bootstraps whose recipe requirements do not conflict + with the given recipes, in no particular order.''' info('Trying to find a bootstrap that matches the given recipes.') bootstraps = [cls.get_bootstrap(name, ctx) - for name in cls.list_bootstraps()] - acceptable_bootstraps = [] + for name in cls.all_bootstraps()] + acceptable_bootstraps = set() + + # Find out which bootstraps are acceptable: for bs in bootstraps: if not bs.can_be_chosen_automatically: continue - possible_dependency_lists = expand_dependencies(bs.recipe_depends) + possible_dependency_lists = expand_dependencies(bs.recipe_depends, ctx) for possible_dependencies in possible_dependency_lists: ok = True + # Check if the bootstrap's dependencies have an internal conflict: for recipe in possible_dependencies: recipe = Recipe.get_recipe(recipe, ctx) - if any([conflict in recipes for conflict in recipe.conflicts]): + if any(conflict in recipes for conflict in recipe.conflicts): ok = False break + # Check if bootstrap's dependencies conflict with chosen + # packages: for recipe in recipes: try: recipe = Recipe.get_recipe(recipe, ctx) @@ -175,19 +235,63 @@ class Bootstrap(object): conflicts = [] else: conflicts = recipe.conflicts - if any([conflict in possible_dependencies - for conflict in conflicts]): + if any(conflict in possible_dependencies + for conflict in conflicts): ok = False break if ok and bs not in acceptable_bootstraps: - acceptable_bootstraps.append(bs) + 
acceptable_bootstraps.add(bs) + info('Found {} acceptable bootstraps: {}'.format( len(acceptable_bootstraps), [bs.name for bs in acceptable_bootstraps])) - if acceptable_bootstraps: - info('Using the first of these: {}' - .format(acceptable_bootstraps[0].name)) - return acceptable_bootstraps[0] + return acceptable_bootstraps + + @classmethod + def get_bootstrap_from_recipes(cls, recipes, ctx): + '''Picks a single recommended default bootstrap out of + get_usable_bootstraps_for_recipes() for the given recipes, + and returns it.''' + + known_web_packages = {"flask"} # to pick webview over service_only + recipes_with_deps_lists = expand_dependencies(recipes, ctx) + acceptable_bootstraps = cls.get_usable_bootstraps_for_recipes( + recipes, ctx + ) + + def have_dependency_in_recipes(dep): + for dep_list in recipes_with_deps_lists: + if dep in dep_list: + return True + return False + + # Special rule: return SDL2 bootstrap if there's an sdl2 dep: + if (have_dependency_in_recipes("sdl2") and + "sdl2" in [b.name for b in acceptable_bootstraps] + ): + info('Using sdl2 bootstrap since it is in dependencies') + return cls.get_bootstrap("sdl2", ctx) + + # Special rule: return "webview" if we depend on common web recipe: + for possible_web_dep in known_web_packages: + if have_dependency_in_recipes(possible_web_dep): + # We have a web package dep! + if "webview" in [b.name for b in acceptable_bootstraps]: + info('Using webview bootstrap since common web packages ' 'were found {}'.format( known_web_packages.intersection(recipes) )) + return cls.get_bootstrap("webview", ctx) + + prioritized_acceptable_bootstraps = sorted( + list(acceptable_bootstraps), + key=functools.cmp_to_key(_cmp_bootstraps_by_priority) + ) + + if prioritized_acceptable_bootstraps: + info('Using the highest ranked/first of these: {}' .format(prioritized_acceptable_bootstraps[0].name)) + return prioritized_acceptable_bootstraps[0] return None @classmethod @@ -218,15 +322,16 @@ class Bootstrap(object): tgt_dir = join(dest_dir, arch.arch) ensure_dir(tgt_dir) for src_dir in src_dirs: - for lib in glob.glob(join(src_dir, wildcard)): - shprint(sh.cp, '-a', lib, tgt_dir) + libs = glob.glob(join(src_dir, wildcard)) + if libs: + shprint(sh.cp, '-a', *libs, tgt_dir) def distribute_javaclasses(self, javaclass_dir, dest_dir="src"): '''Copy existing javaclasses from build dir to current dist dir.''' info('Copying java files') ensure_dir(dest_dir) - for filename in glob.glob(javaclass_dir): - shprint(sh.cp, '-a', filename, dest_dir) + filenames = glob.glob(javaclass_dir) + shprint(sh.cp, '-a', *filenames, dest_dir) def distribute_aars(self, arch): '''Process existing .aar bundles and copy to current dist dir.''' @@ -259,24 +364,18 @@ class Bootstrap(object): debug(" to {}".format(so_tgt_dir)) ensure_dir(so_tgt_dir) so_files = glob.glob(join(so_src_dir, '*.so')) - for f in so_files: - shprint(sh.cp, '-a', f, so_tgt_dir) + shprint(sh.cp, '-a', *so_files, so_tgt_dir) def strip_libraries(self, arch): info('Stripping libraries') - if self.ctx.python_recipe.from_crystax: - info('Python was loaded from CrystaX, skipping strip') - return env = arch.get_env() tokens = shlex.split(env['STRIP']) strip = sh.Command(tokens[0]) if len(tokens) > 1: strip = strip.bake(tokens[1:]) - libs_dir = join(self.dist_dir, '_python_bundle', + libs_dir = join(self.dist_dir, f'_python_bundle__{arch.arch}', '_python_bundle', 'modules') - if self.ctx.python_recipe.name == 'python2legacy': - libs_dir = join(self.dist_dir, 'private') filens = shprint(sh.find, libs_dir, 
join(self.dist_dir, 'libs'), '-iname', '*.so', _env=env).stdout.decode('utf-8') @@ -301,9 +400,31 @@ class Bootstrap(object): shprint(sh.rm, '-rf', d) -def expand_dependencies(recipes): +def expand_dependencies(recipes, ctx): + """ This function expands the given recipe list into lists of all the + available alternative recipe combinations, with the dependencies added + in ONLY for the recipes that have no alternatives. + (So this is like the deps graph, very simplified and incomplete, but + hopefully good enough for most basic bootstrap compatibility checks) + """ + + # Add in all the deps of recipes where there is no alternative: + recipes_with_deps = list(recipes) + for entry in recipes: + if not isinstance(entry, (tuple, list)) or len(entry) == 1: + if isinstance(entry, (tuple, list)): + entry = entry[0] + try: + recipe = Recipe.get_recipe(entry, ctx) + recipes_with_deps += recipe.depends + except ValueError: + # it's a pure python package without a recipe, so we + # don't know the dependencies...skipping for now + pass + + # Split up lists by available alternatives: recipe_lists = [[]] - for recipe in recipes: + for recipe in recipes_with_deps: if isinstance(recipe, (tuple, list)): new_recipe_lists = [] for alternative in recipe: @@ -313,6 +434,6 @@ def expand_dependencies(recipes): new_recipe_lists.append(new_list) recipe_lists = new_recipe_lists else: - for old_list in recipe_lists: - old_list.append(recipe) + for existing_list in recipe_lists: + existing_list.append(recipe) return recipe_lists diff --git a/p4a/pythonforandroid/bootstraps/common/build/build.py b/p4a/pythonforandroid/bootstraps/common/build/build.py index 342115e..c49d18f 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/build.py +++ b/p4a/pythonforandroid/bootstraps/common/build/build.py @@ -1,13 +1,13 @@ -#!/usr/bin/env python2.7 - -from __future__ import print_function +#!/usr/bin/env python3 +from gzip import GzipFile +import hashlib import json from os.path import ( dirname, join, isfile, realpath, relpath, split, exists, basename ) -from os import listdir, makedirs, remove +from os import environ, listdir, makedirs, remove import os import shlex import shutil @@ -16,19 +16,20 @@ import sys import tarfile import tempfile import time -from zipfile import ZipFile from distutils.version import LooseVersion from fnmatch import fnmatch import jinja2 -def get_dist_info_for(key): +def get_dist_info_for(key, error_if_missing=True): try: with open(join(dirname(__file__), 'dist_info.json'), 'r') as fileh: info = json.load(fileh) - value = str(info[key]) + value = info[key] except (OSError, KeyError) as e: + if not error_if_missing: + return None print("BUILD FAILURE: Couldn't extract the key `" + key + "` " + "from dist_info.json: " + str(e)) sys.exit(1) @@ -39,10 +40,6 @@ def get_hostpython(): return get_dist_info_for('hostpython') -def get_python_version(): - return get_dist_info_for('python_version') - - def get_bootstrap_name(): return get_dist_info_for('bootstrap') @@ -57,7 +54,6 @@ else: curdir = dirname(__file__) PYTHON = get_hostpython() -PYTHON_VERSION = get_python_version() if PYTHON is not None and not exists(PYTHON): PYTHON = None @@ -72,29 +68,23 @@ BLACKLIST_PATTERNS = [ '~', '*.bak', '*.swp', + + # Android artifacts + '*.apk', + '*.aab', ] -# pyc/py -if PYTHON is not None: - BLACKLIST_PATTERNS.append('*.py') - if PYTHON_VERSION and int(PYTHON_VERSION[0]) == 2: - # we only blacklist `.pyc` for python2 because in python3 the compiled - # extension is `.pyc` (.pyo files not exists for python >= 3.6) - 
BLACKLIST_PATTERNS.append('*.pyc') WHITELIST_PATTERNS = [] if get_bootstrap_name() in ('sdl2', 'webview', 'service_only'): WHITELIST_PATTERNS.append('pyconfig.h') -python_files = [] - environment = jinja2.Environment(loader=jinja2.FileSystemLoader( join(curdir, 'templates'))) -def try_unlink(fn): - if exists(fn): - os.unlink(fn) +DEFAULT_PYTHON_ACTIVITY_JAVA_CLASS = 'org.kivy.android.PythonActivity' +DEFAULT_PYTHON_SERVICE_JAVA_CLASS = 'org.kivy.android.PythonService' def ensure_dir(path): @@ -154,75 +144,33 @@ def listfiles(d): yield fn -def make_python_zip(): - ''' - Search for all the python related files, and construct the pythonXX.zip - According to - # http://randomsplat.com/id5-cross-compiling-python-for-embedded-linux.html - site-packages, config and lib-dynload will be not included. - ''' - - if not exists('private'): - print('No compiled python is present to zip, skipping.') - return - - global python_files - d = realpath(join('private', 'lib', 'python2.7')) - - def select(fn): - if is_blacklist(fn): - return False - fn = realpath(fn) - assert(fn.startswith(d)) - fn = fn[len(d):] - if (fn.startswith('/site-packages/') - or fn.startswith('/config/') - or fn.startswith('/lib-dynload/') - or fn.startswith('/libpymodules.so')): - return False - return fn - - # get a list of all python file - python_files = [x for x in listfiles(d) if select(x)] - - # create the final zipfile - zfn = join('private', 'lib', 'python27.zip') - zf = ZipFile(zfn, 'w') - - # put all the python files in it - for fn in python_files: - afn = fn[len(d):] - zf.write(fn, afn) - zf.close() - - -def make_tar(tfn, source_dirs, ignore_path=[], optimize_python=True): +def make_tar(tfn, source_dirs, byte_compile_python=False, optimize_python=True): ''' Make a zip file `fn` from the contents of source_dis. ''' - # selector function - def select(fn): - rfn = realpath(fn) - for p in ignore_path: - if p.endswith('/'): - p = p[:-1] - if rfn.startswith(p): - return False - if rfn in python_files: - return False - return not is_blacklist(fn) + def clean(tinfo): + """cleaning function (for reproducible builds)""" + tinfo.uid = tinfo.gid = 0 + tinfo.uname = tinfo.gname = '' + tinfo.mtime = 0 + return tinfo # get the files and relpath file of all the directory we asked for files = [] for sd in source_dirs: sd = realpath(sd) - compile_dir(sd, optimize_python=optimize_python) - files += [(x, relpath(realpath(x), sd)) for x in listfiles(sd) - if select(x)] + for fn in listfiles(sd): + if is_blacklist(fn): + continue + if fn.endswith('.py') and byte_compile_python: + fn = compile_py_file(fn, optimize_python=optimize_python) + files.append((fn, relpath(realpath(fn), sd))) + files.sort() # deterministic # create tar.gz of thoses files - tf = tarfile.open(tfn, 'w:gz', format=tarfile.USTAR_FORMAT) + gf = GzipFile(tfn, 'wb', mtime=0) # deterministic + tf = tarfile.open(None, 'w', gf, format=tarfile.USTAR_FORMAT) dirs = [] for fn, afn in files: dn = dirname(afn) @@ -238,25 +186,24 @@ def make_tar(tfn, source_dirs, ignore_path=[], optimize_python=True): dirs.append(d) tinfo = tarfile.TarInfo(d) tinfo.type = tarfile.DIRTYPE + clean(tinfo) tf.addfile(tinfo) # put the file - tf.add(fn, afn) + tf.add(fn, afn, filter=clean) tf.close() + gf.close() -def compile_dir(dfn, optimize_python=True): +def compile_py_file(python_file, optimize_python=True): ''' - Compile *.py in directory `dfn` to *.pyo + Compile python_file to *.pyc and return the filename of the *.pyc file. 
''' if PYTHON is None: return - if int(PYTHON_VERSION[0]) >= 3: - args = [PYTHON, '-m', 'compileall', '-b', '-f', dfn] - else: - args = [PYTHON, '-m', 'compileall', '-f', dfn] + args = [PYTHON, '-m', 'compileall', '-b', '-f', python_file] if optimize_python: # -OO = strip docstrings args.insert(1, '-OO') @@ -268,16 +215,18 @@ def compile_dir(dfn, optimize_python=True): 'error, see logs above') exit(1) + return ".".join([os.path.splitext(python_file)[0], "pyc"]) + def make_package(args): - # If no launcher is specified, require a main.py/main.pyo: + # If no launcher is specified, require a main.py/main.pyc: if (get_bootstrap_name() != "sdl" or args.launcher is None) and \ - get_bootstrap_name() != "webview": + get_bootstrap_name() not in ["webview", "service_library"]: # (webview doesn't need an entrypoint, apparently) if args.private is None or ( not exists(join(realpath(args.private), 'main.py')) and - not exists(join(realpath(args.private), 'main.pyo'))): - print('''BUILD FAILURE: No main.py(o) found in your app directory. This + not exists(join(realpath(args.private), 'main.pyc'))): + print('''BUILD FAILURE: No main.py(c) found in your app directory. This file must exist to act as the entry point for you app. If your app is started by a file with a different name, rename it to main.py or add a main.py that loads it.''') @@ -286,53 +235,159 @@ main.py that loads it.''') assets_dir = "src/main/assets" # Delete the old assets. - try_unlink(join(assets_dir, 'public.mp3')) - try_unlink(join(assets_dir, 'private.mp3')) + shutil.rmtree(assets_dir, ignore_errors=True) ensure_dir(assets_dir) - # In order to speedup import and initial depack, - # construct a python27.zip - make_python_zip() - # Add extra environment variable file into tar-able directory: env_vars_tarpath = tempfile.mkdtemp(prefix="p4a-extra-env-") with open(os.path.join(env_vars_tarpath, "p4a_env_vars.txt"), "w") as f: - f.write("P4A_IS_WINDOWED=" + str(args.window) + "\n") + if hasattr(args, "window"): + f.write("P4A_IS_WINDOWED=" + str(args.window) + "\n") if hasattr(args, "orientation"): f.write("P4A_ORIENTATION=" + str(args.orientation) + "\n") f.write("P4A_NUMERIC_VERSION=" + str(args.numeric_version) + "\n") f.write("P4A_MINSDK=" + str(args.min_sdk_version) + "\n") # Package up the private data (public not supported). 
- tar_dirs = [env_vars_tarpath] - if args.private: - tar_dirs.append(args.private) - for python_bundle_dir in ('private', 'crystax_python', '_python_bundle'): - if exists(python_bundle_dir): - tar_dirs.append(python_bundle_dir) - if get_bootstrap_name() == "webview": - tar_dirs.append('webview_includes') - if args.private or args.launcher: - make_tar( - join(assets_dir, 'private.mp3'), tar_dirs, args.ignore_path, - optimize_python=args.optimize_python) + use_setup_py = get_dist_info_for("use_setup_py", error_if_missing=False) is True + private_tar_dirs = [env_vars_tarpath] + _temp_dirs_to_clean = [] + try: + if args.private: + if not use_setup_py or ( + not exists(join(args.private, "setup.py")) and + not exists(join(args.private, "pyproject.toml")) + ): + print('No setup.py/pyproject.toml used, copying ' 'full private data into .apk.') + private_tar_dirs.append(args.private) + else: + print("Copying main.py's ONLY, since other app data is " "expected in site-packages.") + main_py_only_dir = tempfile.mkdtemp() + _temp_dirs_to_clean.append(main_py_only_dir) + + # Check all main.py files we need to copy: + copy_paths = ["main.py", join("service", "main.py")] + for copy_path in copy_paths: + variants = [ + copy_path, + copy_path.partition(".")[0] + ".pyc", + ] + # Check in all variants with all possible endings: + for variant in variants: + if exists(join(args.private, variant)): + # Make sure the surrounding directory exists: + dir_path = os.path.dirname(variant) + if (len(dir_path) > 0 and + not exists( + join(main_py_only_dir, dir_path) + )): + os.mkdir(join(main_py_only_dir, dir_path)) + # Copy actual file: + shutil.copyfile( + join(args.private, variant), + join(main_py_only_dir, variant), + ) + + # Append directory with all main.py's to result apk paths: + private_tar_dirs.append(main_py_only_dir) + if get_bootstrap_name() == "webview": + for asset in listdir('webview_includes'): + shutil.copy(join('webview_includes', asset), join(assets_dir, asset)) + + for asset in args.assets: + asset_src, asset_dest = asset.split(":") + if isfile(realpath(asset_src)): + ensure_dir(dirname(join(assets_dir, asset_dest))) + shutil.copy(realpath(asset_src), join(assets_dir, asset_dest)) + else: + shutil.copytree(realpath(asset_src), join(assets_dir, asset_dest)) + + if args.private or args.launcher: + for arch in get_dist_info_for("archs"): + libs_dir = f"libs/{arch}" + make_tar( + join(libs_dir, "libpybundle.so"), + [f"_python_bundle__{arch}"], + byte_compile_python=args.byte_compile_python, + optimize_python=args.optimize_python, + ) + make_tar( + join(assets_dir, "private.tar"), + private_tar_dirs, + byte_compile_python=args.byte_compile_python, + optimize_python=args.optimize_python, + ) + finally: + for directory in _temp_dirs_to_clean: + shutil.rmtree(directory) # Remove extra env vars tar-able directory: shutil.rmtree(env_vars_tarpath) # Prepare some variables for templating process res_dir = "src/main/res" + res_dir_initial = "src/res_initial" + # make res_dir stateless + if exists(res_dir_initial): + shutil.rmtree(res_dir, ignore_errors=True) + shutil.copytree(res_dir_initial, res_dir) + else: + shutil.copytree(res_dir, res_dir_initial) + + # Add user resources + for resource in args.resources: + resource_src, resource_dest = resource.split(":") + if isfile(realpath(resource_src)): + ensure_dir(dirname(join(res_dir, resource_dest))) + shutil.copy(realpath(resource_src), join(res_dir, resource_dest)) + else: + shutil.copytree(realpath(resource_src), + join(res_dir, resource_dest), 
dirs_exist_ok=True) + default_icon = 'templates/kivy-icon.png' default_presplash = 'templates/kivy-presplash.jpg' shutil.copy( args.icon or default_icon, - join(res_dir, 'drawable/icon.png') + join(res_dir, 'mipmap/icon.png') ) + if args.icon_fg and args.icon_bg: + shutil.copy(args.icon_fg, join(res_dir, 'mipmap/icon_foreground.png')) + shutil.copy(args.icon_bg, join(res_dir, 'mipmap/icon_background.png')) + with open(join(res_dir, 'mipmap-anydpi-v26/icon.xml'), "w") as fd: + fd.write("""<?xml version="1.0" encoding="utf-8"?> +<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android"> + <background android:drawable="@mipmap/icon_background"/> + <foreground android:drawable="@mipmap/icon_foreground"/> +</adaptive-icon> +""") + elif args.icon_fg or args.icon_bg: + print("WARNING: Received an --icon_fg or an --icon_bg argument, but not both. " "Ignoring.") + if get_bootstrap_name() != "service_only": - shutil.copy( - args.presplash or default_presplash, - join(res_dir, 'drawable/presplash.jpg') - ) + lottie_splashscreen = join(res_dir, 'raw/splashscreen.json') + if args.presplash_lottie: + shutil.copy( 'templates/lottie.xml', join(res_dir, 'layout/lottie.xml') ) + ensure_dir(join(res_dir, 'raw')) + shutil.copy( args.presplash_lottie, join(res_dir, 'raw/splashscreen.json') ) + else: + if exists(lottie_splashscreen): + remove(lottie_splashscreen) + remove(join(res_dir, 'layout/lottie.xml')) + + shutil.copy( args.presplash or default_presplash, join(res_dir, 'drawable/presplash.jpg') ) # If extra Java jars were requested, copy them into the libs directory jars = [] @@ -360,17 +415,17 @@ main.py that loads it.''') version_code = 0 if not args.numeric_version: - # Set version code in format (arch-minsdk-app_version) - with open(join(dirname(__file__), 'dist_info.json'), 'r') as dist_info: - dist_data = json.load(dist_info) - arch = dist_data["archs"][0] - arch_dict = {"x86_64": "9", "arm64-v8a": "8", "armeabi-v7a": "7", "x86": "6"} - arch_code = arch_dict.get(arch, '1') + """ + Set version code in format (10 + minsdk + app_version) + Historically versioning was (arch + minsdk + app_version), + with arch expressed with a single digit from 6 to 9. + Since multi-arch support was added, it has been changed to 10. 
+ """ min_sdk = args.min_sdk_version for i in args.version.split('.'): version_code *= 100 version_code += int(i) - args.numeric_version = "{}{}{}".format(arch_code, min_sdk, version_code) + args.numeric_version = "{}{}{}".format("10", min_sdk, version_code) if args.intent_filters: with open(args.intent_filters) as fd: @@ -387,6 +442,9 @@ main.py that loads it.''') for spec in args.extra_source_dirs: if ':' in spec: specdir, specincludes = spec.split(':') + print('WARNING: Currently gradle builds only support including source ' + 'directories, so when building using gradle all files in ' + '{} will be included.'.format(specdir)) else: specdir = spec specincludes = '**' @@ -402,6 +460,7 @@ main.py that loads it.''') service = True service_names = [] + base_service_class = args.service_class_name.split('.')[-1] for sid, spec in enumerate(args.services): spec = spec.split(':') name = spec[0] @@ -426,6 +485,7 @@ main.py that loads it.''') foreground=foreground, sticky=sticky, service_id=sid + 1, + base_service_class=base_service_class, ) # Find the SDK directory and target API @@ -447,19 +507,37 @@ main.py that loads it.''') # Try to build with the newest available build tools ignored = {".DS_Store", ".ds_store"} build_tools_versions = [x for x in listdir(join(sdk_dir, 'build-tools')) if x not in ignored] - build_tools_versions.sort(key=LooseVersion) + build_tools_versions = sorted(build_tools_versions, + key=LooseVersion) build_tools_version = build_tools_versions[-1] # Folder name for launcher (used by SDL2 bootstrap) url_scheme = 'kivy' + # Copy backup rules file if specified and update the argument + res_xml_dir = join(res_dir, 'xml') + if args.backup_rules: + ensure_dir(res_xml_dir) + shutil.copy(join(args.private, args.backup_rules), res_xml_dir) + args.backup_rules = split(args.backup_rules)[1][:-4] + + # Copy res_xml files to src/main/res/xml + if args.res_xmls: + ensure_dir(res_xml_dir) + for xmlpath in args.res_xmls: + if not os.path.exists(xmlpath): + xmlpath = join(args.private, xmlpath) + shutil.copy(xmlpath, res_xml_dir) + # Render out android manifest: manifest_path = "src/main/AndroidManifest.xml" render_args = { "args": args, "service": service, "service_names": service_names, - "android_api": android_api + "android_api": android_api, + "debug": "debug" in args.build_mode, + "native_services": args.native_services } if get_bootstrap_name() == "sdl2": render_args["url_scheme"] = url_scheme @@ -482,9 +560,17 @@ main.py that loads it.''') aars=aars, jars=jars, android_api=android_api, - build_tools_version=build_tools_version + build_tools_version=build_tools_version, + debug_build="debug" in args.build_mode, + is_library=(get_bootstrap_name() == 'service_library'), ) + # gradle properties + render( + 'gradle.tmpl.properties', + 'gradle.properties', + args=args) + # ant build templates render( 'build.tmpl.xml', @@ -493,9 +579,18 @@ main.py that loads it.''') versioned_name=versioned_name) # String resources: + timestamp = time.time() + if 'SOURCE_DATE_EPOCH' in environ: + # for reproducible builds + timestamp = int(environ['SOURCE_DATE_EPOCH']) + private_version = "{} {} {}".format( + args.version, + args.numeric_version, + timestamp + ) render_args = { "args": args, - "private_version": str(time.time()) + "private_version": hashlib.sha1(private_version.encode()).hexdigest() } if get_bootstrap_name() == "sdl2": render_args["url_scheme"] = url_scheme @@ -527,27 +622,31 @@ main.py that loads it.''') for patch_name in os.listdir(join('src', 'patches')): patch_path = join('src', 
'patches', patch_name) print("Applying patch: " + str(patch_path)) + + # -N: insist this is FORWARD patch, don't reverse apply + # -p1: strip first path component + # -t: batch mode, don't ask questions + patch_command = ["patch", "-N", "-p1", "-t", "-i", patch_path] + try: - subprocess.check_output([ - # -N: insist this is FORWARd patch, don't reverse apply - # -p1: strip first path component - # -t: batch mode, don't ask questions - "patch", "-N", "-p1", "-t", "-i", patch_path - ]) + # Use a dry run to establish whether the patch is already applied. + # If we don't check this, the patch may be partially applied (which is bad!) + subprocess.check_output(patch_command + ["--dry-run"]) except subprocess.CalledProcessError as e: if e.returncode == 1: - # Return code 1 means it didn't apply, this will - # usually mean it is already applied. - print("Warning: failed to apply patch (" + - "exit code 1), " + - "assuming it is already applied: " + - str(patch_path) - ) + # Return code 1 means not all hunks could be applied, this usually + # means the patch is already applied. + print("Warning: failed to apply patch (exit code 1), " "assuming it is already applied: ", str(patch_path)) else: raise e + else: + # The dry run worked, so do the real thing + subprocess.check_output(patch_command) -def parse_args(args=None): +def parse_args_and_make_package(args=None): global BLACKLIST_PATTERNS, WHITELIST_PATTERNS, PYTHON # Get the default minsdk, equal to the NDK API that this dist is built against @@ -602,16 +701,36 @@ tools directory of the Android SDK. help='Custom key=value to add in application metadata') ap.add_argument('--uses-library', dest='android_used_libs', action='append', default=[], help='Used shared libraries included using <uses-library> tag in AndroidManifest.xml') + ap.add_argument('--asset', dest='assets', + action="append", default=[], + metavar="/path/to/source:dest", + help='Put this in the assets folder at assets/dest') + ap.add_argument('--resource', dest='resources', + action="append", default=[], + metavar="/path/to/source:kind/asset", + help='Put this in the res folder at res/kind') ap.add_argument('--icon', dest='icon', help=('A png file to use as the icon for ' 'the application.')) + ap.add_argument('--icon-fg', dest='icon_fg', + help=('A png file to use as the foreground of the adaptive icon ' + 'for the application.')) + ap.add_argument('--icon-bg', dest='icon_bg', + help=('A png file to use as the background of the adaptive icon ' + 'for the application.')) ap.add_argument('--service', dest='services', action='append', default=[], help='Declare a new service entrypoint: ' 'NAME:PATH_TO_PY[:foreground]') + ap.add_argument('--native-service', dest='native_services', action='append', default=[], + help='Declare a new native service: ' + 'package.name.service') if get_bootstrap_name() != "service_only": ap.add_argument('--presplash', dest='presplash', help=('A jpeg file to use as a screen while the ' 'application is loading.')) + ap.add_argument('--presplash-lottie', dest='presplash_lottie', + help=('A lottie (json) file to use as an animation while the ' + 'application is loading.')) ap.add_argument('--presplash-color', dest='presplash_color', default='#000000', @@ -636,6 +755,28 @@ tools directory of the Android SDK. 
'https://developer.android.com/guide/' 'topics/manifest/' 'activity-element.html')) + + ap.add_argument('--enable-androidx', dest='enable_androidx', + action='store_true', + help=('Enable the AndroidX support library, ' + 'requires api = 28 or greater')) + ap.add_argument('--android-entrypoint', dest='android_entrypoint', + default=DEFAULT_PYTHON_ACTIVITY_JAVA_CLASS, + help='Defines which java class will be used for startup, usually a subclass of PythonActivity') + ap.add_argument('--android-apptheme', dest='android_apptheme', + default='@android:style/Theme.NoTitleBar', + help='Defines which app theme should be selected for the main activity') + ap.add_argument('--add-compile-option', dest='compile_options', default=[], + action='append', help='add compile options to gradle.build') + ap.add_argument('--add-gradle-repository', dest='gradle_repositories', + default=[], + action='append', + help='Add a repository for gradle') + ap.add_argument('--add-packaging-option', dest='packaging_options', + default=[], + action='append', + help='Android packaging options') + ap.add_argument('--wakelock', dest='wakelock', action='store_true', help=('Indicate if the application needs the device ' 'to stay on')) @@ -647,6 +788,13 @@ tools directory of the Android SDK. default=join(curdir, 'whitelist.txt'), help=('Use a whitelist file to prevent blacklisting of ' 'file in the final APK')) + ap.add_argument('--release', dest='build_mode', action='store_const', + const='release', default='debug', + help='Build your app as a non-debug release build. ' + '(Disables gdb debugging among other things)') + ap.add_argument('--with-debug-symbols', dest='with_debug_symbols', + action='store_const', const=True, default=False, + help='Will keep debug symbols from `.so` files.') ap.add_argument('--add-jar', dest='add_jar', action='append', help=('Add a Java .jar to the libs, so you can access its ' 'classes with pyjnius. You can specify this ' @@ -674,6 +822,8 @@ tools directory of the Android SDK. 'filename containing xml. The filename should be ' 'located relative to the python-for-android ' 'directory')) + ap.add_argument('--res_xml', dest='res_xmls', action='append', default=[], + help='Add files to res/xml directory (for example device-filters)', nargs='+') ap.add_argument('--with-billing', dest='billing_pubkey', help='If set, the billing service will be added (not implemented)') ap.add_argument('--add-source', dest='extra_source_dirs', action='append', @@ -685,8 +835,6 @@ tools directory of the Android SDK. ap.add_argument('--try-system-python-compile', dest='try_system_python_compile', action='store_true', help='Use the system python during compileall if possible.') - ap.add_argument('--no-compile-pyo', dest='no_compile_pyo', action='store_true', - help='Do not optimise .py files to .pyo.') ap.add_argument('--sign', action='store_true', help=('Try to sign the APK with your credentials. You must set ' 'the appropriate environment variables.')) @@ -698,10 +846,33 @@ tools directory of the Android SDK. help='Set the launch mode of the main activity in the manifest.') ap.add_argument('--allow-backup', dest='allow_backup', default='true', help="if set to 'false', then android won't backup the application.") + ap.add_argument('--backup-rules', dest='backup_rules', default='', + help=('Backup rules for Android Auto Backup. Argument is a ' + 'filename containing xml. The filename should be ' + 'located relative to the private directory containing your source code ' + 'files (containing your main.py entrypoint). 
' + 'See https://developer.android.com/guide/topics/data/' + 'autobackup#IncludingFiles for more information')) + ap.add_argument('--no-byte-compile-python', dest='byte_compile_python', + action='store_false', default=True, + help='Skip byte compile for .py files.') ap.add_argument('--no-optimize-python', dest='optimize_python', action='store_false', default=True, - help=('Whether to compile to optimised .pyo files, using -OO ' + help=('Whether to compile to optimised .pyc files, using -OO ' '(strips docstrings and asserts)')) + ap.add_argument('--extra-manifest-xml', default='', + help=('Extra xml to write directly inside the <manifest> element of ' + 'AndroidManifest.xml')) + ap.add_argument('--extra-manifest-application-arguments', default='', + help='Extra arguments to be added to the <application> tag of ' + 'AndroidManifest.xml') + ap.add_argument('--manifest-placeholders', dest='manifest_placeholders', + default='[:]', help=('Inject build variables into the manifest ' + 'via the manifestPlaceholders property')) + ap.add_argument('--service-class-name', dest='service_class_name', default=DEFAULT_PYTHON_SERVICE_JAVA_CLASS, + help='Use this parameter if you need to implement your own PythonService Java class') + ap.add_argument('--activity-class-name', dest='activity_class_name', default=DEFAULT_PYTHON_ACTIVITY_JAVA_CLASS, + help='The full java class name of the main activity') # Put together arguments, and add those from .p4a config file: if args is None: @@ -721,7 +892,6 @@ tools directory of the Android SDK. _read_configuration() args = ap.parse_args(args) - args.ignore_path = [] if args.name and args.name[0] == '"' and args.name[-1] == '"': args.name = args.name[1:-1] @@ -751,21 +921,19 @@ tools directory of the Android SDK. if args.permissions and isinstance(args.permissions[0], list): args.permissions = [p for perm in args.permissions for p in perm] + if args.res_xmls and isinstance(args.res_xmls[0], list): + args.res_xmls = [x for res in args.res_xmls for x in res] + if args.try_system_python_compile: # Hardcoding python2.7 is okay for now, as python3 skips the # compilation anyway - if not exists('crystax_python'): - python_executable = 'python2.7' - try: - subprocess.call([python_executable, '--version']) - except (OSError, subprocess.CalledProcessError): - pass - else: - PYTHON = python_executable - - if args.no_compile_pyo: - PYTHON = None - BLACKLIST_PATTERNS.remove('*.py') + python_executable = 'python2.7' + try: + subprocess.call([python_executable, '--version']) + except (OSError, subprocess.CalledProcessError): + pass + else: + PYTHON = python_executable if args.blacklist: with open(args.blacklist) as fd: @@ -791,4 +959,4 @@ tools directory of the Android SDK. 
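Options such as --res_xml combine action='append' with nargs='+', so argparse produces a list of lists that the code above flattens (the same flattening is applied to --permission); a runnable sketch of that behaviour, using an illustrative command line:

    import argparse

    ap = argparse.ArgumentParser()
    # action='append' + nargs='+' collects one sub-list per occurrence:
    #   --res_xml a.xml b.xml --res_xml c.xml -> [['a.xml', 'b.xml'], ['c.xml']]
    ap.add_argument('--res_xml', dest='res_xmls', action='append',
                    default=[], nargs='+')
    args = ap.parse_args(['--res_xml', 'a.xml', 'b.xml', '--res_xml', 'c.xml'])

    if args.res_xmls and isinstance(args.res_xmls[0], list):
        # Flatten to a single list, exactly as the build script does.
        args.res_xmls = [x for res in args.res_xmls for x in res]

    assert args.res_xmls == ['a.xml', 'b.xml', 'c.xml']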
if __name__ == "__main__": - parse_args() + parse_args_and_make_package() diff --git a/p4a/pythonforandroid/bootstraps/common/build/gradle/wrapper/gradle-wrapper.properties b/p4a/pythonforandroid/bootstraps/common/build/gradle/wrapper/gradle-wrapper.properties index efc019a..dd012b8 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/gradle/wrapper/gradle-wrapper.properties +++ b/p4a/pythonforandroid/bootstraps/common/build/gradle/wrapper/gradle-wrapper.properties @@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.1-all.zip diff --git a/p4a/pythonforandroid/bootstraps/common/build/jni/application/src/Android.mk b/p4a/pythonforandroid/bootstraps/common/build/jni/application/src/Android.mk index 4a442ee..fb2b177 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/jni/application/src/Android.mk +++ b/p4a/pythonforandroid/bootstraps/common/build/jni/application/src/Android.mk @@ -21,7 +21,3 @@ LOCAL_LDLIBS := -lGLESv1_CM -lGLESv2 -llog $(EXTRA_LDLIBS) LOCAL_LDFLAGS += -L$(PYTHON_LINK_ROOT) $(APPLICATION_ADDITIONAL_LDFLAGS) include $(BUILD_SHARED_LIBRARY) - -ifdef CRYSTAX_PYTHON_VERSION - $(call import-module,python/$(CRYSTAX_PYTHON_VERSION)) -endif diff --git a/p4a/pythonforandroid/bootstraps/common/build/jni/application/src/start.c b/p4a/pythonforandroid/bootstraps/common/build/jni/application/src/start.c index 3429118..bad5218 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/jni/application/src/start.c +++ b/p4a/pythonforandroid/bootstraps/common/build/jni/application/src/start.c @@ -15,15 +15,11 @@ #include #include "bootstrap_name.h" + #ifndef BOOTSTRAP_USES_NO_SDL_HEADERS #include "SDL.h" -#ifndef BOOTSTRAP_NAME_PYGAME #include "SDL_opengles2.h" #endif -#endif -#ifdef BOOTSTRAP_NAME_PYGAME -#include "jniwrapperstuff.h" -#endif #include "android/log.h" #define ENTRYPOINT_MAXLEN 128 @@ -169,26 +165,14 @@ int main(int argc, char *argv[]) { // Set up the python path char paths[256]; - char crystax_python_dir[256]; - snprintf(crystax_python_dir, 256, - "%s/crystax_python", getenv("ANDROID_UNPACK")); char python_bundle_dir[256]; snprintf(python_bundle_dir, 256, "%s/_python_bundle", getenv("ANDROID_UNPACK")); - if (dir_exists(crystax_python_dir) || dir_exists(python_bundle_dir)) { - if (dir_exists(crystax_python_dir)) { - LOGP("crystax_python exists"); - snprintf(paths, 256, - "%s/stdlib.zip:%s/modules", - crystax_python_dir, crystax_python_dir); - } - - if (dir_exists(python_bundle_dir)) { - LOGP("_python_bundle dir exists"); - snprintf(paths, 256, - "%s/stdlib.zip:%s/modules", - python_bundle_dir, python_bundle_dir); - } + if (dir_exists(python_bundle_dir)) { + LOGP("_python_bundle dir exists"); + snprintf(paths, 256, + "%s/stdlib.zip:%s/modules", + python_bundle_dir, python_bundle_dir); LOGP("calculated paths to be..."); LOGP(paths); @@ -200,24 +184,11 @@ int main(int argc, char *argv[]) { LOGP("set wchar paths..."); } else { - // We do not expect to see crystax_python any more, so no point - // reminding the user about it. If it does exist, we'll have - // logged it earlier. 
- LOGP("_python_bundle does not exist"); + LOGP("_python_bundle does not exist...this not looks good, all python" + " recipes should have this folder, should we expect a crash soon?"); } Py_Initialize(); - -#if PY_MAJOR_VERSION < 3 - // Can't Py_SetPath in python2 but we can set PySys_SetPath, which must - // be applied after Py_Initialize rather than before like Py_SetPath - #if PY_MICRO_VERSION >= 15 - // Only for python native-build - PySys_SetPath(paths); - #endif - PySys_SetArgv(argc, argv); -#endif - LOGP("Initialized python"); /* ensure threads will work. @@ -236,34 +207,8 @@ int main(int argc, char *argv[]) { * replace sys.path with our path */ PyRun_SimpleString("import sys, posix\n"); - if (dir_exists("lib")) { - /* If we built our own python, set up the paths correctly. - * This is only the case if we are using the python2legacy recipe - */ - LOGP("Setting up python from ANDROID_APP_PATH"); - PyRun_SimpleString("private = posix.environ['ANDROID_APP_PATH']\n" - "argument = posix.environ['ANDROID_ARGUMENT']\n" - "sys.path[:] = [ \n" - " private + '/lib/python27.zip', \n" - " private + '/lib/python2.7/', \n" - " private + '/lib/python2.7/lib-dynload/', \n" - " private + '/lib/python2.7/site-packages/', \n" - " argument ]\n"); - } char add_site_packages_dir[256]; - if (dir_exists(crystax_python_dir)) { - snprintf(add_site_packages_dir, 256, - "sys.path.append('%s/site-packages')", - crystax_python_dir); - - PyRun_SimpleString("import sys\n" - "sys.argv = ['notaninterpreterreally']\n" - "from os.path import realpath, join, dirname"); - PyRun_SimpleString(add_site_packages_dir); - /* "sys.path.append(join(dirname(realpath(__file__)), 'site-packages'))") */ - PyRun_SimpleString("sys.path = ['.'] + sys.path"); - } if (dir_exists(python_bundle_dir)) { snprintf(add_site_packages_dir, 256, @@ -281,13 +226,13 @@ int main(int argc, char *argv[]) { PyRun_SimpleString( "class LogFile(object):\n" " def __init__(self):\n" - " self.buffer = ''\n" + " self.__buffer = ''\n" " def write(self, s):\n" - " s = self.buffer + s\n" - " lines = s.split(\"\\n\")\n" + " s = self.__buffer + s\n" + " lines = s.split('\\n')\n" " for l in lines[:-1]:\n" - " androidembed.log(l)\n" - " self.buffer = lines[-1]\n" + " androidembed.log(l.replace('\\x00', ''))\n" + " self.__buffer = lines[-1]\n" " def flush(self):\n" " return\n" "sys.stdout = sys.stderr = LogFile()\n" @@ -306,14 +251,10 @@ int main(int argc, char *argv[]) { */ LOGP("Run user program, change dir and execute entrypoint"); - /* Get the entrypoint, search the .pyo then .py + /* Get the entrypoint, search the .pyc then .py */ char *dot = strrchr(env_entrypoint, '.'); -#if PY_MAJOR_VERSION > 2 char *ext = ".pyc"; -#else - char *ext = ".pyo"; -#endif if (dot <= 0) { LOGP("Invalid entrypoint, abort."); return -1; @@ -329,21 +270,17 @@ int main(int argc, char *argv[]) { entrypoint[strlen(env_entrypoint) - 1] = '\0'; LOGP(entrypoint); if (!file_exists(entrypoint)) { - LOGP("Entrypoint not found (.pyc/.pyo, fallback on .py), abort"); + LOGP("Entrypoint not found (.pyc, fallback on .py), abort"); return -1; } } else { strcpy(entrypoint, env_entrypoint); } } else if (!strcmp(dot, ".py")) { - /* if .py is passed, check the pyo version first */ + /* if .py is passed, check the pyc version first */ strcpy(entrypoint, env_entrypoint); entrypoint[strlen(env_entrypoint) + 1] = '\0'; -#if PY_MAJOR_VERSION > 2 entrypoint[strlen(env_entrypoint)] = 'c'; -#else - entrypoint[strlen(env_entrypoint)] = 'o'; -#endif if (!file_exists(entrypoint)) { /* fallback on pure python 
version */ if (!file_exists(env_entrypoint)) { @@ -353,7 +290,7 @@ int main(int argc, char *argv[]) { strcpy(entrypoint, env_entrypoint); } } else { - LOGP("Entrypoint have an invalid extension (must be .py or .pyc/.pyo), abort."); + LOGP("Entrypoint has an invalid extension (must be .py or .pyc), abort."); return -1; } // LOGP("Entrypoint is:"); @@ -374,8 +311,7 @@ int main(int argc, char *argv[]) { ret = 1; PyErr_Print(); /* This exits with the right code if SystemExit. */ PyObject *f = PySys_GetObject("stdout"); - if (PyFile_WriteString( - "\n", f)) /* python2 used Py_FlushLine, but this no longer exists */ + if (PyFile_WriteString("\n", f)) PyErr_Clear(); } diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/Octal.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/Octal.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/Octal.java rename to p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/Octal.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarConstants.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarConstants.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarConstants.java rename to p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarConstants.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarEntry.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarEntry.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarEntry.java rename to p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarEntry.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarHeader.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarHeader.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarHeader.java rename to p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarHeader.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarInputStream.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarInputStream.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarInputStream.java rename to p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarInputStream.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarOutputStream.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarOutputStream.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarOutputStream.java rename to p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarOutputStream.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarUtils.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarUtils.java similarity index 100% rename from 
p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarUtils.java rename to p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kamranzafar/jtar/TarUtils.java diff --git a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kivy/android/PythonService.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kivy/android/PythonService.java index 4f20fb7..dd6f307 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kivy/android/PythonService.java +++ b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kivy/android/PythonService.java @@ -14,10 +14,10 @@ import android.app.PendingIntent; import android.os.Process; import java.io.File; -import org.kivy.android.PythonUtil; - -import org.renpy.android.Hardware; - +//imports for channel definition +import android.app.NotificationManager; +import android.app.NotificationChannel; +import android.graphics.Color; public class PythonService extends Service implements Runnable { @@ -33,6 +33,8 @@ public class PythonService extends Service implements Runnable { private String serviceEntrypoint; // Argument to pass to Python code, private String pythonServiceArgument; + + public static PythonService mService = null; private Intent startIntent = null; @@ -42,10 +44,6 @@ public class PythonService extends Service implements Runnable { autoRestartService = restart; } - public boolean canDisplayNotification() { - return true; - } - public int startType() { return START_NOT_STICKY; } @@ -64,10 +62,15 @@ public class PythonService extends Service implements Runnable { public int onStartCommand(Intent intent, int flags, int startId) { if (pythonThread != null) { Log.v("python service", "service exists, do not start again"); - return START_NOT_STICKY; + return startType(); + } + //intent is null if OS restarts a STICKY service + if (intent == null) { + Context context = getApplicationContext(); + intent = getThisDefaultIntent(context, ""); } - startIntent = intent; + startIntent = intent; Bundle extras = intent.getExtras(); androidPrivate = extras.getString("androidPrivate"); androidArgument = extras.getString("androidArgument"); @@ -75,28 +78,38 @@ public class PythonService extends Service implements Runnable { pythonName = extras.getString("pythonName"); pythonHome = extras.getString("pythonHome"); pythonPath = extras.getString("pythonPath"); + boolean serviceStartAsForeground = ( + extras.getString("serviceStartAsForeground").equals("true") + ); pythonServiceArgument = extras.getString("pythonServiceArgument"); - pythonThread = new Thread(this); pythonThread.start(); - if (canDisplayNotification()) { + if (serviceStartAsForeground) { doStartForeground(extras); } return startType(); } + protected int getServiceId() { + return 1; + } + + protected Intent getThisDefaultIntent(Context ctx, String pythonServiceArgument) { + return null; + } + protected void doStartForeground(Bundle extras) { String serviceTitle = extras.getString("serviceTitle"); String serviceDescription = extras.getString("serviceDescription"); - Notification notification; Context context = getApplicationContext(); Intent contextIntent = new Intent(context, PythonActivity.class); PendingIntent pIntent = PendingIntent.getActivity(context, 0, contextIntent, - PendingIntent.FLAG_UPDATE_CURRENT); - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) { + PendingIntent.FLAG_IMMUTABLE | PendingIntent.FLAG_UPDATE_CURRENT); + + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { notification = new 
Notification( context.getApplicationInfo().icon, serviceTitle, System.currentTimeMillis()); try { @@ -109,14 +122,26 @@ public class PythonService extends Service implements Runnable { IllegalArgumentException | InvocationTargetException e) { } } else { - Notification.Builder builder = new Notification.Builder(context); + // for android 8+ we need to create our own channel + // https://stackoverflow.com/questions/47531742/startforeground-fail-after-upgrade-to-android-8-1 + String NOTIFICATION_CHANNEL_ID = "org.kivy.p4a"; //TODO: make this configurable + String channelName = "Background Service"; //TODO: make this configurable + NotificationChannel chan = new NotificationChannel(NOTIFICATION_CHANNEL_ID, channelName, + NotificationManager.IMPORTANCE_NONE); + + chan.setLightColor(Color.BLUE); + chan.setLockscreenVisibility(Notification.VISIBILITY_PRIVATE); + NotificationManager manager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); + manager.createNotificationChannel(chan); + + Notification.Builder builder = new Notification.Builder(context, NOTIFICATION_CHANNEL_ID); builder.setContentTitle(serviceTitle); builder.setContentText(serviceDescription); builder.setContentIntent(pIntent); builder.setSmallIcon(context.getApplicationInfo().icon); notification = builder.build(); } - startForeground(1, notification); + startForeground(getServiceId(), notification); } @Override @@ -137,7 +162,10 @@ public class PythonService extends Service implements Runnable { @Override public void onTaskRemoved(Intent rootIntent) { super.onTaskRemoved(rootIntent); - stopSelf(); + //sticky service runtime/restart is managed by the OS. Leave it running when the app is closed + if (startType() != START_STICKY) { + stopSelf(); + } } @Override diff --git a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kivy/android/PythonUtil.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kivy/android/PythonUtil.java index 1f26738..2d6ca9f 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kivy/android/PythonUtil.java +++ b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/kivy/android/PythonUtil.java @@ -1,12 +1,20 @@ package org.kivy.android; +import java.io.InputStream; +import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.File; +import android.app.Activity; +import android.content.Context; +import android.content.res.Resources; import android.util.Log; +import android.widget.Toast; + import java.util.ArrayList; -import java.io.FilenameFilter; import java.util.regex.Pattern; +import org.renpy.android.AssetExtract; public class PythonUtil { private static final String TAG = "pythonutil"; @@ -32,21 +40,25 @@ public class PythonUtil { protected static ArrayList<String> getLibraries(File libsDir) { ArrayList<String> libsList = new ArrayList<String>(); - addLibraryIfExists(libsList, "crystax", libsDir); addLibraryIfExists(libsList, "sqlite3", libsDir); addLibraryIfExists(libsList, "ffi", libsDir); + addLibraryIfExists(libsList, "png16", libsDir); addLibraryIfExists(libsList, "ssl.*", libsDir); addLibraryIfExists(libsList, "crypto.*", libsDir); - libsList.add("python2.7"); + addLibraryIfExists(libsList, "SDL2", libsDir); + addLibraryIfExists(libsList, "SDL2_image", libsDir); + addLibraryIfExists(libsList, "SDL2_mixer", libsDir); + addLibraryIfExists(libsList, "SDL2_ttf", libsDir); libsList.add("python3.5m"); libsList.add("python3.6m"); libsList.add("python3.7m"); + libsList.add("python3.8"); + libsList.add("python3.9"); libsList.add("main"); 
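loadLibraries (continued just below) walks this list in order, tolerating failures for the optional libpythonX.Y guesses until the last one; a standalone Python sketch of that policy, with ctypes.CDLL standing in for Java's System.loadLibrary:

    import ctypes
    import os

    def load_libraries(libs_dir, candidates):
        found_python = False
        for lib in candidates:
            path = os.path.join(libs_dir, "lib%s.so" % lib)
            try:
                ctypes.CDLL(path)
                if lib.startswith("python"):
                    found_python = True
            except OSError:
                # python3.9 is the last libpython candidate; if none has
                # loaded by then, fail loudly (mirrors the RuntimeException).
                if lib.startswith("python3.9") and not found_python:
                    raise RuntimeError("Could not load any libpythonXXX.so")
                if lib.startswith("python"):
                    continue  # another bundled version may still match
                raise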
return libsList; } public static void loadLibraries(File filesDir, File libsDir) { - String filesDirPath = filesDir.getAbsolutePath(); boolean foundPython = false; for (String lib : getLibraries(libsDir)) { @@ -61,8 +73,8 @@ public class PythonUtil { // load, and it has failed, give a more // general error Log.v(TAG, "Library loading error: " + e.getMessage()); - if (lib.startsWith("python3.7") && !foundPython) { - throw new java.lang.RuntimeException("Could not load any libpythonXXX.so"); + if (lib.startsWith("python3.9") && !foundPython) { + throw new RuntimeException("Could not load any libpythonXXX.so"); } else if (lib.startsWith("python")) { continue; } else { @@ -73,5 +85,174 @@ public class PythonUtil { } Log.v(TAG, "Loaded everything!"); - } + } + + public static String getAppRoot(Context ctx) { + String appRoot = ctx.getFilesDir().getAbsolutePath() + "/app"; + return appRoot; + } + + public static String getResourceString(Context ctx, String name) { + // Taken from org.renpy.android.ResourceManager + Resources res = ctx.getResources(); + int id = res.getIdentifier(name, "string", ctx.getPackageName()); + return res.getString(id); + } + + /** + * Show an error using a toast. (Only makes sense from non-UI threads.) + */ + protected static void toastError(final Activity activity, final String msg) { + activity.runOnUiThread(new Runnable () { + public void run() { + Toast.makeText(activity, msg, Toast.LENGTH_LONG).show(); + } + }); + + // Wait to show the error. + synchronized (activity) { + try { + activity.wait(1000); + } catch (InterruptedException e) { + } + } + } + + protected static void recursiveDelete(File f) { + if (f.isDirectory()) { + for (File r : f.listFiles()) { + recursiveDelete(r); + } + } + f.delete(); + } + + public static void unpackAsset( + Context ctx, + final String resource, + File target, + boolean cleanup_on_version_update) { + + Log.v(TAG, "Unpacking " + resource + " " + target.getName()); + + // The version of data in memory and on disk. + String dataVersion = getResourceString(ctx, resource + "_version"); + String diskVersion = null; + + Log.v(TAG, "Data version is " + dataVersion); + + // If no version, no unpacking is necessary. + if (dataVersion == null) { + return; + } + + // Check the current disk version, if any. + String filesDir = target.getAbsolutePath(); + String diskVersionFn = filesDir + "/" + resource + ".version"; + + try { + byte buf[] = new byte[64]; + InputStream is = new FileInputStream(diskVersionFn); + int len = is.read(buf); + diskVersion = new String(buf, 0, len); + is.close(); + } catch (Exception e) { + diskVersion = ""; + } + + // If the disk data is out of date, extract it and write the version file. + if (! dataVersion.equals(diskVersion)) { + Log.v(TAG, "Extracting " + resource + " assets."); + + if (cleanup_on_version_update) { + recursiveDelete(target); + } + target.mkdirs(); + + AssetExtract ae = new AssetExtract(ctx); + if (!ae.extractTar(resource + ".tar", target.getAbsolutePath(), "private")) { + String msg = "Could not extract " + resource + " data."; + if (ctx instanceof Activity) { + toastError((Activity)ctx, msg); + } else { + Log.v(TAG, msg); + } + } + + try { + // Write .nomedia. + new File(target, ".nomedia").createNewFile(); + + // Write version file. 
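unpackAsset above (its tail continues below) and unpackPyBundle share one idea: compare the version string baked into the APK with a marker file on disk, re-extract only when they differ (optionally wiping the target first), then rewrite the marker; a Python sketch of that check under the same file layout:

    import os

    def needs_unpack(target_dir, resource, data_version):
        # The marker file records which version was last extracted.
        marker = os.path.join(target_dir, resource + ".version")
        try:
            with open(marker) as f:
                disk_version = f.read(64)
        except OSError:
            disk_version = ""
        return data_version != disk_version

    def record_unpack(target_dir, resource, data_version):
        os.makedirs(target_dir, exist_ok=True)
        with open(os.path.join(target_dir, resource + ".version"), "w") as f:
            f.write(data_version)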
+ FileOutputStream os = new FileOutputStream(diskVersionFn); + os.write(dataVersion.getBytes()); + os.close(); + } catch (Exception e) { + Log.w(TAG, e); + } + } + } + + public static void unpackPyBundle( + Context ctx, + final String resource, + File target, + boolean cleanup_on_version_update) { + + Log.v(TAG, "Unpacking " + resource + " " + target.getName()); + + // The version of data in memory and on disk. + String dataVersion = getResourceString(ctx, "private_version"); + String diskVersion = null; + + Log.v(TAG, "Data version is " + dataVersion); + + // If no version, no unpacking is necessary. + if (dataVersion == null) { + return; + } + + // Check the current disk version, if any. + String filesDir = target.getAbsolutePath(); + String diskVersionFn = filesDir + "/" + "libpybundle" + ".version"; + + try { + byte buf[] = new byte[64]; + InputStream is = new FileInputStream(diskVersionFn); + int len = is.read(buf); + diskVersion = new String(buf, 0, len); + is.close(); + } catch (Exception e) { + diskVersion = ""; + } + + if (! dataVersion.equals(diskVersion)) { + // If the disk data is out of date, extract it and write the version file. + Log.v(TAG, "Extracting " + resource + " assets."); + + if (cleanup_on_version_update) { + recursiveDelete(target); + } + target.mkdirs(); + + AssetExtract ae = new AssetExtract(ctx); + if (!ae.extractTar(resource + ".so", target.getAbsolutePath(), "pybundle")) { + String msg = "Could not extract " + resource + " data."; + if (ctx instanceof Activity) { + toastError((Activity)ctx, msg); + } else { + Log.v(TAG, msg); + } + } + + try { + // Write version file. + FileOutputStream os = new FileOutputStream(diskVersionFn); + os.write(dataVersion.getBytes()); + os.close(); + } catch (Exception e) { + Log.w(TAG, e); + } + } + } } diff --git a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/AssetExtract.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/AssetExtract.java index 52d6424..0a5dda6 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/AssetExtract.java +++ b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/AssetExtract.java @@ -2,36 +2,34 @@ // spaces amount package org.renpy.android; -import java.io.*; - -import android.app.Activity; +import android.content.Context; import android.util.Log; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.FileInputStream; +import java.io.OutputStream; import java.io.FileOutputStream; +import java.io.FileNotFoundException; import java.io.File; +import java.io.FileInputStream; import java.util.zip.GZIPInputStream; import android.content.res.AssetManager; - -import org.kamranzafar.jtar.*; +import org.kamranzafar.jtar.TarEntry; +import org.kamranzafar.jtar.TarInputStream; public class AssetExtract { private AssetManager mAssetManager = null; - private Activity mActivity = null; - public AssetExtract(Activity act) { - mActivity = act; - mAssetManager = act.getAssets(); + public AssetExtract(Context context) { + mAssetManager = context.getAssets(); } - public boolean extractTar(String asset, String target) { + public boolean extractTar(String asset, String target, String method) { byte buf[] = new byte[1024 * 1024]; @@ -39,7 +37,12 @@ public class AssetExtract { TarInputStream tis = null; try { - assetStream = mAssetManager.open(asset, AssetManager.ACCESS_STREAMING); + if(method.equals("private")){ + 
assetStream = mAssetManager.open(asset, AssetManager.ACCESS_STREAMING); + } else if (method.equals("pybundle")) { + assetStream = new FileInputStream(asset); + } + tis = new TarInputStream(new BufferedInputStream(new GZIPInputStream(new BufferedInputStream(assetStream, 8192)), 8192)); } catch (IOException e) { Log.e("python", "opening up extract tar", e); @@ -51,7 +54,7 @@ public class AssetExtract { try { entry = tis.getNextEntry(); - } catch ( java.io.IOException e ) { + } catch ( IOException e ) { Log.e("python", "extracting tar", e); return false; } @@ -76,8 +79,7 @@ public class AssetExtract { try { out = new BufferedOutputStream(new FileOutputStream(path), 8192); - } catch ( FileNotFoundException e ) { - } catch ( SecurityException e ) { }; + } catch ( FileNotFoundException | SecurityException e ) {} if ( out == null ) { Log.e("python", "could not open " + path); @@ -97,7 +99,7 @@ public class AssetExtract { out.flush(); out.close(); - } catch ( java.io.IOException e ) { + } catch ( IOException e ) { Log.e("python", "extracting zip", e); return false; } diff --git a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/Hardware.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/Hardware.java new file mode 100644 index 0000000..8475762 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/Hardware.java @@ -0,0 +1,279 @@ +package org.renpy.android; + +import android.content.Context; +import android.os.Vibrator; +import android.hardware.Sensor; +import android.hardware.SensorEvent; +import android.hardware.SensorEventListener; +import android.hardware.SensorManager; +import android.util.DisplayMetrics; +import android.view.inputmethod.InputMethodManager; +import android.view.View; + +import java.util.List; +import android.net.wifi.ScanResult; +import android.net.wifi.WifiManager; +import android.content.BroadcastReceiver; +import android.content.Intent; +import android.content.IntentFilter; +import android.net.ConnectivityManager; +import android.net.NetworkInfo; + +import org.kivy.android.PythonActivity; + +/** + * Methods that are expected to be called via JNI, to access the + * device's non-screen hardware. (For example, the vibration and + * accelerometer.) + */ +public class Hardware { + + // The context. + static Context context; + static View view; + public static final float defaultRv[] = { 0f, 0f, 0f }; + + /** + * Vibrate for s seconds. 
+ */ + public static void vibrate(double s) { + Vibrator v = (Vibrator) context.getSystemService(Context.VIBRATOR_SERVICE); + if (v != null) { + v.vibrate((int) (1000 * s)); + } + } + + /** + * Get an Overview of all Hardware Sensors of an Android Device + */ + public static String getHardwareSensors() { + SensorManager sm = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE); + List<Sensor> allSensors = sm.getSensorList(Sensor.TYPE_ALL); + + if (allSensors != null) { + String resultString = ""; + for (Sensor s : allSensors) { + resultString += String.format("Name=" + s.getName()); + resultString += String.format(",Vendor=" + s.getVendor()); + resultString += String.format(",Version=" + s.getVersion()); + resultString += String.format(",MaximumRange=" + s.getMaximumRange()); + // XXX MinDelay is not in the 2.2 + //resultString += String.format(",MinDelay=" + s.getMinDelay()); + resultString += String.format(",Power=" + s.getPower()); + resultString += String.format(",Type=" + s.getType() + "\n"); + } + return resultString; + } + return ""; + } + + + /** + * Get Access to 3 Axis Hardware Sensors Accelerometer, Orientation and Magnetic Field Sensors + */ + public static class generic3AxisSensor implements SensorEventListener { + private final SensorManager sSensorManager; + private final Sensor sSensor; + private final int sSensorType; + SensorEvent sSensorEvent; + + public generic3AxisSensor(int sensorType) { + sSensorType = sensorType; + sSensorManager = (SensorManager)context.getSystemService(Context.SENSOR_SERVICE); + sSensor = sSensorManager.getDefaultSensor(sSensorType); + } + + public void onAccuracyChanged(Sensor sensor, int accuracy) { + } + + public void onSensorChanged(SensorEvent event) { + sSensorEvent = event; + } + + /** + * Enable or disable the Sensor by registering/unregistering + */ + public void changeStatus(boolean enable) { + if (enable) { + sSensorManager.registerListener(this, sSensor, SensorManager.SENSOR_DELAY_NORMAL); + } else { + sSensorManager.unregisterListener(this, sSensor); + } + } + + /** + * Read the Sensor + */ + public float[] readSensor() { + if (sSensorEvent != null) { + return sSensorEvent.values; + } else { + return defaultRv; + } + } + } + + public static generic3AxisSensor accelerometerSensor = null; + public static generic3AxisSensor orientationSensor = null; + public static generic3AxisSensor magneticFieldSensor = null; + + /** + * functions for backward compatibility reasons + */ + + public static void accelerometerEnable(boolean enable) { + if ( accelerometerSensor == null ) + accelerometerSensor = new generic3AxisSensor(Sensor.TYPE_ACCELEROMETER); + accelerometerSensor.changeStatus(enable); + } + public static float[] accelerometerReading() { + if ( accelerometerSensor == null ) + return defaultRv; + return (float[]) accelerometerSensor.readSensor(); + } + public static void orientationSensorEnable(boolean enable) { + if ( orientationSensor == null ) + orientationSensor = new generic3AxisSensor(Sensor.TYPE_ORIENTATION); + orientationSensor.changeStatus(enable); + } + public static float[] orientationSensorReading() { + if ( orientationSensor == null ) + return defaultRv; + return (float[]) orientationSensor.readSensor(); + } + public static void magneticFieldSensorEnable(boolean enable) { + if ( magneticFieldSensor == null ) + magneticFieldSensor = new generic3AxisSensor(Sensor.TYPE_MAGNETIC_FIELD); + magneticFieldSensor.changeStatus(enable); + } + public static float[] magneticFieldSensorReading() { + if ( magneticFieldSensor == null ) + 
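These static methods are meant to be driven from Python over JNI; a hedged usage sketch via pyjnius (it assumes the app was built with this bootstrap, that pyjnius is importable, and that the glue code has initialised Hardware.context):

    from jnius import autoclass

    Hardware = autoclass("org.renpy.android.Hardware")

    Hardware.vibrate(0.5)                  # seconds; the Java side converts to ms
    Hardware.accelerometerEnable(True)     # registers the sensor listener
    x, y, z = Hardware.accelerometerReading()  # defaultRv until an event arrives
    print(Hardware.getHardwareSensors())   # one "Name=...,Vendor=..." line per sensor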
return defaultRv; + return (float[]) magneticFieldSensor.readSensor(); + } + + static public DisplayMetrics metrics = new DisplayMetrics(); + + /** + * Get display DPI. + */ + public static int getDPI() { + // AND: Shouldn't have to get the metrics like this every time... + PythonActivity.mActivity.getWindowManager().getDefaultDisplay().getMetrics(metrics); + return metrics.densityDpi; + } + + // /** + // * Show the soft keyboard. + // */ + // public static void showKeyboard(int input_type) { + // //Log.i("python", "hardware.Java show_keyword " input_type); + + // InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE); + + // SDLSurfaceView vw = (SDLSurfaceView) view; + + // int inputType = input_type; + + // if (vw.inputType != inputType){ + // vw.inputType = inputType; + // imm.restartInput(view); + // } + + // imm.showSoftInput(view, InputMethodManager.SHOW_FORCED); + // } + + /** + * Hide the soft keyboard. + */ + public static void hideKeyboard() { + InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE); + imm.hideSoftInputFromWindow(view.getWindowToken(), 0); + } + + /** + * Scan WiFi networks + */ + static List<ScanResult> latestResult; + + public static void enableWifiScanner() + { + IntentFilter i = new IntentFilter(); + i.addAction(WifiManager.SCAN_RESULTS_AVAILABLE_ACTION); + + context.registerReceiver(new BroadcastReceiver() { + + @Override + public void onReceive(Context c, Intent i) { + // Code to execute when SCAN_RESULTS_AVAILABLE_ACTION event occurs + WifiManager w = (WifiManager) c.getSystemService(Context.WIFI_SERVICE); + latestResult = w.getScanResults(); // Returns a <list> of scanResults + } + + }, i); + + } + + public static String scanWifi() { + + // Now you can call this and it should execute the broadcastReceiver's + // onReceive() + if (latestResult != null){ + + String latestResultString = ""; + for (ScanResult result : latestResult) + { + latestResultString += String.format("%s\t%s\t%d\n", result.SSID, result.BSSID, result.level); + } + + return latestResultString; + } + + return ""; + } + + /** + * network state + */ + + public static boolean network_state = false; + + /** + * Check network state directly + * + * (only one connection can be active at a given moment, detects all network types) + * + */ + public static boolean checkNetwork() + { + boolean state = false; + final ConnectivityManager conMgr = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); + + final NetworkInfo activeNetwork = conMgr.getActiveNetworkInfo(); + if (activeNetwork != null && activeNetwork.isConnected()) { + state = true; + } else { + state = false; + } + + return state; + } + + /** + * To receive network state changes + */ + public static void registerNetworkCheck() + { + IntentFilter i = new IntentFilter(); + i.addAction(ConnectivityManager.CONNECTIVITY_ACTION); + context.registerReceiver(new BroadcastReceiver() { + + @Override + public void onReceive(Context c, Intent i) { + network_state = checkNetwork(); + } + + }, i); + } + +} diff --git a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/ResourceManager.java b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/ResourceManager.java index 47455ab..a170c84 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/ResourceManager.java +++ b/p4a/pythonforandroid/bootstraps/common/build/src/main/java/org/renpy/android/ResourceManager.java @@ -1,8 +1,7 
@@ /** * This class takes care of managing resources for us. In our code, we * can't use R, since the name of the package containing R will - * change. (This same code is used in both org.renpy.android and - * org.renpy.pygame.) So this is the next best thing. + * change. So this is the next best thing. */ package org.renpy.android; diff --git a/p4a/pythonforandroid/bootstraps/common/build/templates/Service.tmpl.java b/p4a/pythonforandroid/bootstraps/common/build/templates/Service.tmpl.java index 3ed10c2..de84ac4 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/templates/Service.tmpl.java +++ b/p4a/pythonforandroid/bootstraps/common/build/templates/Service.tmpl.java @@ -1,18 +1,11 @@ package {{ args.package }}; -import android.os.Build; -import java.lang.reflect.Method; -import java.lang.reflect.InvocationTargetException; import android.content.Intent; import android.content.Context; -import android.app.Notification; -import android.app.PendingIntent; -import android.os.Bundle; -import org.kivy.android.PythonService; -import org.kivy.android.PythonActivity; +import {{ args.service_class_name }}; -public class Service{{ name|capitalize }} extends PythonService { +public class Service{{ name|capitalize }} extends {{ base_service_class }} { {% if sticky %} @Override public int startType() { @@ -20,54 +13,35 @@ public class Service{{ name|capitalize }} extends PythonService { } {% endif %} - {% if not foreground %} @Override - public boolean canDisplayNotification() { - return false; - } - {% endif %} - - @Override - protected void doStartForeground(Bundle extras) { - Notification notification; - Context context = getApplicationContext(); - Intent contextIntent = new Intent(context, PythonActivity.class); - PendingIntent pIntent = PendingIntent.getActivity(context, 0, contextIntent, - PendingIntent.FLAG_UPDATE_CURRENT); - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) { - notification = new Notification( - context.getApplicationInfo().icon, "{{ args.name }}", System.currentTimeMillis()); - try { - // prevent using NotificationCompat, this saves 100kb on apk - Method func = notification.getClass().getMethod( - "setLatestEventInfo", Context.class, CharSequence.class, - CharSequence.class, PendingIntent.class); - func.invoke(notification, context, "{{ args.name }}", "{{ name| capitalize }}", pIntent); - } catch (NoSuchMethodException | IllegalAccessException | - IllegalArgumentException | InvocationTargetException e) { - } - } else { - Notification.Builder builder = new Notification.Builder(context); - builder.setContentTitle("{{ args.name }}"); - builder.setContentText("{{ name| capitalize }}"); - builder.setContentIntent(pIntent); - builder.setSmallIcon(context.getApplicationInfo().icon); - notification = builder.build(); - } - startForeground({{ service_id }}, notification); + protected int getServiceId() { + return {{ service_id }}; } static public void start(Context ctx, String pythonServiceArgument) { + Intent intent = getDefaultIntent(ctx, pythonServiceArgument); + ctx.startService(intent); + } + + static public Intent getDefaultIntent(Context ctx, String pythonServiceArgument) { Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class); String argument = ctx.getFilesDir().getAbsolutePath() + "/app"; intent.putExtra("androidPrivate", ctx.getFilesDir().getAbsolutePath()); intent.putExtra("androidArgument", argument); + intent.putExtra("serviceTitle", "{{ args.name }}"); + intent.putExtra("serviceDescription", "{{ name|capitalize }}"); 
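The generated Service{{ name|capitalize }} class exposes static start/stop helpers that build the intent assembled here; a hedged pyjnius sketch of calling them from Python, where org.test.myapp and Myservice are illustrative names for an app that declared --service myservice:service/main.py:

    from jnius import autoclass

    # The class name is derived from the --service name; the package is the app's own.
    service = autoclass("org.test.myapp.ServiceMyservice")
    mActivity = autoclass("org.kivy.android.PythonActivity").mActivity

    service.start(mActivity, "some argument")  # delivered as pythonServiceArgument
    # ... later ...
    service.stop(mActivity)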
intent.putExtra("serviceEntrypoint", "{{ entrypoint }}"); intent.putExtra("pythonName", "{{ name }}"); + intent.putExtra("serviceStartAsForeground", "{{ foreground|lower }}"); intent.putExtra("pythonHome", argument); intent.putExtra("pythonPath", argument + ":" + argument + "/lib"); intent.putExtra("pythonServiceArgument", pythonServiceArgument); - ctx.startService(intent); + return intent; + } + + @Override + protected Intent getThisDefaultIntent(Context ctx, String pythonServiceArgument) { + return Service{{ name|capitalize }}.getDefaultIntent(ctx, pythonServiceArgument); } static public void stop(Context ctx) { diff --git a/p4a/pythonforandroid/bootstraps/common/build/templates/build.tmpl.gradle b/p4a/pythonforandroid/bootstraps/common/build/templates/build.tmpl.gradle index 32bd091..bb00039 100644 --- a/p4a/pythonforandroid/bootstraps/common/build/templates/build.tmpl.gradle +++ b/p4a/pythonforandroid/bootstraps/common/build/templates/build.tmpl.gradle @@ -5,7 +5,7 @@ buildscript { jcenter() } dependencies { - classpath 'com.android.tools.build:gradle:3.1.4' + classpath 'com.android.tools.build:gradle:7.1.2' } } @@ -13,23 +13,45 @@ allprojects { repositories { google() jcenter() - flatDir { - dirs 'libs' - } + {%- for repo in args.gradle_repositories %} + {{repo}} + {%- endfor %} + flatDir { + dirs 'libs' + } } } +{% if is_library %} +apply plugin: 'com.android.library' +{% else %} apply plugin: 'com.android.application' +{% endif %} android { - compileSdkVersion {{ android_api }} - buildToolsVersion '{{ build_tools_version }}' - defaultConfig { - minSdkVersion {{ args.min_sdk_version }} - targetSdkVersion {{ android_api }} - versionCode {{ args.numeric_version }} - versionName '{{ args.version }}' + compileSdkVersion {{ android_api }} + buildToolsVersion '{{ build_tools_version }}' + defaultConfig { + minSdkVersion {{ args.min_sdk_version }} + targetSdkVersion {{ android_api }} + versionCode {{ args.numeric_version }} + versionName '{{ args.version }}' + manifestPlaceholders = {{ args.manifest_placeholders}} + } + + + packagingOptions { + jniLibs { + useLegacyPackaging = true + } + {% if debug_build -%} + doNotStrip '**/*.so' + {% else %} + exclude 'lib/**/gdbserver' + exclude 'lib/**/gdb.setup' + {%- endif %} } + {% if args.sign -%} signingConfigs { @@ -40,41 +62,73 @@ android { keyPassword System.getenv("P4A_RELEASE_KEYALIAS_PASSWD") } } + {%- endif %} - buildTypes { - debug { - } - release { - {% if args.sign -%} - signingConfig signingConfigs.release - {%- endif %} - } - } + {% if args.packaging_options -%} + packagingOptions { + {%- for option in args.packaging_options %} + {{option}} + {%- endfor %} + } + {%- endif %} + + buildTypes { + debug { + } + release { + {% if args.sign -%} + signingConfig signingConfigs.release + {%- endif %} + } + } compileOptions { + {% if args.enable_androidx %} + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + {% else %} sourceCompatibility JavaVersion.VERSION_1_7 targetCompatibility JavaVersion.VERSION_1_7 + {% endif %} + {%- for option in args.compile_options %} + {{option}} + {%- endfor %} } sourceSets { main { jniLibs.srcDir 'libs' + java { + + {%- for adir, pattern in args.extra_source_dirs -%} + srcDir '{{adir}}' + {%- endfor -%} + } + } + } + + aaptOptions { + noCompress "tflite" } } dependencies { - {%- for aar in aars %} - compile(name: '{{ aar }}', ext: 'aar') - {%- endfor -%} - {%- for jar in jars %} - compile files('src/main/libs/{{ jar }}') - {%- endfor -%} - {%- if args.depends -%} - {%- for 
depend in args.depends %} - compile '{{ depend }}' - {%- endfor %} - {%- endif %} + {%- for aar in aars %} + implementation(name: '{{ aar }}', ext: 'aar') + {%- endfor -%} + {%- for jar in jars %} + implementation files('src/main/libs/{{ jar }}') + {%- endfor -%} + {%- if args.depends -%} + {%- for depend in args.depends %} + implementation '{{ depend }}' + {%- endfor %} + {%- endif %} + {% if args.presplash_lottie %} + implementation 'com.airbnb.android:lottie:3.4.0' + {%- endif %} } + diff --git a/p4a/pythonforandroid/bootstraps/common/build/templates/gradle.tmpl.properties b/p4a/pythonforandroid/bootstraps/common/build/templates/gradle.tmpl.properties new file mode 100644 index 0000000..f99dd5a --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/common/build/templates/gradle.tmpl.properties @@ -0,0 +1,4 @@ +{% if args.enable_androidx %} +android.useAndroidX=true +android.enableJetifier=true +{% endif %} \ No newline at end of file diff --git a/p4a/pythonforandroid/bootstraps/common/build/templates/lottie.xml b/p4a/pythonforandroid/bootstraps/common/build/templates/lottie.xml new file mode 100644 index 0000000..49fe8c9 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/common/build/templates/lottie.xml @@ -0,0 +1,22 @@ + + + + + + diff --git a/p4a/pythonforandroid/bootstraps/empty/__init__.py b/p4a/pythonforandroid/bootstraps/empty/__init__.py new file mode 100644 index 0000000..8d4c196 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/empty/__init__.py @@ -0,0 +1,16 @@ +from pythonforandroid.toolchain import Bootstrap + + +class EmptyBootstrap(Bootstrap): + name = 'empty' + + recipe_depends = [] + + can_be_chosen_automatically = False + + def assemble_distribution(self): + print('empty bootstrap has no distribute') + exit(1) + + +bootstrap = EmptyBootstrap() diff --git a/p4a/pythonforandroid/bootstraps/empty/build/.gitkeep b/p4a/pythonforandroid/bootstraps/empty/build/.gitkeep new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/empty/build/.gitkeep @@ -0,0 +1 @@ + diff --git a/p4a/pythonforandroid/bootstraps/sdl2/__init__.py b/p4a/pythonforandroid/bootstraps/sdl2/__init__.py new file mode 100644 index 0000000..662d43c --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/__init__.py @@ -0,0 +1,52 @@ +from pythonforandroid.toolchain import ( + Bootstrap, shprint, current_directory, info, info_main) +from pythonforandroid.util import ensure_dir +from os.path import join +import sh + + +class SDL2GradleBootstrap(Bootstrap): + name = 'sdl2' + + recipe_depends = list( + set(Bootstrap.recipe_depends).union({'sdl2'}) + ) + + def assemble_distribution(self): + info_main("# Creating Android project ({})".format(self.name)) + + info("Copying SDL2/gradle build") + shprint(sh.rm, "-rf", self.dist_dir) + shprint(sh.cp, "-r", self.build_dir, self.dist_dir) + + # either the build use environment variable (ANDROID_HOME) + # or the local.properties if exists + with current_directory(self.dist_dir): + with open('local.properties', 'w') as fileh: + fileh.write('sdk.dir={}'.format(self.ctx.sdk_dir)) + + with current_directory(self.dist_dir): + info("Copying Python distribution") + + self.distribute_javaclasses(self.ctx.javaclass_dir, + dest_dir=join("src", "main", "java")) + + for arch in self.ctx.archs: + python_bundle_dir = join(f'_python_bundle__{arch.arch}', '_python_bundle') + ensure_dir(python_bundle_dir) + + self.distribute_libs(arch, [self.ctx.get_libs_dir(arch.arch)]) + site_packages_dir = self.ctx.python_recipe.create_python_bundle( + 
join(self.dist_dir, python_bundle_dir), arch) + if not self.ctx.with_debug_symbols: + self.strip_libraries(arch) + self.fry_eggs(site_packages_dir) + + if 'sqlite3' not in self.ctx.recipe_build_order: + with open('blacklist.txt', 'a') as fileh: + fileh.write('\nsqlite3/*\nlib-dynload/_sqlite3.so\n') + + super().assemble_distribution() + + +bootstrap = SDL2GradleBootstrap() diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/.gitignore b/p4a/pythonforandroid/bootstraps/sdl2/build/.gitignore similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/.gitignore rename to p4a/pythonforandroid/bootstraps/sdl2/build/.gitignore diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/blacklist.txt b/p4a/pythonforandroid/bootstraps/sdl2/build/blacklist.txt new file mode 100644 index 0000000..d5e230c --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/blacklist.txt @@ -0,0 +1,84 @@ +# prevent user to include invalid extensions +*.apk +*.aab +*.apks +*.pxd + +# eggs +*.egg-info + +# unit test +unittest/* + +# python config +config/makesetup + +# unused kivy files (platform specific) +kivy/input/providers/wm_* +kivy/input/providers/mactouch* +kivy/input/providers/probesysfs* +kivy/input/providers/mtdev* +kivy/input/providers/hidinput* +kivy/core/camera/camera_videocapture* +kivy/core/spelling/*osx* +kivy/core/video/video_pyglet* +kivy/tools +kivy/tests/* +kivy/*/*.h +kivy/*/*.pxi + +# unused encodings +lib-dynload/*codec* +encodings/cp*.pyo +encodings/tis* +encodings/shift* +encodings/bz2* +encodings/iso* +encodings/undefined* +encodings/johab* +encodings/p* +encodings/m* +encodings/euc* +encodings/k* +encodings/unicode_internal* +encodings/quo* +encodings/gb* +encodings/big5* +encodings/hp* +encodings/hz* + +# unused python modules +bsddb/* +wsgiref/* +hotshot/* +pydoc_data/* +tty.pyo +anydbm.pyo +nturl2path.pyo +LICENCE.txt +macurl2path.pyo +dummy_threading.pyo +audiodev.pyo +antigravity.pyo +dumbdbm.pyo +sndhdr.pyo +__phello__.foo.pyo +sunaudio.pyo +os2emxpath.pyo +multiprocessing/dummy* + +# unused binaries python modules +lib-dynload/termios.so +lib-dynload/_lsprof.so +lib-dynload/*audioop.so +lib-dynload/_hotshot.so +lib-dynload/_heapq.so +lib-dynload/_json.so +lib-dynload/grp.so +lib-dynload/resource.so +lib-dynload/pyexpat.so +lib-dynload/_ctypes_test.so +lib-dynload/_testcapi.so + +# odd files +plat-linux3/regen diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/jni/Application.mk b/p4a/pythonforandroid/bootstraps/sdl2/build/jni/Application.mk new file mode 100644 index 0000000..1559853 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/jni/Application.mk @@ -0,0 +1,8 @@ + +# Uncomment this if you're using STL in your project +# See CPLUSPLUS-SUPPORT.html in the NDK documentation for more information +# APP_STL := stlport_static + +# APP_ABI := armeabi armeabi-v7a x86 +APP_ABI := $(ARCH) +APP_PLATFORM := $(NDK_API) diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/jni/application/src/Android_static.mk b/p4a/pythonforandroid/bootstraps/sdl2/build/jni/application/src/Android_static.mk new file mode 100644 index 0000000..517660b --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/jni/application/src/Android_static.mk @@ -0,0 +1,12 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE := main + +LOCAL_SRC_FILES := start.c + +LOCAL_STATIC_LIBRARIES := SDL2_static + +include $(BUILD_SHARED_LIBRARY) +$(call import-module,SDL)LOCAL_PATH := $(call my-dir) diff --git 
a/p4a/pythonforandroid/bootstraps/sdl2/build/jni/application/src/bootstrap_name.h b/p4a/pythonforandroid/bootstraps/sdl2/build/jni/application/src/bootstrap_name.h new file mode 100644 index 0000000..83dec51 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/jni/application/src/bootstrap_name.h @@ -0,0 +1,5 @@ + +#define BOOTSTRAP_NAME_SDL2 + +const char bootstrap_name[] = "SDL2"; // capitalized for historic reasons + diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/drawable/.gitkeep b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/assets/.gitkeep similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/drawable/.gitkeep rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/assets/.gitkeep diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/assets/.gitkeep b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/.gitkeep similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/assets/.gitkeep rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/.gitkeep diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/PythonActivity.java b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/PythonActivity.java new file mode 100644 index 0000000..361975a --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/PythonActivity.java @@ -0,0 +1,643 @@ +package org.kivy.android; + +import java.io.InputStream; +import java.io.FileWriter; +import java.io.File; +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Timer; +import java.util.TimerTask; + +import android.app.Activity; +import android.content.Context; +import android.content.Intent; +import android.content.pm.ActivityInfo; +import android.content.pm.PackageManager; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Color; +import android.graphics.PixelFormat; +import android.os.AsyncTask; +import android.os.Bundle; +import android.os.PowerManager; +import android.util.Log; +import android.view.inputmethod.InputMethodManager; +import android.view.SurfaceView; +import android.view.ViewGroup; +import android.view.View; +import android.widget.ImageView; +import android.widget.Toast; +import android.content.res.Resources.NotFoundException; + +import org.libsdl.app.SDLActivity; + 
+import org.kivy.android.launcher.Project; + +import org.renpy.android.ResourceManager; + + +public class PythonActivity extends SDLActivity { + private static final String TAG = "PythonActivity"; + + public static PythonActivity mActivity = null; + + private ResourceManager resourceManager = null; + private Bundle mMetaData = null; + private PowerManager.WakeLock mWakeLock = null; + + public String getAppRoot() { + String app_root = getFilesDir().getAbsolutePath() + "/app"; + return app_root; + } + + @Override + protected void onCreate(Bundle savedInstanceState) { + Log.v(TAG, "PythonActivity onCreate running"); + resourceManager = new ResourceManager(this); + + Log.v(TAG, "About to do super onCreate"); + super.onCreate(savedInstanceState); + Log.v(TAG, "Did super onCreate"); + + this.mActivity = this; + this.showLoadingScreen(this.getLoadingScreen()); + + new UnpackFilesTask().execute(getAppRoot()); + } + + public void loadLibraries() { + String app_root = new String(getAppRoot()); + File app_root_file = new File(app_root); + PythonUtil.loadLibraries(app_root_file, + new File(getApplicationInfo().nativeLibraryDir)); + } + + /** + * Show an error using a toast. (Only makes sense from non-UI + * threads.) + */ + public void toastError(final String msg) { + + final Activity thisActivity = this; + + runOnUiThread(new Runnable () { + public void run() { + Toast.makeText(thisActivity, msg, Toast.LENGTH_LONG).show(); + } + }); + + // Wait to show the error. + synchronized (this) { + try { + this.wait(1000); + } catch (InterruptedException e) { + } + } + } + + private class UnpackFilesTask extends AsyncTask<String, Void, String> { + @Override + protected String doInBackground(String... params) { + File app_root_file = new File(params[0]); + Log.v(TAG, "Ready to unpack"); + PythonUtil.unpackAsset(mActivity, "private", app_root_file, true); + PythonUtil.unpackPyBundle(mActivity, getApplicationInfo().nativeLibraryDir + "/" + "libpybundle", app_root_file, false); + return null; + } + + @Override + protected void onPostExecute(String result) { + // Figure out the directory where the game is. If the game was + // given to us via an intent, then we use the scheme-specific + // part of that intent to determine the file to launch. We + // also use the android.txt file to determine the orientation. + // + // Otherwise, we use the public data, if we have it, or the + // private data if we do not. + mActivity.finishLoad(); + + // finishLoad called setContentView with the SDL view, which + // removed the loading screen. However, we still need it to + // show until the app is ready to render, so pop it back up + // on top of the SDL view. + mActivity.showLoadingScreen(getLoadingScreen()); + + String app_root_dir = getAppRoot(); + if (getIntent() != null && getIntent().getAction() != null && + getIntent().getAction().equals("org.kivy.LAUNCH")) { + File path = new File(getIntent().getData().getSchemeSpecificPart()); + + Project p = Project.scanDirectory(path); + String entry_point = getEntryPoint(p.dir); + SDLActivity.nativeSetenv("ANDROID_ENTRYPOINT", p.dir + "/" + entry_point); + SDLActivity.nativeSetenv("ANDROID_ARGUMENT", p.dir); + SDLActivity.nativeSetenv("ANDROID_APP_PATH", p.dir); + + if (p != null) { + if (p.landscape) { + setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); + } else { + setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); + } + } + + // Let old apps know they started. 
+ try { + FileWriter f = new FileWriter(new File(path, ".launch")); + f.write("started"); + f.close(); + } catch (IOException e) { + // pass + } + } else { + String entry_point = getEntryPoint(app_root_dir); + SDLActivity.nativeSetenv("ANDROID_ENTRYPOINT", entry_point); + SDLActivity.nativeSetenv("ANDROID_ARGUMENT", app_root_dir); + SDLActivity.nativeSetenv("ANDROID_APP_PATH", app_root_dir); + } + + String mFilesDirectory = mActivity.getFilesDir().getAbsolutePath(); + Log.v(TAG, "Setting env vars for start.c and Python to use"); + SDLActivity.nativeSetenv("ANDROID_PRIVATE", mFilesDirectory); + SDLActivity.nativeSetenv("ANDROID_UNPACK", app_root_dir); + SDLActivity.nativeSetenv("PYTHONHOME", app_root_dir); + SDLActivity.nativeSetenv("PYTHONPATH", app_root_dir + ":" + app_root_dir + "/lib"); + SDLActivity.nativeSetenv("PYTHONOPTIMIZE", "2"); + + try { + Log.v(TAG, "Access to our meta-data..."); + mActivity.mMetaData = mActivity.getPackageManager().getApplicationInfo( + mActivity.getPackageName(), PackageManager.GET_META_DATA).metaData; + + PowerManager pm = (PowerManager) mActivity.getSystemService(Context.POWER_SERVICE); + if ( mActivity.mMetaData.getInt("wakelock") == 1 ) { + mActivity.mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "Screen On"); + mActivity.mWakeLock.acquire(); + } + if ( mActivity.mMetaData.getInt("surface.transparent") != 0 ) { + Log.v(TAG, "Surface will be transparent."); + getSurface().setZOrderOnTop(true); + getSurface().getHolder().setFormat(PixelFormat.TRANSPARENT); + } else { + Log.i(TAG, "Surface will NOT be transparent"); + } + } catch (PackageManager.NameNotFoundException e) { + } + + // Launch app if that hasn't been done yet: + if (mActivity.mHasFocus && ( + // never went into proper resume state: + mActivity.mCurrentNativeState == NativeState.INIT || + ( + // resumed earlier but wasn't ready yet + mActivity.mCurrentNativeState == NativeState.RESUMED && + mActivity.mSDLThread == null + ))) { + // Because sometimes the app will get stuck here and never + // actually run, ensure that it gets launched if we're active: + mActivity.resumeNativeThread(); + } + } + + @Override + protected void onPreExecute() { + } + + @Override + protected void onProgressUpdate(Void... 
values) { + } + } + + public static ViewGroup getLayout() { + return mLayout; + } + + public static SurfaceView getSurface() { + return mSurface; + } + + //---------------------------------------------------------------------------- + // Listener interface for onNewIntent + // + + public interface NewIntentListener { + void onNewIntent(Intent intent); + } + + private List newIntentListeners = null; + + public void registerNewIntentListener(NewIntentListener listener) { + if ( this.newIntentListeners == null ) + this.newIntentListeners = Collections.synchronizedList(new ArrayList()); + this.newIntentListeners.add(listener); + } + + public void unregisterNewIntentListener(NewIntentListener listener) { + if ( this.newIntentListeners == null ) + return; + this.newIntentListeners.remove(listener); + } + + @Override + protected void onNewIntent(Intent intent) { + if ( this.newIntentListeners == null ) + return; + this.onResume(); + synchronized ( this.newIntentListeners ) { + Iterator iterator = this.newIntentListeners.iterator(); + while ( iterator.hasNext() ) { + (iterator.next()).onNewIntent(intent); + } + } + } + + //---------------------------------------------------------------------------- + // Listener interface for onActivityResult + // + + public interface ActivityResultListener { + void onActivityResult(int requestCode, int resultCode, Intent data); + } + + private List activityResultListeners = null; + + public void registerActivityResultListener(ActivityResultListener listener) { + if ( this.activityResultListeners == null ) + this.activityResultListeners = Collections.synchronizedList(new ArrayList()); + this.activityResultListeners.add(listener); + } + + public void unregisterActivityResultListener(ActivityResultListener listener) { + if ( this.activityResultListeners == null ) + return; + this.activityResultListeners.remove(listener); + } + + @Override + protected void onActivityResult(int requestCode, int resultCode, Intent intent) { + if ( this.activityResultListeners == null ) + return; + this.onResume(); + synchronized ( this.activityResultListeners ) { + Iterator iterator = this.activityResultListeners.iterator(); + while ( iterator.hasNext() ) + (iterator.next()).onActivityResult(requestCode, resultCode, intent); + } + } + + public static void start_service( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument + ) { + _do_start_service( + serviceTitle, serviceDescription, pythonServiceArgument, true + ); + } + + public static void start_service_not_as_foreground( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument + ) { + _do_start_service( + serviceTitle, serviceDescription, pythonServiceArgument, false + ); + } + + public static void _do_start_service( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument, + boolean showForegroundNotification + ) { + Intent serviceIntent = new Intent(PythonActivity.mActivity, PythonService.class); + String argument = PythonActivity.mActivity.getFilesDir().getAbsolutePath(); + String app_root_dir = PythonActivity.mActivity.getAppRoot(); + String entry_point = PythonActivity.mActivity.getEntryPoint(app_root_dir + "/service"); + serviceIntent.putExtra("androidPrivate", argument); + serviceIntent.putExtra("androidArgument", app_root_dir); + serviceIntent.putExtra("serviceEntrypoint", "service/" + entry_point); + serviceIntent.putExtra("pythonName", "python"); + serviceIntent.putExtra("pythonHome", app_root_dir); + serviceIntent.putExtra("pythonPath", 
app_root_dir + ":" + app_root_dir + "/lib"); + serviceIntent.putExtra("serviceStartAsForeground", + (showForegroundNotification ? "true" : "false") + ); + serviceIntent.putExtra("serviceTitle", serviceTitle); + serviceIntent.putExtra("serviceDescription", serviceDescription); + serviceIntent.putExtra("pythonServiceArgument", pythonServiceArgument); + PythonActivity.mActivity.startService(serviceIntent); + } + + public static void stop_service() { + Intent serviceIntent = new Intent(PythonActivity.mActivity, PythonService.class); + PythonActivity.mActivity.stopService(serviceIntent); + } + + /** Loading screen view **/ + public static ImageView mImageView = null; + public static View mLottieView = null; + /** Whether main routine/actual app has started yet **/ + protected boolean mAppConfirmedActive = false; + /** Timer for delayed loading screen removal. **/ + protected Timer loadingScreenRemovalTimer = null; + + // Overridden since it's called often, to check whether to remove the + // loading screen: + @Override + protected boolean sendCommand(int command, Object data) { + boolean result = super.sendCommand(command, data); + considerLoadingScreenRemoval(); + return result; + } + + /** Confirm that the app's main routine has been launched. + **/ + @Override + public void appConfirmedActive() { + if (!mAppConfirmedActive) { + Log.v(TAG, "appConfirmedActive() -> preparing loading screen removal"); + mAppConfirmedActive = true; + considerLoadingScreenRemoval(); + } + } + + /** This is called from various places to check whether the app's main + * routine has been launched already, and if it has, then the loading + * screen will be removed. + **/ + public void considerLoadingScreenRemoval() { + if (loadingScreenRemovalTimer != null) + return; + runOnUiThread(new Runnable() { + public void run() { + if (((PythonActivity)PythonActivity.mSingleton).mAppConfirmedActive && + loadingScreenRemovalTimer == null) { + // Remove loading screen but with a delay. + // (app can use p4a's android.loadingscreen module to + // do it quicker if it wants to) + // get a handler (call from main thread) + // this will run when timer elapses + TimerTask removalTask = new TimerTask() { + @Override + public void run() { + // post a runnable to the handler + runOnUiThread(new Runnable() { + @Override + public void run() { + PythonActivity activity = + ((PythonActivity)PythonActivity.mSingleton); + if (activity != null) + activity.removeLoadingScreen(); + } + }); + } + }; + loadingScreenRemovalTimer = new Timer(); + loadingScreenRemovalTimer.schedule(removalTask, 5000); + } + } + }); + } + + public void removeLoadingScreen() { + runOnUiThread(new Runnable() { + public void run() { + View view = mLottieView != null ? mLottieView : mImageView; + if (view != null && view.getParent() != null) { + ((ViewGroup)view.getParent()).removeView(view); + mLottieView = null; + mImageView = null; + } + } + }); + } + + public String getEntryPoint(String search_dir) { + /* Get the main file (.pyc|.py) depending on if we + * have a compiled version or not. 
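+         * For example, a distribution that ships byte-compiled sources will
+         * contain app/main.pyc and that name is returned; otherwise we fall
+         * back to plain main.py below.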
+        */
+        List<String> entryPoints = new ArrayList<String>();
+        entryPoints.add("main.pyc");  // python 3 compiled files
+        for (String value : entryPoints) {
+            File mainFile = new File(search_dir + "/" + value);
+            if (mainFile.exists()) {
+                return value;
+            }
+        }
+        return "main.py";
+    }
+
+    protected void showLoadingScreen(View view) {
+        try {
+            if (mLayout == null) {
+                setContentView(view);
+            } else if (view.getParent() == null) {
+                mLayout.addView(view);
+            }
+        } catch (IllegalStateException e) {
+            // The loading screen may be added twice if the app is quickly
+            // tabbed out and back in.
+            // (Gives error "The specified child already has a parent.
+            // You must call removeView() on the child's parent first.")
+        }
+    }
+
+    protected void setBackgroundColor(View view) {
+        /*
+         * Set the presplash loading screen background color
+         * https://developer.android.com/reference/android/graphics/Color.html
+         * Parse the color string, and return the corresponding color-int.
+         * If the string cannot be parsed, an IllegalArgumentException is thrown.
+         * Supported formats are: #RRGGBB #AARRGGBB or one of the following names:
+         * 'red', 'blue', 'green', 'black', 'white', 'gray', 'cyan', 'magenta', 'yellow',
+         * 'lightgray', 'darkgray', 'grey', 'lightgrey', 'darkgrey', 'aqua', 'fuchsia',
+         * 'lime', 'maroon', 'navy', 'olive', 'purple', 'silver', 'teal'.
+         */
+        String backgroundColor = resourceManager.getString("presplash_color");
+        if (backgroundColor != null) {
+            try {
+                view.setBackgroundColor(Color.parseColor(backgroundColor));
+            } catch (IllegalArgumentException e) {}
+        }
+    }
+
+    protected View getLoadingScreen() {
+        // If we have an mLottieView or mImageView already, then do
+        // nothing because it will have already been made the content
+        // view or added to the layout.
+        if (mLottieView != null || mImageView != null) {
+            // we already have a splash screen
+            return mLottieView != null ? mLottieView : mImageView;
+        }
+
+        // first try to load the lottie one
+        try {
+            mLottieView = getLayoutInflater().inflate(
+                this.resourceManager.getIdentifier("lottie", "layout"),
+                mLayout,
+                false
+            );
+            try {
+                if (mLayout == null) {
+                    setContentView(mLottieView);
+                } else if (PythonActivity.mLottieView.getParent() == null) {
+                    mLayout.addView(mLottieView);
+                }
+            } catch (IllegalStateException e) {
+                // The loading screen may be added twice if the app is quickly
+                // tabbed out and back in.
+                // (Gives error "The specified child already has a parent.
+ // You must call removeView() on the child's parent first.") + } + setBackgroundColor(mLottieView); + return mLottieView; + } + catch (NotFoundException e) { + Log.v("SDL", "couldn't find lottie layout or animation, trying static splash"); + } + + // no lottie asset, try to load the static image then + int presplashId = this.resourceManager.getIdentifier("presplash", "drawable"); + InputStream is = this.getResources().openRawResource(presplashId); + Bitmap bitmap = null; + try { + bitmap = BitmapFactory.decodeStream(is); + } finally { + try { + is.close(); + } catch (IOException e) {}; + } + + mImageView = new ImageView(this); + mImageView.setImageBitmap(bitmap); + setBackgroundColor(mImageView); + + mImageView.setLayoutParams(new ViewGroup.LayoutParams( + ViewGroup.LayoutParams.FILL_PARENT, + ViewGroup.LayoutParams.FILL_PARENT)); + mImageView.setScaleType(ImageView.ScaleType.FIT_CENTER); + return mImageView; + } + + @Override + protected void onPause() { + if (this.mWakeLock != null && mWakeLock.isHeld()) { + this.mWakeLock.release(); + } + + Log.v(TAG, "onPause()"); + try { + super.onPause(); + } catch (UnsatisfiedLinkError e) { + // Catch pause while still in loading screen failing to + // call native function (since it's not yet loaded) + } + } + + @Override + protected void onResume() { + if (this.mWakeLock != null) { + this.mWakeLock.acquire(); + } + Log.v(TAG, "onResume()"); + try { + super.onResume(); + } catch (UnsatisfiedLinkError e) { + // Catch resume while still in loading screen failing to + // call native function (since it's not yet loaded) + } + considerLoadingScreenRemoval(); + } + + @Override + public void onWindowFocusChanged(boolean hasFocus) { + try { + super.onWindowFocusChanged(hasFocus); + } catch (UnsatisfiedLinkError e) { + // Catch window focus while still in loading screen failing to + // call native function (since it's not yet loaded) + } + considerLoadingScreenRemoval(); + } + + /** + * Used by android.permissions p4a module to register a call back after + * requesting runtime permissions + **/ + public interface PermissionsCallback { + void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults); + } + + private PermissionsCallback permissionCallback; + private boolean havePermissionsCallback = false; + + public void addPermissionsCallback(PermissionsCallback callback) { + permissionCallback = callback; + havePermissionsCallback = true; + Log.v(TAG, "addPermissionsCallback(): Added callback for onRequestPermissionsResult"); + } + + @Override + public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { + Log.v(TAG, "onRequestPermissionsResult()"); + if (havePermissionsCallback) { + Log.v(TAG, "onRequestPermissionsResult passed to callback"); + permissionCallback.onRequestPermissionsResult(requestCode, permissions, grantResults); + } + super.onRequestPermissionsResult(requestCode, permissions, grantResults); + } + + /** + * Used by android.permissions p4a module to check a permission + **/ + public boolean checkCurrentPermission(String permission) { + if (android.os.Build.VERSION.SDK_INT < 23) + return true; + + try { + java.lang.reflect.Method methodCheckPermission = + Activity.class.getMethod("checkSelfPermission", String.class); + Object resultObj = methodCheckPermission.invoke(this, permission); + int result = Integer.parseInt(resultObj.toString()); + if (result == PackageManager.PERMISSION_GRANTED) + return true; + } catch (IllegalAccessException | NoSuchMethodException | + 
InvocationTargetException e) { + } + return false; + } + + /** + * Used by android.permissions p4a module to request runtime permissions + **/ + public void requestPermissionsWithRequestCode(String[] permissions, int requestCode) { + if (android.os.Build.VERSION.SDK_INT < 23) + return; + try { + java.lang.reflect.Method methodRequestPermission = + Activity.class.getMethod("requestPermissions", + String[].class, int.class); + methodRequestPermission.invoke(this, permissions, requestCode); + } catch (IllegalAccessException | NoSuchMethodException | + InvocationTargetException e) { + } + } + + public void requestPermissions(String[] permissions) { + requestPermissionsWithRequestCode(permissions, 1); + } + + public static void changeKeyboard(int inputType) { + if (SDLActivity.keyboardInputType != inputType){ + SDLActivity.keyboardInputType = inputType; + InputMethodManager imm = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE); + imm.restartInput(mTextEdit); + } + } +} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/Project.java b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/launcher/Project.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/Project.java rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/launcher/Project.java diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/launcher/ProjectAdapter.java b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/launcher/ProjectAdapter.java new file mode 100644 index 0000000..457f83f --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/launcher/ProjectAdapter.java @@ -0,0 +1,35 @@ +package org.kivy.android.launcher; + +import android.app.Activity; +import android.view.View; +import android.view.ViewGroup; +import android.widget.ArrayAdapter; +import android.widget.TextView; +import android.widget.ImageView; + +import org.renpy.android.ResourceManager; + +public class ProjectAdapter extends ArrayAdapter { + + private ResourceManager resourceManager; + + public ProjectAdapter(Activity context) { + super(context, 0); + resourceManager = new ResourceManager(context); + } + + public View getView(int position, View convertView, ViewGroup parent) { + Project p = getItem(position); + + View v = resourceManager.inflateView("chooser_item"); + TextView title = (TextView) resourceManager.getViewById(v, "title"); + TextView author = (TextView) resourceManager.getViewById(v, "author"); + ImageView icon = (ImageView) resourceManager.getViewById(v, "icon"); + + title.setText(p.title); + author.setText(p.author); + icon.setImageBitmap(p.icon); + + return v; + } +} diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/launcher/ProjectChooser.java b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/launcher/ProjectChooser.java new file mode 100644 index 0000000..486f88b --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/java/org/kivy/android/launcher/ProjectChooser.java @@ -0,0 +1,90 @@ +package org.kivy.android.launcher; + +import android.app.Activity; + +import android.content.Intent; +import android.view.View; +import android.widget.ListView; +import android.widget.TextView; +import android.widget.AdapterView; +import android.os.Environment; + +import java.io.File; +import 
java.util.Arrays; +import android.net.Uri; + +import org.renpy.android.ResourceManager; + +public class ProjectChooser extends Activity implements AdapterView.OnItemClickListener { + + ResourceManager resourceManager; + + String urlScheme; + + @Override + public void onStart() + { + super.onStart(); + + resourceManager = new ResourceManager(this); + + urlScheme = resourceManager.getString("urlScheme"); + + // Set the window title. + setTitle(resourceManager.getString("appName")); + + // Scan the sdcard for files, and sort them. + File dir = new File(Environment.getExternalStorageDirectory(), urlScheme); + + File entries[] = dir.listFiles(); + + if (entries == null) { + entries = new File[0]; + } + + Arrays.sort(entries); + + // Create a ProjectAdapter and fill it with projects. + ProjectAdapter projectAdapter = new ProjectAdapter(this); + + // Populate it with the properties files. + for (File d : entries) { + Project p = Project.scanDirectory(d); + if (p != null) { + projectAdapter.add(p); + } + } + + if (projectAdapter.getCount() != 0) { + + View v = resourceManager.inflateView("project_chooser"); + ListView l = (ListView) resourceManager.getViewById(v, "projectList"); + + l.setAdapter(projectAdapter); + l.setOnItemClickListener(this); + + setContentView(v); + + } else { + + View v = resourceManager.inflateView("project_empty"); + TextView emptyText = (TextView) resourceManager.getViewById(v, "emptyText"); + + emptyText.setText("No projects are available to launch. Please place a project into " + dir + " and restart this application. Press the back button to exit."); + + setContentView(v); + } + } + + public void onItemClick(AdapterView parent, View view, int position, long id) { + Project p = (Project) parent.getItemAtPosition(position); + + Intent intent = new Intent( + "org.kivy.LAUNCH", + Uri.fromParts(urlScheme, p.dir, "")); + + intent.setClassName(getPackageName(), "org.kivy.android.PythonActivity"); + this.startActivity(intent); + this.finish(); + } +} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/.gitkeep b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/jniLibs/.gitkeep similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/.gitkeep rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/jniLibs/.gitkeep diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/jniLibs/.gitkeep b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/libs/.gitkeep similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/jniLibs/.gitkeep rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/libs/.gitkeep diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-hdpi/ic_launcher.png b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable-hdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-hdpi/ic_launcher.png rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable-hdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-mdpi/ic_launcher.png b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable-mdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-mdpi/ic_launcher.png rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable-mdpi/ic_launcher.png diff --git 
a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xhdpi/ic_launcher.png b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable-xhdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xhdpi/ic_launcher.png rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable-xhdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/ic_launcher.png b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable-xxhdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/ic_launcher.png rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable-xxhdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/libs/.gitkeep b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable/.gitkeep similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/libs/.gitkeep rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/drawable/.gitkeep diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/layout/chooser_item.xml b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/layout/chooser_item.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/layout/chooser_item.xml rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/layout/chooser_item.xml diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/layout/main.xml b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/layout/main.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/layout/main.xml rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/layout/main.xml diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/layout/project_chooser.xml b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/layout/project_chooser.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/layout/project_chooser.xml rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/layout/project_chooser.xml diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/layout/project_empty.xml b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/layout/project_empty.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/layout/project_empty.xml rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/layout/project_empty.xml diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable/.gitkeep b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/mipmap-anydpi-v26/.gitkeep similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable/.gitkeep rename to p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/mipmap-anydpi-v26/.gitkeep diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/mipmap/.gitkeep b/p4a/pythonforandroid/bootstraps/sdl2/build/src/main/res/mipmap/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/src/patches/SDLActivity.java.patch b/p4a/pythonforandroid/bootstraps/sdl2/build/src/patches/SDLActivity.java.patch new file mode 100644 index 0000000..d061be8 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/src/patches/SDLActivity.java.patch @@ -0,0 +1,75 @@ +--- a/src/main/java/org/libsdl/app/SDLActivity.java ++++ 
b/src/main/java/org/libsdl/app/SDLActivity.java +@@ -222,6 +222,8 @@ public class SDLActivity extends Activity implements View.OnSystemUiVisibilityCh + // This is what SDL runs in. It invokes SDL_main(), eventually + protected static Thread mSDLThread; + ++ public static int keyboardInputType = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD; ++ + protected static SDLGenericMotionListener_API12 getMotionListener() { + if (mMotionListener == null) { + if (Build.VERSION.SDK_INT >= 26) { +@@ -324,6 +326,15 @@ public class SDLActivity extends Activity implements View.OnSystemUiVisibilityCh + Log.v(TAG, "onCreate()"); + super.onCreate(savedInstanceState); + ++ SDLActivity.initialize(); ++ // So we can call stuff from static callbacks ++ mSingleton = this; ++ } ++ ++ // We don't do this in onCreate because we unpack and load the app data on a thread ++ // and we can't run setup tasks until that thread completes. ++ protected void finishLoad() { ++ + try { + Thread.currentThread().setName("SDLActivity"); + } catch (Exception e) { +@@ -835,7 +846,7 @@ public class SDLActivity extends Activity implements View.OnSystemUiVisibilityCh + Handler commandHandler = new SDLCommandHandler(); + + // Send a message from the SDLMain thread +- boolean sendCommand(int command, Object data) { ++ protected boolean sendCommand(int command, Object data) { + Message msg = commandHandler.obtainMessage(); + msg.arg1 = command; + msg.obj = data; +@@ -1384,6 +1395,20 @@ public class SDLActivity extends Activity implements View.OnSystemUiVisibilityCh + return SDLActivity.mSurface.getNativeSurface(); + } + ++ /** ++ * Calls turnActive() on singleton to keep loading screen active ++ */ ++ public static void triggerAppConfirmedActive() { ++ mSingleton.appConfirmedActive(); ++ } ++ ++ /** ++ * Trick needed for loading screen, overridden by PythonActivity ++ * to keep loading screen active ++ */ ++ public void appConfirmedActive() { ++ } ++ + // Input + + /** +@@ -1878,6 +1903,7 @@ class SDLMain implements Runnable { + + Log.v("SDL", "Running main function " + function + " from library " + library); + ++ SDLActivity.mSingleton.appConfirmedActive(); + SDLActivity.nativeRunMain(library, function, arguments); + + Log.v("SDL", "Finished main function"); +@@ -1935,8 +1961,7 @@ class DummyEdit extends View implements View.OnKeyListener { + public InputConnection onCreateInputConnection(EditorInfo outAttrs) { + ic = new SDLInputConnection(this, true); + +- outAttrs.inputType = InputType.TYPE_CLASS_TEXT | +- InputType.TYPE_TEXT_FLAG_MULTI_LINE; ++ outAttrs.inputType = SDLActivity.keyboardInputType | InputType.TYPE_TEXT_FLAG_MULTI_LINE; + outAttrs.imeOptions = EditorInfo.IME_FLAG_NO_EXTRACT_UI | + EditorInfo.IME_FLAG_NO_FULLSCREEN /* API 11 */; + diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/templates/AndroidManifest.tmpl.xml b/p4a/pythonforandroid/bootstraps/sdl2/build/templates/AndroidManifest.tmpl.xml new file mode 100644 index 0000000..b5ddde3 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/templates/AndroidManifest.tmpl.xml @@ -0,0 +1,146 @@ + + + + + = 9 %} + android:xlargeScreens="true" + {% endif %} + /> + + + + + + + + + + {% for perm in args.permissions %} + {% if '.' 
in perm %} + + {% else %} + + {% endif %} + {% endfor %} + + {% if args.wakelock %} + + {% endif %} + + {% if args.billing_pubkey %} + + {% endif %} + + {{ args.extra_manifest_xml }} + + + + + {% for l in args.android_used_libs %} + + {% endfor %} + + {% for m in args.meta_data %} + {% endfor %} + + + + + {% if args.launcher %} + + + + + + {% else %} + + + + + {% endif %} + + {%- if args.intent_filters -%} + {{- args.intent_filters -}} + {%- endif -%} + + + {% if args.launcher %} + + + + + + + + + {% endif %} + + {% if service or args.launcher %} + + {% endif %} + {% for name in service_names %} + + {% endfor %} + {% for name in native_services %} + + {% endfor %} + + {% if args.billing_pubkey %} + + + + + + + + + {% endif %} + {% for a in args.add_activity %} + + {% endfor %} + + + diff --git a/p4a/pythonforandroid/bootstraps/sdl2/build/templates/strings.tmpl.xml b/p4a/pythonforandroid/bootstraps/sdl2/build/templates/strings.tmpl.xml new file mode 100644 index 0000000..c802551 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/sdl2/build/templates/strings.tmpl.xml @@ -0,0 +1,7 @@ + + + {{ args.name }} + {{ private_version }} + {{ args.presplash_color }} + {{ url_scheme }} + diff --git a/p4a/pythonforandroid/bootstraps/service_library/__init__.py b/p4a/pythonforandroid/bootstraps/service_library/__init__.py new file mode 100644 index 0000000..0b41be8 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_library/__init__.py @@ -0,0 +1,9 @@ +from pythonforandroid.bootstraps.service_only import ServiceOnlyBootstrap + + +class ServiceLibraryBootstrap(ServiceOnlyBootstrap): + + name = 'service_library' + + +bootstrap = ServiceLibraryBootstrap() diff --git a/p4a/pythonforandroid/bootstraps/service_library/build/jni/application/src/bootstrap_name.h b/p4a/pythonforandroid/bootstraps/service_library/build/jni/application/src/bootstrap_name.h new file mode 100644 index 0000000..01fd122 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_library/build/jni/application/src/bootstrap_name.h @@ -0,0 +1,6 @@ + +#define BOOTSTRAP_NAME_LIBRARY +#define BOOTSTRAP_USES_NO_SDL_HEADERS + +const char bootstrap_name[] = "service_library"; + diff --git a/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java b/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java new file mode 100644 index 0000000..58a1c5e --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java @@ -0,0 +1,19 @@ +package org.kivy.android; + +import android.content.BroadcastReceiver; +import android.content.Intent; +import android.content.Context; + +public class GenericBroadcastReceiver extends BroadcastReceiver { + + GenericBroadcastReceiverCallback listener; + + public GenericBroadcastReceiver(GenericBroadcastReceiverCallback listener) { + super(); + this.listener = listener; + } + + public void onReceive(Context context, Intent intent) { + this.listener.onReceive(context, intent); + } +} diff --git a/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java b/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java new file mode 100644 index 0000000..1a87c98 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java @@ -0,0 
+1,8 @@ +package org.kivy.android; + +import android.content.Intent; +import android.content.Context; + +public interface GenericBroadcastReceiverCallback { + void onReceive(Context context, Intent intent); +}; diff --git a/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/PythonActivity.java b/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/PythonActivity.java new file mode 100644 index 0000000..7be751d --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_library/build/src/main/java/org/kivy/android/PythonActivity.java @@ -0,0 +1,9 @@ +package org.kivy.android; + +import android.app.Activity; + +// Required by PythonService class +public class PythonActivity extends Activity { + public static PythonActivity mActivity = null; +} + diff --git a/p4a/pythonforandroid/bootstraps/service_library/build/src/main/res/mipmap/.gitkeep b/p4a/pythonforandroid/bootstraps/service_library/build/src/main/res/mipmap/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/service_library/build/templates/AndroidManifest.tmpl.xml b/p4a/pythonforandroid/bootstraps/service_library/build/templates/AndroidManifest.tmpl.xml new file mode 100644 index 0000000..f667651 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_library/build/templates/AndroidManifest.tmpl.xml @@ -0,0 +1,18 @@ + + + + + + + + {% for name in service_names %} + + {% endfor %} + + + diff --git a/p4a/pythonforandroid/bootstraps/service_library/build/templates/Service.tmpl.java b/p4a/pythonforandroid/bootstraps/service_library/build/templates/Service.tmpl.java new file mode 100644 index 0000000..f1eaf07 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_library/build/templates/Service.tmpl.java @@ -0,0 +1,82 @@ +package {{ args.package }}; + +import java.io.File; + +import android.os.Build; +import android.content.Intent; +import android.content.Context; +import android.content.res.Resources; +import android.util.Log; + +import org.kivy.android.PythonService; +import org.kivy.android.PythonUtil; + +public class Service{{ name|capitalize }} extends PythonService { + + private static final String TAG = "PythonService"; + + {% if sticky %} + @Override + public int startType() { + return START_STICKY; + } + {% endif %} + + @Override + protected int getServiceId() { + return {{ service_id }}; + } + + public static void prepare(Context ctx) { + String appRoot = PythonUtil.getAppRoot(ctx); + Log.v(TAG, "Ready to unpack"); + File app_root_file = new File(appRoot); + PythonUtil.unpackAsset(ctx, "private", app_root_file, true); + PythonUtil.unpackPyBundle(ctx, ctx.getApplicationInfo().nativeLibraryDir + "/" + "libpybundle", app_root_file, false); + } + + public static void start(Context ctx, String pythonServiceArgument) { + Intent intent = getDefaultIntent(ctx, pythonServiceArgument); + + //foreground: {{foreground}} + {% if foreground %} + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + ctx.startForegroundService(intent); + } else { + ctx.startService(intent); + } + {% else %} + ctx.startService(intent); + {% endif %} + } + + static public Intent getDefaultIntent(Context ctx, String pythonServiceArgument) { + String appRoot = PythonUtil.getAppRoot(ctx); + Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class); + intent.putExtra("androidPrivate", appRoot); + intent.putExtra("androidArgument", appRoot); + intent.putExtra("serviceEntrypoint", "{{ entrypoint }}"); + intent.putExtra("serviceTitle", "{{ 
name|capitalize }}"); + intent.putExtra("serviceDescription", ""); + intent.putExtra("pythonName", "{{ name }}"); + intent.putExtra("serviceStartAsForeground", "{{ foreground|lower }}"); + intent.putExtra("pythonHome", appRoot); + intent.putExtra("androidUnpack", appRoot); + intent.putExtra("pythonPath", appRoot + ":" + appRoot + "/lib"); + intent.putExtra("pythonServiceArgument", pythonServiceArgument); + return intent; + } + + @Override + protected Intent getThisDefaultIntent(Context ctx, String pythonServiceArgument) { + return Service{{ name|capitalize }}.getDefaultIntent(ctx, pythonServiceArgument); + } + + + + static public void stop(Context ctx) { + Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class); + ctx.stopService(intent); + } + +} diff --git a/p4a/pythonforandroid/bootstraps/service_only/__init__.py b/p4a/pythonforandroid/bootstraps/service_only/__init__.py new file mode 100644 index 0000000..b9e000c --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_only/__init__.py @@ -0,0 +1,52 @@ +import sh +from os.path import join +from pythonforandroid.toolchain import ( + Bootstrap, current_directory, info, info_main, shprint) +from pythonforandroid.util import ensure_dir + + +class ServiceOnlyBootstrap(Bootstrap): + + name = 'service_only' + + recipe_depends = list( + set(Bootstrap.recipe_depends).union({'genericndkbuild'}) + ) + + def assemble_distribution(self): + info_main('# Creating Android project from build and {} bootstrap'.format( + self.name)) + + info('This currently just copies the build stuff straight from the build dir.') + shprint(sh.rm, '-rf', self.dist_dir) + shprint(sh.cp, '-r', self.build_dir, self.dist_dir) + with current_directory(self.dist_dir): + with open('local.properties', 'w') as fileh: + fileh.write('sdk.dir={}'.format(self.ctx.sdk_dir)) + + with current_directory(self.dist_dir): + info('Copying python distribution') + + self.distribute_javaclasses(self.ctx.javaclass_dir, + dest_dir=join("src", "main", "java")) + + for arch in self.ctx.archs: + self.distribute_libs(arch, [self.ctx.get_libs_dir(arch.arch)]) + self.distribute_aars(arch) + + python_bundle_dir = join(f'_python_bundle__{arch.arch}', '_python_bundle') + ensure_dir(python_bundle_dir) + site_packages_dir = self.ctx.python_recipe.create_python_bundle( + join(self.dist_dir, python_bundle_dir), arch) + if not self.ctx.with_debug_symbols: + self.strip_libraries(arch) + self.fry_eggs(site_packages_dir) + + if 'sqlite3' not in self.ctx.recipe_build_order: + with open('blacklist.txt', 'a') as fileh: + fileh.write('\nsqlite3/*\nlib-dynload/_sqlite3.so\n') + + super().assemble_distribution() + + +bootstrap = ServiceOnlyBootstrap() diff --git a/p4a/pythonforandroid/bootstraps/service_only/build/blacklist.txt b/p4a/pythonforandroid/bootstraps/service_only/build/blacklist.txt new file mode 100644 index 0000000..53cc634 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_only/build/blacklist.txt @@ -0,0 +1,91 @@ +# prevent user to include invalid extensions +*.apk +*.aab +*.apks +*.pxd + +# eggs +*.egg-info + +# unit test +unittest/* + +# python config +config/makesetup + +# unused kivy files (platform specific) +kivy/input/providers/wm_* +kivy/input/providers/mactouch* +kivy/input/providers/probesysfs* +kivy/input/providers/mtdev* +kivy/input/providers/hidinput* +kivy/core/camera/camera_videocapture* +kivy/core/spelling/*osx* +kivy/core/video/video_pyglet* +kivy/tools +kivy/tests/* +kivy/*/*.h +kivy/*/*.pxi + +# unused encodings +lib-dynload/*codec* +encodings/cp*.pyo 
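+# (entries in this file are shell-style glob patterns matched against paths
+# inside the python bundle; encodings/cp*.pyo above, for instance, drops
+# every compiled cp* codec)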
+encodings/tis* +encodings/shift* +encodings/bz2* +encodings/iso* +encodings/undefined* +encodings/johab* +encodings/p* +encodings/m* +encodings/euc* +encodings/k* +encodings/unicode_internal* +encodings/quo* +encodings/gb* +encodings/big5* +encodings/hp* +encodings/hz* + +# unused python modules +bsddb/* +wsgiref/* +hotshot/* +pydoc_data/* +tty.pyo +anydbm.pyo +nturl2path.pyo +LICENCE.txt +macurl2path.pyo +dummy_threading.pyo +audiodev.pyo +antigravity.pyo +dumbdbm.pyo +sndhdr.pyo +__phello__.foo.pyo +sunaudio.pyo +os2emxpath.pyo +multiprocessing/dummy* + +# unused binaries python modules +lib-dynload/termios.so +lib-dynload/_lsprof.so +lib-dynload/*audioop.so +lib-dynload/_hotshot.so +lib-dynload/_heapq.so +lib-dynload/_json.so +lib-dynload/grp.so +lib-dynload/resource.so +lib-dynload/pyexpat.so +lib-dynload/_ctypes_test.so +lib-dynload/_testcapi.so + +# odd files +plat-linux3/regen + +#>sqlite3 +# conditionnal include depending if some recipes are included or not. +sqlite3/* +lib-dynload/_sqlite3.so +# entryPoints = new ArrayList(); + entryPoints.add("main.pyc"); // python 3 compiled files + for (String value : entryPoints) { + File mainFile = new File(search_dir + "/" + value); + if (mainFile.exists()) { + return value; + } + } + return "main.py"; + } + + public static void initialize() { + // The static nature of the singleton and Android quirkiness force us to initialize everything here + // Otherwise, when exiting the app and returning to it, these variables *keep* their pre exit values + mBrokenLibraries = false; + } + + @Override + protected void onCreate(Bundle savedInstanceState) { + Log.v(TAG, "My oncreate running"); + resourceManager = new ResourceManager(this); + + Log.v(TAG, "Ready to unpack"); + File app_root_file = new File(getAppRoot()); + PythonUtil.unpackAsset(mActivity, "private", app_root_file, true); + PythonUtil.unpackPyBundle(mActivity, getApplicationInfo().nativeLibraryDir + "/" + "libpybundle", app_root_file, false); + + Log.v(TAG, "About to do super onCreate"); + super.onCreate(savedInstanceState); + Log.v(TAG, "Did super onCreate"); + + this.mActivity = this; + //this.showLoadingScreen(); + Log.v("Python", "Device: " + android.os.Build.DEVICE); + Log.v("Python", "Model: " + android.os.Build.MODEL); + + //Log.v(TAG, "Ready to unpack"); + //new UnpackFilesTask().execute(getAppRoot()); + + PythonActivity.initialize(); + + // Load shared libraries + String errorMsgBrokenLib = ""; + try { + loadLibraries(); + } catch(UnsatisfiedLinkError e) { + System.err.println(e.getMessage()); + mBrokenLibraries = true; + errorMsgBrokenLib = e.getMessage(); + } catch(Exception e) { + System.err.println(e.getMessage()); + mBrokenLibraries = true; + errorMsgBrokenLib = e.getMessage(); + } + + if (mBrokenLibraries) + { + AlertDialog.Builder dlgAlert = new AlertDialog.Builder(this); + dlgAlert.setMessage("An error occurred while trying to load the application libraries. Please try again and/or reinstall." 
+ + System.getProperty("line.separator") + + System.getProperty("line.separator") + + "Error: " + errorMsgBrokenLib); + dlgAlert.setTitle("Python Error"); + dlgAlert.setPositiveButton("Exit", + new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog,int id) { + // if this button is clicked, close current activity + PythonActivity.mActivity.finish(); + } + }); + dlgAlert.setCancelable(false); + dlgAlert.create().show(); + + return; + } + + // Set up the Python environment + String app_root_dir = getAppRoot(); + String mFilesDirectory = mActivity.getFilesDir().getAbsolutePath(); + String entry_point = getEntryPoint(app_root_dir); + + Log.v(TAG, "Setting env vars for start.c and Python to use"); + PythonActivity.nativeSetenv("ANDROID_ENTRYPOINT", entry_point); + PythonActivity.nativeSetenv("ANDROID_ARGUMENT", app_root_dir); + PythonActivity.nativeSetenv("ANDROID_APP_PATH", app_root_dir); + PythonActivity.nativeSetenv("ANDROID_PRIVATE", mFilesDirectory); + PythonActivity.nativeSetenv("ANDROID_UNPACK", app_root_dir); + PythonActivity.nativeSetenv("PYTHONHOME", app_root_dir); + PythonActivity.nativeSetenv("PYTHONPATH", app_root_dir + ":" + app_root_dir + "/lib"); + PythonActivity.nativeSetenv("PYTHONOPTIMIZE", "2"); + + try { + Log.v(TAG, "Access to our meta-data..."); + mActivity.mMetaData = mActivity.getPackageManager().getApplicationInfo( + mActivity.getPackageName(), PackageManager.GET_META_DATA).metaData; + + PowerManager pm = (PowerManager) mActivity.getSystemService(Context.POWER_SERVICE); + if ( mActivity.mMetaData.getInt("wakelock") == 1 ) { + mActivity.mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "Screen On"); + mActivity.mWakeLock.acquire(); + } + } catch (PackageManager.NameNotFoundException e) { + } + + final Thread pythonThread = new Thread(new PythonMain(), "PythonThread"); + PythonActivity.mPythonThread = pythonThread; + pythonThread.start(); + + } + + @Override + public void onDestroy() { + Log.i("Destroy", "end of app"); + super.onDestroy(); + + // make sure all child threads (python_thread) are stopped + android.os.Process.killProcess(android.os.Process.myPid()); + } + + public void loadLibraries() { + String app_root = new String(getAppRoot()); + File app_root_file = new File(app_root); + PythonUtil.loadLibraries(app_root_file, + new File(getApplicationInfo().nativeLibraryDir)); + } + + long lastBackClick = 0; + @Override + public boolean onKeyDown(int keyCode, KeyEvent event) { + // Check if the key event was the Back button + if (keyCode == KeyEvent.KEYCODE_BACK) { + // If there's no web page history, bubble up to the default + // system behavior (probably exit the activity) + if (SystemClock.elapsedRealtime() - lastBackClick > 2000){ + lastBackClick = SystemClock.elapsedRealtime(); + Toast.makeText(this, "Tap again to close the app", Toast.LENGTH_LONG).show(); + return true; + } + + lastBackClick = SystemClock.elapsedRealtime(); + } + + return super.onKeyDown(keyCode, event); + } + + + //---------------------------------------------------------------------------- + // Listener interface for onNewIntent + // + + public interface NewIntentListener { + void onNewIntent(Intent intent); + } + + private List newIntentListeners = null; + + public void registerNewIntentListener(NewIntentListener listener) { + if ( this.newIntentListeners == null ) + this.newIntentListeners = Collections.synchronizedList(new ArrayList()); + this.newIntentListeners.add(listener); + } + + public void 
unregisterNewIntentListener(NewIntentListener listener) { + if ( this.newIntentListeners == null ) + return; + this.newIntentListeners.remove(listener); + } + + @Override + protected void onNewIntent(Intent intent) { + if ( this.newIntentListeners == null ) + return; + this.onResume(); + synchronized ( this.newIntentListeners ) { + Iterator iterator = this.newIntentListeners.iterator(); + while ( iterator.hasNext() ) { + (iterator.next()).onNewIntent(intent); + } + } + } + + //---------------------------------------------------------------------------- + // Listener interface for onActivityResult + // + + public interface ActivityResultListener { + void onActivityResult(int requestCode, int resultCode, Intent data); + } + + private List activityResultListeners = null; + + public void registerActivityResultListener(ActivityResultListener listener) { + if ( this.activityResultListeners == null ) + this.activityResultListeners = Collections.synchronizedList(new ArrayList()); + this.activityResultListeners.add(listener); + } + + public void unregisterActivityResultListener(ActivityResultListener listener) { + if ( this.activityResultListeners == null ) + return; + this.activityResultListeners.remove(listener); + } + + @Override + protected void onActivityResult(int requestCode, int resultCode, Intent intent) { + if ( this.activityResultListeners == null ) + return; + this.onResume(); + synchronized ( this.activityResultListeners ) { + Iterator iterator = this.activityResultListeners.iterator(); + while ( iterator.hasNext() ) + (iterator.next()).onActivityResult(requestCode, resultCode, intent); + } + } + + public static void start_service( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument + ) { + _do_start_service( + serviceTitle, serviceDescription, pythonServiceArgument, true + ); + } + + public static void start_service_not_as_foreground( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument + ) { + _do_start_service( + serviceTitle, serviceDescription, pythonServiceArgument, false + ); + } + + public static void _do_start_service( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument, + boolean showForegroundNotification + ) { + Intent serviceIntent = new Intent(PythonActivity.mActivity, PythonService.class); + String argument = PythonActivity.mActivity.getFilesDir().getAbsolutePath(); + String app_root_dir = PythonActivity.mActivity.getAppRoot(); + String entry_point = PythonActivity.mActivity.getEntryPoint(app_root_dir + "/service"); + serviceIntent.putExtra("androidPrivate", argument); + serviceIntent.putExtra("androidArgument", app_root_dir); + serviceIntent.putExtra("serviceEntrypoint", "service/" + entry_point); + serviceIntent.putExtra("pythonName", "python"); + serviceIntent.putExtra("pythonHome", app_root_dir); + serviceIntent.putExtra("pythonPath", app_root_dir + ":" + app_root_dir + "/lib"); + serviceIntent.putExtra("serviceStartAsForeground", + (showForegroundNotification ? 
"true" : "false") + ); + serviceIntent.putExtra("serviceTitle", serviceTitle); + serviceIntent.putExtra("serviceDescription", serviceDescription); + serviceIntent.putExtra("pythonServiceArgument", pythonServiceArgument); + PythonActivity.mActivity.startService(serviceIntent); + } + + public static void stop_service() { + Intent serviceIntent = new Intent(PythonActivity.mActivity, PythonService.class); + PythonActivity.mActivity.stopService(serviceIntent); + } + + + public static native void nativeSetenv(String name, String value); + public static native int nativeInit(Object arguments); + +} + + +class PythonMain implements Runnable { + @Override + public void run() { + PythonActivity.nativeInit(new String[0]); + } +} diff --git a/p4a/pythonforandroid/bootstraps/service_only/build/src/main/jniLibs/.gitkeep b/p4a/pythonforandroid/bootstraps/service_only/build/src/main/jniLibs/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/service_only/build/src/main/res/drawable/.gitkeep b/p4a/pythonforandroid/bootstraps/service_only/build/src/main/res/drawable/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/service_only/build/src/main/res/mipmap/.gitkeep b/p4a/pythonforandroid/bootstraps/service_only/build/src/main/res/mipmap/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/service_only/build/templates/AndroidManifest.tmpl.xml b/p4a/pythonforandroid/bootstraps/service_only/build/templates/AndroidManifest.tmpl.xml new file mode 100644 index 0000000..d19ed32 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_only/build/templates/AndroidManifest.tmpl.xml @@ -0,0 +1,101 @@ + + + + = 9 %} + android:xlargeScreens="true" + {% endif %} + /> + + + + + + {% for perm in args.permissions %} + {% if '.' 
in perm %} + + {% else %} + + {% endif %} + {% endfor %} + + {% if args.wakelock %} + + {% endif %} + + {% if args.billing_pubkey %} + + {% endif %} + + + + {% for l in args.android_used_libs %} + + {% endfor %} + {% for m in args.meta_data %} + {% endfor %} + + + + + + + + {%- if args.intent_filters -%} + {{- args.intent_filters -}} + {%- endif -%} + + + {% if service %} + + {% endif %} + {% for name in service_names %} + + {% endfor %} + + {% if args.billing_pubkey %} + + + + + + + + + {% endif %} + + + diff --git a/p4a/pythonforandroid/bootstraps/service_only/build/templates/Service.tmpl.java b/p4a/pythonforandroid/bootstraps/service_only/build/templates/Service.tmpl.java new file mode 100644 index 0000000..598549d --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_only/build/templates/Service.tmpl.java @@ -0,0 +1,70 @@ +package {{ args.package }}; + +import android.os.Binder; +import android.os.IBinder; +import android.content.Intent; +import android.content.Context; +import org.kivy.android.PythonService; + +public class Service{{ name|capitalize }} extends PythonService { + /** + * Binder given to clients + */ + private final IBinder mBinder = new Service{{ name|capitalize }}Binder(); + + {% if sticky %} + /** + * {@inheritDoc} + */ + @Override + public int startType() { + return START_STICKY; + } + {% endif %} + + @Override + protected int getServiceId() { + return {{ service_id }}; + } + + public static void start(Context ctx, String pythonServiceArgument) { + String argument = ctx.getFilesDir().getAbsolutePath() + "/app"; + Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class); + intent.putExtra("androidPrivate", argument); + intent.putExtra("androidArgument", argument); + intent.putExtra("serviceEntrypoint", "{{ entrypoint }}"); + intent.putExtra("serviceTitle", "{{ name|capitalize }}"); + intent.putExtra("serviceDescription", ""); + intent.putExtra("pythonName", "{{ name }}"); + intent.putExtra("serviceStartAsForeground", "{{ foreground|lower }}"); + intent.putExtra("pythonHome", argument); + intent.putExtra("androidUnpack", argument); + intent.putExtra("pythonPath", argument + ":" + argument + "/lib"); + intent.putExtra("pythonServiceArgument", pythonServiceArgument); + ctx.startService(intent); + } + + public static void stop(Context ctx) { + Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class); + ctx.stopService(intent); + } + + /** + * {@inheritDoc} + */ + @Override + public IBinder onBind(Intent intent) { + return mBinder; + } + + /** + * Class used for the client Binder. Because we know this service always + * runs in the same process as its clients, we don't need to deal with IPC. 
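+     *
+     * A same-process client would typically obtain the service like this
+     * (sketch only; the ServiceConnection shown is supplied by the caller):
+     *
+     *   bindService(new Intent(this, Service{{ name|capitalize }}.class),
+     *       new ServiceConnection() {
+     *           public void onServiceConnected(ComponentName cn, IBinder b) {
+     *               Service{{ name|capitalize }} svc =
+     *                   ((Service{{ name|capitalize }}Binder) b).getService();
+     *           }
+     *           public void onServiceDisconnected(ComponentName cn) {}
+     *       }, Context.BIND_AUTO_CREATE);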
+ */ + public class Service{{ name|capitalize }}Binder extends Binder { + Service{{ name|capitalize }} getService() { + // Return this instance of Service{{ name|capitalize }} so clients can call public methods + return Service{{ name|capitalize }}.this; + } + } +} diff --git a/p4a/pythonforandroid/bootstraps/service_only/build/templates/strings.tmpl.xml b/p4a/pythonforandroid/bootstraps/service_only/build/templates/strings.tmpl.xml new file mode 100644 index 0000000..2286657 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/service_only/build/templates/strings.tmpl.xml @@ -0,0 +1,5 @@ + + + {{ args.name }} + {{ private_version }} + diff --git a/p4a/pythonforandroid/bootstraps/webview/__init__.py b/p4a/pythonforandroid/bootstraps/webview/__init__.py new file mode 100644 index 0000000..da33ac7 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/__init__.py @@ -0,0 +1,49 @@ +from pythonforandroid.toolchain import Bootstrap, current_directory, info, info_main, shprint +from pythonforandroid.util import ensure_dir +from os.path import join +import sh + + +class WebViewBootstrap(Bootstrap): + name = 'webview' + + recipe_depends = list( + set(Bootstrap.recipe_depends).union({'genericndkbuild'}) + ) + + def assemble_distribution(self): + info_main('# Creating Android project from build and {} bootstrap'.format( + self.name)) + + shprint(sh.rm, '-rf', self.dist_dir) + shprint(sh.cp, '-r', self.build_dir, self.dist_dir) + with current_directory(self.dist_dir): + with open('local.properties', 'w') as fileh: + fileh.write('sdk.dir={}'.format(self.ctx.sdk_dir)) + + with current_directory(self.dist_dir): + info('Copying python distribution') + + self.distribute_javaclasses(self.ctx.javaclass_dir, + dest_dir=join("src", "main", "java")) + + for arch in self.ctx.archs: + self.distribute_libs(arch, [self.ctx.get_libs_dir(arch.arch)]) + self.distribute_aars(arch) + + python_bundle_dir = join(f'_python_bundle__{arch.arch}', '_python_bundle') + ensure_dir(python_bundle_dir) + site_packages_dir = self.ctx.python_recipe.create_python_bundle( + join(self.dist_dir, python_bundle_dir), arch) + if not self.ctx.with_debug_symbols: + self.strip_libraries(arch) + self.fry_eggs(site_packages_dir) + + if 'sqlite3' not in self.ctx.recipe_build_order: + with open('blacklist.txt', 'a') as fileh: + fileh.write('\nsqlite3/*\nlib-dynload/_sqlite3.so\n') + + super().assemble_distribution() + + +bootstrap = WebViewBootstrap() diff --git a/p4a/pythonforandroid/bootstraps/webview/build/blacklist.txt b/p4a/pythonforandroid/bootstraps/webview/build/blacklist.txt new file mode 100644 index 0000000..53cc634 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/blacklist.txt @@ -0,0 +1,91 @@ +# prevent user to include invalid extensions +*.apk +*.aab +*.apks +*.pxd + +# eggs +*.egg-info + +# unit test +unittest/* + +# python config +config/makesetup + +# unused kivy files (platform specific) +kivy/input/providers/wm_* +kivy/input/providers/mactouch* +kivy/input/providers/probesysfs* +kivy/input/providers/mtdev* +kivy/input/providers/hidinput* +kivy/core/camera/camera_videocapture* +kivy/core/spelling/*osx* +kivy/core/video/video_pyglet* +kivy/tools +kivy/tests/* +kivy/*/*.h +kivy/*/*.pxi + +# unused encodings +lib-dynload/*codec* +encodings/cp*.pyo +encodings/tis* +encodings/shift* +encodings/bz2* +encodings/iso* +encodings/undefined* +encodings/johab* +encodings/p* +encodings/m* +encodings/euc* +encodings/k* +encodings/unicode_internal* +encodings/quo* +encodings/gb* +encodings/big5* +encodings/hp* 
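+# (same stdlib blacklist as the sdl2 and service_only bootstraps; stripping
+# the rarely-used codecs and modules listed here keeps the packaged python
+# bundle small)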
+encodings/hz*
+
+# unused python modules
+bsddb/*
+wsgiref/*
+hotshot/*
+pydoc_data/*
+tty.pyo
+anydbm.pyo
+nturl2path.pyo
+LICENCE.txt
+macurl2path.pyo
+dummy_threading.pyo
+audiodev.pyo
+antigravity.pyo
+dumbdbm.pyo
+sndhdr.pyo
+__phello__.foo.pyo
+sunaudio.pyo
+os2emxpath.pyo
+multiprocessing/dummy*
+
+# unused binary python modules
+lib-dynload/termios.so
+lib-dynload/_lsprof.so
+lib-dynload/*audioop.so
+lib-dynload/_hotshot.so
+lib-dynload/_heapq.so
+lib-dynload/_json.so
+lib-dynload/grp.so
+lib-dynload/resource.so
+lib-dynload/pyexpat.so
+lib-dynload/_ctypes_test.so
+lib-dynload/_testcapi.so
+
+# odd files
+plat-linux3/regen
+
+#>sqlite3
+# conditional include, depending on whether some recipes are included or not.
+sqlite3/*
+lib-dynload/_sqlite3.so
+#<sqlite3
+#include <pthread.h>
+
+#define LOGI(...) do {} while (0)
+#define LOGE(...) do {} while (0)
+
+#include "android/log.h"
+
+/* These JNI management functions are taken from SDL2, but modified to refer to pyjnius */
+
+/* #define LOG(n, x) __android_log_write(ANDROID_LOG_INFO, (n), (x)) */
+/* #define LOGP(x) LOG("python", (x)) */
+#define LOG_TAG "Python_android"
+#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
+#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
+
+
+/* Function headers */
+JNIEnv* Android_JNI_GetEnv(void);
+static void Android_JNI_ThreadDestroyed(void*);
+
+static pthread_key_t mThreadKey;
+static JavaVM* mJavaVM;
+
+int Android_JNI_SetupThread(void)
+{
+    Android_JNI_GetEnv();
+    return 1;
+}
+
+/* Library init */
+JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved)
+{
+    JNIEnv *env;
+    mJavaVM = vm;
+    LOGI("JNI_OnLoad called");
+    if ((*mJavaVM)->GetEnv(mJavaVM, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+        LOGE("Failed to get the environment using GetEnv()");
+        return -1;
+    }
+    /*
+     * Create mThreadKey so we can keep track of the JNIEnv assigned to each thread
+     * Refer to http://developer.android.com/guide/practices/design/jni.html for the rationale behind this
+     */
+    if (pthread_key_create(&mThreadKey, Android_JNI_ThreadDestroyed) != 0) {
+
+        __android_log_print(ANDROID_LOG_ERROR, "pyjniusjni", "Error initializing pthread key");
+    }
+    Android_JNI_SetupThread();
+
+    return JNI_VERSION_1_4;
+}
+
+JNIEnv* Android_JNI_GetEnv(void)
+{
+    /* From http://developer.android.com/guide/practices/jni.html
+     * All threads are Linux threads, scheduled by the kernel.
+     * They're usually started from managed code (using Thread.start), but they can also be created elsewhere and then
+     * attached to the JavaVM. For example, a thread started with pthread_create can be attached with the
+     * JNI AttachCurrentThread or AttachCurrentThreadAsDaemon functions. Until a thread is attached, it has no JNIEnv,
+     * and cannot make JNI calls.
+     * Attaching a natively-created thread causes a java.lang.Thread object to be constructed and added to the "main"
+     * ThreadGroup, making it visible to the debugger. Calling AttachCurrentThread on an already-attached thread
+     * is a no-op.
+     * Note: You can call this function any number of times for the same thread, there's no harm in it
+     */
+
+    JNIEnv *env;
+    int status = (*mJavaVM)->AttachCurrentThread(mJavaVM, &env, NULL);
+    if(status < 0) {
+        LOGE("failed to attach current thread");
+        return 0;
+    }
+
+    /* From http://developer.android.com/guide/practices/jni.html
+     * Threads attached through JNI must call DetachCurrentThread before they exit.
If coding this directly is awkward, + * in Android 2.0 (Eclair) and higher you can use pthread_key_create to define a destructor function that will be + * called before the thread exits, and call DetachCurrentThread from there. (Use that key with pthread_setspecific + * to store the JNIEnv in thread-local-storage; that way it'll be passed into your destructor as the argument.) + * Note: The destructor is not called unless the stored value is != NULL + * Note: You can call this function any number of times for the same thread, there's no harm in it + * (except for some lost CPU cycles) + */ + pthread_setspecific(mThreadKey, (void*) env); + + return env; +} + +static void Android_JNI_ThreadDestroyed(void* value) +{ + /* The thread is being destroyed, detach it from the Java VM and set the mThreadKey value to NULL as required */ + JNIEnv *env = (JNIEnv*) value; + if (env != NULL) { + (*mJavaVM)->DetachCurrentThread(mJavaVM); + pthread_setspecific(mThreadKey, NULL); + } +} + +void *WebView_AndroidGetJNIEnv() +{ + return Android_JNI_GetEnv(); +} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/proguard-project.txt b/p4a/pythonforandroid/bootstraps/webview/build/proguard-project.txt similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/proguard-project.txt rename to p4a/pythonforandroid/bootstraps/webview/build/proguard-project.txt diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/assets/.gitkeep b/p4a/pythonforandroid/bootstraps/webview/build/src/main/assets/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java b/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java new file mode 100644 index 0000000..58a1c5e --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java @@ -0,0 +1,19 @@ +package org.kivy.android; + +import android.content.BroadcastReceiver; +import android.content.Intent; +import android.content.Context; + +public class GenericBroadcastReceiver extends BroadcastReceiver { + + GenericBroadcastReceiverCallback listener; + + public GenericBroadcastReceiver(GenericBroadcastReceiverCallback listener) { + super(); + this.listener = listener; + } + + public void onReceive(Context context, Intent intent) { + this.listener.onReceive(context, intent); + } +} diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java b/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java new file mode 100644 index 0000000..1a87c98 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java @@ -0,0 +1,8 @@ +package org.kivy.android; + +import android.content.Intent; +import android.content.Context; + +public interface GenericBroadcastReceiverCallback { + void onReceive(Context context, Intent intent); +}; diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/PythonActivity.java b/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/PythonActivity.java new file mode 100644 index 0000000..2f0afdc --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/PythonActivity.java @@ -0,0 +1,572 @@ +package org.kivy.android; + +import 
android.os.SystemClock;
+
+import java.io.InputStream;
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ArrayList;
+
+import android.view.ViewGroup;
+import android.view.KeyEvent;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.util.Log;
+import android.widget.Toast;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.os.PowerManager;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.widget.ImageView;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Color;
+
+import android.widget.AbsoluteLayout;
+import android.view.ViewGroup.LayoutParams;
+
+import android.webkit.WebBackForwardList;
+import android.webkit.WebViewClient;
+import android.webkit.WebView;
+import android.webkit.CookieManager;
+import android.net.Uri;
+
+import org.renpy.android.ResourceManager;
+
+public class PythonActivity extends Activity {
+    // This activity is modified from a mixture of the SDLActivity and
+    // PythonActivity in the SDL2 bootstrap, but with all the SDL2
+    // specifics removed.
+
+    private static final String TAG = "PythonActivity";
+
+    public static PythonActivity mActivity = null;
+    public static boolean mOpenExternalLinksInBrowser = false;
+
+    /** If shared libraries (e.g. SDL or the native application) could not be loaded. */
+    public static boolean mBrokenLibraries;
+
+    protected static ViewGroup mLayout;
+    protected static WebView mWebView;
+
+    protected static Thread mPythonThread;
+
+    private ResourceManager resourceManager = null;
+    private Bundle mMetaData = null;
+    private PowerManager.WakeLock mWakeLock = null;
+
+    public String getAppRoot() {
+        String app_root = getFilesDir().getAbsolutePath() + "/app";
+        return app_root;
+    }
+
+    public String getEntryPoint(String search_dir) {
+        /* Get the main file (.pyc|.py) depending on whether we
+         * have a compiled version or not.
+         */
+        List<String> entryPoints = new ArrayList<String>();
+        entryPoints.add("main.pyc");  // python 3 compiled files
+        for (String value : entryPoints) {
+            File mainFile = new File(search_dir + "/" + value);
+            if (mainFile.exists()) {
+                return value;
+            }
+        }
+        return "main.py";
+    }
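For clarity, the entrypoint rule above can be restated as a hedged Python sketch (the function name is invented, and this is not part of the patch): a compiled bundle ships `main.pyc`, which is preferred, while `main.py` is the source-bundle fallback.

```python
import os

def resolve_entrypoint(search_dir):
    # Mirrors getEntryPoint(): prefer the python 3 compiled file...
    for candidate in ("main.pyc",):
        if os.path.exists(os.path.join(search_dir, candidate)):
            return candidate
    return "main.py"  # ...and fall back to plain source
```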
+
+    public static void initialize() {
+        // The static nature of the singleton and Android quirkiness force us to initialize everything here
+        // Otherwise, when exiting the app and returning to it, these variables *keep* their pre-exit values
+        mWebView = null;
+        mLayout = null;
+        mBrokenLibraries = false;
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        Log.v(TAG, "onCreate running");
+        resourceManager = new ResourceManager(this);
+        super.onCreate(savedInstanceState);
+
+        this.mActivity = this;
+        this.showLoadingScreen();
+        new UnpackFilesTask().execute(getAppRoot());
+    }
+
+    private class UnpackFilesTask extends AsyncTask<String, String, String> {
+        @Override
+        protected String doInBackground(String... params) {
+            File app_root_file = new File(params[0]);
+            Log.v(TAG, "Ready to unpack");
+            PythonUtil.unpackAsset(mActivity, "private", app_root_file, true);
+            PythonUtil.unpackPyBundle(mActivity, getApplicationInfo().nativeLibraryDir + "/" + "libpybundle", app_root_file, false);
+            return null;
+        }
+
+        @Override
+        protected void onPostExecute(String result) {
+            Log.v("Python", "Device: " + android.os.Build.DEVICE);
+            Log.v("Python", "Model: " + android.os.Build.MODEL);
+
+            PythonActivity.initialize();
+
+            // Load shared libraries
+            String errorMsgBrokenLib = "";
+            try {
+                loadLibraries();
+            } catch(UnsatisfiedLinkError e) {
+                System.err.println(e.getMessage());
+                mBrokenLibraries = true;
+                errorMsgBrokenLib = e.getMessage();
+            } catch(Exception e) {
+                System.err.println(e.getMessage());
+                mBrokenLibraries = true;
+                errorMsgBrokenLib = e.getMessage();
+            }
+
+            if (mBrokenLibraries)
+            {
+                AlertDialog.Builder dlgAlert = new AlertDialog.Builder(PythonActivity.mActivity);
+                dlgAlert.setMessage("An error occurred while trying to load the application libraries. Please try again and/or reinstall."
+                    + System.getProperty("line.separator")
+                    + System.getProperty("line.separator")
+                    + "Error: " + errorMsgBrokenLib);
+                dlgAlert.setTitle("Python Error");
+                dlgAlert.setPositiveButton("Exit",
+                    new DialogInterface.OnClickListener() {
+                        @Override
+                        public void onClick(DialogInterface dialog,int id) {
+                            // if this button is clicked, close current activity
+                            PythonActivity.mActivity.finish();
+                        }
+                    });
+                dlgAlert.setCancelable(false);
+                dlgAlert.create().show();
+
+                return;
+            }
+
+            // Set up the webview
+            String app_root_dir = getAppRoot();
+
+            mWebView = new WebView(PythonActivity.mActivity);
+            mWebView.getSettings().setJavaScriptEnabled(true);
+            mWebView.getSettings().setDomStorageEnabled(true);
+            mWebView.loadUrl("file:///android_asset/_load.html");
+
+            mWebView.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
+            mWebView.setWebViewClient(new WebViewClient() {
+                @Override
+                public boolean shouldOverrideUrlLoading(WebView view, String url) {
+                    Uri u = Uri.parse(url);
+                    if (mOpenExternalLinksInBrowser) {
+                        if (!(u.getScheme().equals("file") || u.getHost().equals("127.0.0.1"))) {
+                            Intent i = new Intent(Intent.ACTION_VIEW, u);
+                            startActivity(i);
+                            return true;
+                        }
+                    }
+                    return false;
+                }
+
+                @Override
+                public void onPageFinished(WebView view, String url) {
+                    CookieManager.getInstance().flush();
+                }
+            });
+            mLayout = new AbsoluteLayout(PythonActivity.mActivity);
+            mLayout.addView(mWebView);
+
+            setContentView(mLayout);
+
+            String mFilesDirectory = mActivity.getFilesDir().getAbsolutePath();
+            String entry_point = getEntryPoint(app_root_dir);
+
+            Log.v(TAG, "Setting env vars for start.c and Python to use");
+            PythonActivity.nativeSetenv("ANDROID_ENTRYPOINT", entry_point);
+            PythonActivity.nativeSetenv("ANDROID_ARGUMENT", app_root_dir);
+            PythonActivity.nativeSetenv("ANDROID_APP_PATH", app_root_dir);
+            PythonActivity.nativeSetenv("ANDROID_PRIVATE", mFilesDirectory);
+            PythonActivity.nativeSetenv("ANDROID_UNPACK", app_root_dir);
+            PythonActivity.nativeSetenv("PYTHONHOME", app_root_dir);
+            PythonActivity.nativeSetenv("PYTHONPATH", app_root_dir + ":" + app_root_dir + "/lib");
+            PythonActivity.nativeSetenv("PYTHONOPTIMIZE", "2");
+
+            try {
+                Log.v(TAG, "Access to our meta-data...");
+                mActivity.mMetaData = mActivity.getPackageManager().getApplicationInfo(
+                        mActivity.getPackageName(), PackageManager.GET_META_DATA).metaData;
+
+                PowerManager pm = (PowerManager) 
mActivity.getSystemService(Context.POWER_SERVICE); + if ( mActivity.mMetaData.getInt("wakelock") == 1 ) { + mActivity.mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "Screen On"); + mActivity.mWakeLock.acquire(); + } + } catch (PackageManager.NameNotFoundException e) { + } + + final Thread pythonThread = new Thread(new PythonMain(), "PythonThread"); + PythonActivity.mPythonThread = pythonThread; + pythonThread.start(); + + final Thread wvThread = new Thread(new WebViewLoaderMain(), "WvThread"); + wvThread.start(); + } + } + + @Override + public void onDestroy() { + Log.i("Destroy", "end of app"); + super.onDestroy(); + + // make sure all child threads (python_thread) are stopped + android.os.Process.killProcess(android.os.Process.myPid()); + } + + public void loadLibraries() { + String app_root = new String(getAppRoot()); + File app_root_file = new File(app_root); + PythonUtil.loadLibraries(app_root_file, + new File(getApplicationInfo().nativeLibraryDir)); + } + + public static void loadUrl(String url) { + class LoadUrl implements Runnable { + private String mUrl; + + public LoadUrl(String url) { + mUrl = url; + } + + public void run() { + mWebView.loadUrl(mUrl); + } + } + + Log.i(TAG, "Opening URL: " + url); + mActivity.runOnUiThread(new LoadUrl(url)); + } + + public static void enableZoom() { + mActivity.runOnUiThread(new Runnable() { + @Override + public void run() { + mWebView.getSettings().setBuiltInZoomControls(true); + mWebView.getSettings().setDisplayZoomControls(false); + } + }); + } + + public static ViewGroup getLayout() { + return mLayout; + } + + long lastBackClick = 0; + @Override + public boolean onKeyDown(int keyCode, KeyEvent event) { + // Check if the key event was the Back button + if (keyCode == KeyEvent.KEYCODE_BACK) { + // Go back if there is web page history behind, + // but not to the start preloader + WebBackForwardList webViewBackForwardList = mWebView.copyBackForwardList(); + if (webViewBackForwardList.getCurrentIndex() > 1) { + mWebView.goBack(); + return true; + } + + // If there's no web page history, bubble up to the default + // system behavior (probably exit the activity) + if (SystemClock.elapsedRealtime() - lastBackClick > 2000){ + lastBackClick = SystemClock.elapsedRealtime(); + Toast.makeText(this, "Tap again to close the app", Toast.LENGTH_LONG).show(); + return true; + } + + lastBackClick = SystemClock.elapsedRealtime(); + } + + return super.onKeyDown(keyCode, event); + } + + // loading screen implementation + public static ImageView mImageView = null; + public void removeLoadingScreen() { + runOnUiThread(new Runnable() { + public void run() { + if (PythonActivity.mImageView != null && + PythonActivity.mImageView.getParent() != null) { + ((ViewGroup)PythonActivity.mImageView.getParent()).removeView( + PythonActivity.mImageView); + PythonActivity.mImageView = null; + } + } + }); + } + + protected void showLoadingScreen() { + // load the bitmap + // 1. if the image is valid and we don't have layout yet, assign this bitmap + // as main view. + // 2. if we have a layout, just set it in the layout. + // 3. If we have an mImageView already, then do nothing because it will have + // already been made the content view or added to the layout. 
+
+        if (mImageView == null) {
+            int presplashId = this.resourceManager.getIdentifier("presplash", "drawable");
+            InputStream is = this.getResources().openRawResource(presplashId);
+            Bitmap bitmap = null;
+            try {
+                bitmap = BitmapFactory.decodeStream(is);
+            } finally {
+                try {
+                    is.close();
+                } catch (IOException e) {};
+            }
+
+            mImageView = new ImageView(this);
+            mImageView.setImageBitmap(bitmap);
+
+            /*
+             * Set the presplash loading screen background color
+             * https://developer.android.com/reference/android/graphics/Color.html
+             * Parse the color string, and return the corresponding color-int.
+             * If the string cannot be parsed, throws an IllegalArgumentException exception.
+             * Supported formats are: #RRGGBB #AARRGGBB or one of the following names:
+             * 'red', 'blue', 'green', 'black', 'white', 'gray', 'cyan', 'magenta', 'yellow',
+             * 'lightgray', 'darkgray', 'grey', 'lightgrey', 'darkgrey', 'aqua', 'fuchsia',
+             * 'lime', 'maroon', 'navy', 'olive', 'purple', 'silver', 'teal'.
+             */
+            String backgroundColor = resourceManager.getString("presplash_color");
+            if (backgroundColor != null) {
+                try {
+                    mImageView.setBackgroundColor(Color.parseColor(backgroundColor));
+                } catch (IllegalArgumentException e) {}
+            }
+            mImageView.setLayoutParams(new ViewGroup.LayoutParams(
+                ViewGroup.LayoutParams.FILL_PARENT,
+                ViewGroup.LayoutParams.FILL_PARENT));
+            mImageView.setScaleType(ImageView.ScaleType.FIT_CENTER);
+
+        }
+
+        if (mLayout == null) {
+            setContentView(mImageView);
+        } else if (PythonActivity.mImageView.getParent() == null){
+            mLayout.addView(mImageView);
+        }
+    }
+
+    //----------------------------------------------------------------------------
+    // Listener interface for onNewIntent
+    //
+
+    public interface NewIntentListener {
+        void onNewIntent(Intent intent);
+    }
+
+    private List<NewIntentListener> newIntentListeners = null;
+
+    public void registerNewIntentListener(NewIntentListener listener) {
+        if ( this.newIntentListeners == null )
+            this.newIntentListeners = Collections.synchronizedList(new ArrayList<NewIntentListener>());
+        this.newIntentListeners.add(listener);
+    }
+
+    public void unregisterNewIntentListener(NewIntentListener listener) {
+        if ( this.newIntentListeners == null )
+            return;
+        this.newIntentListeners.remove(listener);
+    }
+
+    @Override
+    protected void onNewIntent(Intent intent) {
+        if ( this.newIntentListeners == null )
+            return;
+        this.onResume();
+        synchronized ( this.newIntentListeners ) {
+            Iterator<NewIntentListener> iterator = this.newIntentListeners.iterator();
+            while ( iterator.hasNext() ) {
+                (iterator.next()).onNewIntent(intent);
+            }
+        }
+    }
+
+    //----------------------------------------------------------------------------
+    // Listener interface for onActivityResult
+    //
+
+    public interface ActivityResultListener {
+        void onActivityResult(int requestCode, int resultCode, Intent data);
+    }
+
+    private List<ActivityResultListener> activityResultListeners = null;
+
+    public void registerActivityResultListener(ActivityResultListener listener) {
+        if ( this.activityResultListeners == null )
+            this.activityResultListeners = Collections.synchronizedList(new ArrayList<ActivityResultListener>());
+        this.activityResultListeners.add(listener);
+    }
+
+    public void unregisterActivityResultListener(ActivityResultListener listener) {
+        if ( this.activityResultListeners == null )
+            return;
+        this.activityResultListeners.remove(listener);
+    }
+
+    @Override
+    protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
+        if ( this.activityResultListeners == null )
+            return;
+        this.onResume();
+        synchronized ( this.activityResultListeners ) {
+            Iterator<ActivityResultListener> iterator 
= this.activityResultListeners.iterator(); + while ( iterator.hasNext() ) + (iterator.next()).onActivityResult(requestCode, resultCode, intent); + } + } + + public static void start_service( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument + ) { + _do_start_service( + serviceTitle, serviceDescription, pythonServiceArgument, true + ); + } + + public static void start_service_not_as_foreground( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument + ) { + _do_start_service( + serviceTitle, serviceDescription, pythonServiceArgument, false + ); + } + + public static void _do_start_service( + String serviceTitle, + String serviceDescription, + String pythonServiceArgument, + boolean showForegroundNotification + ) { + Intent serviceIntent = new Intent(PythonActivity.mActivity, PythonService.class); + String argument = PythonActivity.mActivity.getFilesDir().getAbsolutePath(); + String app_root_dir = PythonActivity.mActivity.getAppRoot(); + String entry_point = PythonActivity.mActivity.getEntryPoint(app_root_dir + "/service"); + serviceIntent.putExtra("androidPrivate", argument); + serviceIntent.putExtra("androidArgument", app_root_dir); + serviceIntent.putExtra("serviceEntrypoint", "service/" + entry_point); + serviceIntent.putExtra("pythonName", "python"); + serviceIntent.putExtra("pythonHome", app_root_dir); + serviceIntent.putExtra("pythonPath", app_root_dir + ":" + app_root_dir + "/lib"); + serviceIntent.putExtra("serviceStartAsForeground", + (showForegroundNotification ? "true" : "false") + ); + serviceIntent.putExtra("serviceTitle", serviceTitle); + serviceIntent.putExtra("serviceDescription", serviceDescription); + serviceIntent.putExtra("pythonServiceArgument", pythonServiceArgument); + PythonActivity.mActivity.startService(serviceIntent); + } + + public static void stop_service() { + Intent serviceIntent = new Intent(PythonActivity.mActivity, PythonService.class); + PythonActivity.mActivity.stopService(serviceIntent); + } + + + public static native void nativeSetenv(String name, String value); + public static native int nativeInit(Object arguments); + + + /** + * Used by android.permissions p4a module to register a call back after + * requesting runtime permissions + **/ + public interface PermissionsCallback { + void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults); + } + + private PermissionsCallback permissionCallback; + private boolean havePermissionsCallback = false; + + public void addPermissionsCallback(PermissionsCallback callback) { + permissionCallback = callback; + havePermissionsCallback = true; + Log.v(TAG, "addPermissionsCallback(): Added callback for onRequestPermissionsResult"); + } + + @Override + public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { + Log.v(TAG, "onRequestPermissionsResult()"); + if (havePermissionsCallback) { + Log.v(TAG, "onRequestPermissionsResult passed to callback"); + permissionCallback.onRequestPermissionsResult(requestCode, permissions, grantResults); + } + super.onRequestPermissionsResult(requestCode, permissions, grantResults); + } + + /** + * Used by android.permissions p4a module to check a permission + **/ + public boolean checkCurrentPermission(String permission) { + if (android.os.Build.VERSION.SDK_INT < 23) + return true; + + try { + java.lang.reflect.Method methodCheckPermission = + Activity.class.getMethod("checkSelfPermission", String.class); + Object resultObj = 
methodCheckPermission.invoke(this, permission); + int result = Integer.parseInt(resultObj.toString()); + if (result == PackageManager.PERMISSION_GRANTED) + return true; + } catch (IllegalAccessException | NoSuchMethodException | + InvocationTargetException e) { + } + return false; + } + + /** + * Used by android.permissions p4a module to request runtime permissions + **/ + public void requestPermissionsWithRequestCode(String[] permissions, int requestCode) { + if (android.os.Build.VERSION.SDK_INT < 23) + return; + try { + java.lang.reflect.Method methodRequestPermission = + Activity.class.getMethod("requestPermissions", + String[].class, int.class); + methodRequestPermission.invoke(this, permissions, requestCode); + } catch (IllegalAccessException | NoSuchMethodException | + InvocationTargetException e) { + } + } + + public void requestPermissions(String[] permissions) { + requestPermissionsWithRequestCode(permissions, 1); + } +} + + +class PythonMain implements Runnable { + @Override + public void run() { + PythonActivity.nativeInit(new String[0]); + } +} + +class WebViewLoaderMain implements Runnable { + @Override + public void run() { + WebViewLoader.testConnection(); + } +} diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/jniLibs/.gitkeep b/p4a/pythonforandroid/bootstraps/webview/build/src/main/jniLibs/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/drawable/.gitkeep b/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/drawable/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/drawable/icon.png b/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/drawable/icon.png new file mode 100644 index 0000000..59a00ba Binary files /dev/null and b/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/drawable/icon.png differ diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/layout/main.xml b/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/layout/main.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/layout/main.xml rename to p4a/pythonforandroid/bootstraps/webview/build/src/main/res/layout/main.xml diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/mipmap-anydpi-v26/.gitkeep b/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/mipmap-anydpi-v26/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/mipmap/.gitkeep b/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/mipmap/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/values/strings.xml b/p4a/pythonforandroid/bootstraps/webview/build/src/main/res/values/strings.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/values/strings.xml rename to p4a/pythonforandroid/bootstraps/webview/build/src/main/res/values/strings.xml diff --git a/p4a/pythonforandroid/bootstraps/webview/build/templates/AndroidManifest.tmpl.xml b/p4a/pythonforandroid/bootstraps/webview/build/templates/AndroidManifest.tmpl.xml new file mode 100644 index 0000000..f77533b --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/templates/AndroidManifest.tmpl.xml @@ -0,0 +1,108 @@ + + + + = 9 %} + android:xlargeScreens="true" + {% endif %} + /> + + + + + + + {% for perm in args.permissions %} + {% if '.' 
in perm %} + + {% else %} + + {% endif %} + {% endfor %} + + {% if args.wakelock %} + + {% endif %} + + {% if args.billing_pubkey %} + + {% endif %} + + + + {% for l in args.android_used_libs %} + + {% endfor %} + {% for m in args.meta_data %} + {% endfor %} + + + + + + + + {%- if args.intent_filters -%} + {{- args.intent_filters -}} + {%- endif -%} + + + {% if service %} + + {% endif %} + {% for name in service_names %} + + {% endfor %} + + {% if args.billing_pubkey %} + + + + + + + + + {% endif %} + + + diff --git a/p4a/pythonforandroid/bootstraps/webview/build/templates/WebViewLoader.tmpl.java b/p4a/pythonforandroid/bootstraps/webview/build/templates/WebViewLoader.tmpl.java new file mode 100644 index 0000000..5482da8 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/templates/WebViewLoader.tmpl.java @@ -0,0 +1,56 @@ +package org.kivy.android; + +import android.util.Log; + +import java.io.IOException; +import java.net.Socket; +import java.net.InetSocketAddress; + +import android.os.SystemClock; + +import android.os.Handler; + +import org.kivy.android.PythonActivity; + +public class WebViewLoader { + private static final String TAG = "WebViewLoader"; + + public static void testConnection() { + + while (true) { + if (WebViewLoader.pingHost("localhost", {{ args.port }}, 100)) { + Log.v(TAG, "Successfully pinged localhost:{{ args.port }}"); + Handler mainHandler = new Handler(PythonActivity.mActivity.getMainLooper()); + Runnable myRunnable = new Runnable() { + @Override + public void run() { + PythonActivity.mActivity.loadUrl("http://127.0.0.1:{{ args.port }}/"); + Log.v(TAG, "Loaded webserver in webview"); + } + }; + mainHandler.post(myRunnable); + break; + + } else { + Log.v(TAG, "Could not ping localhost:{{ args.port }}"); + try { + Thread.sleep(100); + } catch(InterruptedException e) { + Log.v(TAG, "InterruptedException occurred when sleeping"); + } + } + } + } + + public static boolean pingHost(String host, int port, int timeout) { + Socket socket = new Socket(); + try { + socket.connect(new InetSocketAddress(host, port), timeout); + socket.close(); + return true; + } catch (IOException e) { + try {socket.close();} catch (IOException f) {return false;} + return false; // Either timeout or unreachable or failed DNS lookup. 
+ } + } +} diff --git a/p4a/pythonforandroid/bootstraps/webview/build/templates/strings.tmpl.xml b/p4a/pythonforandroid/bootstraps/webview/build/templates/strings.tmpl.xml new file mode 100644 index 0000000..41c20ac --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/templates/strings.tmpl.xml @@ -0,0 +1,6 @@ + + + {{ args.name }} + {{ private_version }} + {{ args.presplash_color }} + diff --git a/p4a/pythonforandroid/bootstraps/webview/build/templates/test/build.tmpl.xml b/p4a/pythonforandroid/bootstraps/webview/build/templates/test/build.tmpl.xml new file mode 100644 index 0000000..9564aae --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/templates/test/build.tmpl.xml @@ -0,0 +1,93 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/p4a/pythonforandroid/bootstraps/webview/build/templates/test/build.xml.tmpl b/p4a/pythonforandroid/bootstraps/webview/build/templates/test/build.xml.tmpl new file mode 100644 index 0000000..9564aae --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/templates/test/build.xml.tmpl @@ -0,0 +1,93 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/p4a/pythonforandroid/bootstraps/webview/build/webview_includes/_load.html b/p4a/pythonforandroid/bootstraps/webview/build/webview_includes/_load.html new file mode 100644 index 0000000..1896d63 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/webview_includes/_load.html @@ -0,0 +1,16 @@ + + + + + + + + Python WebView loader + + + +
+
Loading...
+
+ + diff --git a/p4a/pythonforandroid/bootstraps/webview/build/webview_includes/_loading_style.css b/p4a/pythonforandroid/bootstraps/webview/build/webview_includes/_loading_style.css new file mode 100644 index 0000000..6833855 --- /dev/null +++ b/p4a/pythonforandroid/bootstraps/webview/build/webview_includes/_loading_style.css @@ -0,0 +1,78 @@ + +h1 { + font-size: 30px; + color: blue; + font-weight: bold; + text-align:center; +} + +h2 { + text-align:center; +} + +button { + margin-left: auto; + margin-right: auto; + display: block; + margin-top: 50px; + font-size: 30px; +} + + +/* Loader from http://projects.lukehaas.me/css-loaders/#load1 */ + +.loader, +.loader:before, +.loader:after { + background: #aaaaff; + -webkit-animation: load1 1s infinite ease-in-out; + animation: load1 1s infinite ease-in-out; + width: 1em; + height: 4em; +} +.loader:before, +.loader:after { + position: absolute; + top: 0; + content: ''; +} +.loader:before { + left: -1.5em; +} +.loader { + text-indent: -9999em; + margin: 8em auto; + position: relative; + font-size: 11px; + -webkit-animation-delay: -0.16s; + animation-delay: -0.16s; +} +.loader:after { + left: 1.5em; + -webkit-animation-delay: -0.32s; + animation-delay: -0.32s; +} +@-webkit-keyframes load1 { + 0%, + 80%, + 100% { + box-shadow: 0 0 #aaaaff; + height: 4em; + } + 40% { + box-shadow: 0 -2em #aaaaff; + height: 5em; + } +} +@keyframes load1 { + 0%, + 80%, + 100% { + box-shadow: 0 0 #aaaaff; + height: 4em; + } + 40% { + box-shadow: 0 -2em #aaaaff; + height: 5em; + } +} diff --git a/p4a/pythonforandroid/build.py b/p4a/pythonforandroid/build.py index 374929d..645b368 100644 --- a/p4a/pythonforandroid/build.py +++ b/p4a/pythonforandroid/build.py @@ -1,32 +1,66 @@ -from __future__ import print_function - -from os.path import (join, realpath, dirname, expanduser, exists, - split, isdir) +from os.path import ( + abspath, join, realpath, dirname, expanduser, exists +) from os import environ import copy import os import glob -import sys import re import sh +import shutil import subprocess +from contextlib import suppress from pythonforandroid.util import ( - current_directory, ensure_dir, get_virtualenv_executable, - BuildInterruptingException + current_directory, ensure_dir, + BuildInterruptingException, ) from pythonforandroid.logger import (info, warning, info_notify, info_main, shprint) from pythonforandroid.archs import ArchARM, ArchARMv7_a, ArchAarch_64, Archx86, Archx86_64 +from pythonforandroid.pythonpackage import get_package_name from pythonforandroid.recipe import CythonRecipe, Recipe from pythonforandroid.recommendations import ( check_ndk_version, check_target_api, check_ndk_api, RECOMMENDED_NDK_API, RECOMMENDED_TARGET_API) +from pythonforandroid.androidndk import AndroidNDK -class Context(object): +def get_targets(sdk_dir): + if exists(join(sdk_dir, 'cmdline-tools', 'latest', 'bin', 'avdmanager')): + avdmanager = sh.Command(join(sdk_dir, 'cmdline-tools', 'latest', 'bin', 'avdmanager')) + targets = avdmanager('list', 'target').stdout.decode('utf-8').split('\n') + + elif exists(join(sdk_dir, 'tools', 'bin', 'avdmanager')): + avdmanager = sh.Command(join(sdk_dir, 'tools', 'bin', 'avdmanager')) + targets = avdmanager('list', 'target').stdout.decode('utf-8').split('\n') + elif exists(join(sdk_dir, 'tools', 'android')): + android = sh.Command(join(sdk_dir, 'tools', 'android')) + targets = android('list').stdout.decode('utf-8').split('\n') + else: + raise BuildInterruptingException( + 'Could not find `android` or `sdkmanager` binaries in Android SDK', + 
instructions='Make sure the path to the Android SDK is correct')
+    return targets
+
+
+def get_available_apis(sdk_dir):
+    targets = get_targets(sdk_dir)
+    apis = [s for s in targets if re.match(r'^ *API level: ', s)]
+    apis = [re.findall(r'[0-9]+', s) for s in apis]
+    apis = [int(s[0]) for s in apis if s]
+    return apis
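The regex pipeline in `get_available_apis` is easiest to see against a concrete sample. The snippet below (not part of the patch) runs the exact three list comprehensions from the function over an invented fragment of `avdmanager list target` output; only the `API level:` lines survive the first filter, and the rest reduce them to integers.

```python
import re

# Invented sample of `avdmanager list target` output, for illustration only.
sample = """id: 1 or "android-27"
     Name: Android API 27
     Type: Platform
     API level: 27
----------
id: 2 or "android-33"
     Name: Android API 33
     Type: Platform
     API level: 33"""

targets = sample.split('\n')
apis = [s for s in targets if re.match(r'^ *API level: ', s)]  # keep "API level" lines
apis = [re.findall(r'[0-9]+', s) for s in apis]                # pull out the digits
apis = [int(s[0]) for s in apis if s]                          # first number per line
print(apis)  # [27, 33]
```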
+
+
+class Context:
     '''A build context. If anything will be built, an instance of this class
     will be instantiated and used to hold all the build state.'''
+    # Whether to make a debug or release build
+    build_as_debuggable = False
+
+    # Whether to strip debug symbols in `.so` files
+    with_debug_symbols = False
+
     env = environ.copy()
     # the filepath of toolchain.py
     root_dir = None
@@ -36,25 +70,31 @@ class Context(object):
     # in which bootstraps are copied for building
     # and recipes are built
     build_dir = None
+
+    distribution = None
+    """The Distribution object representing the current build target location."""
+
     # the Android project folder where everything ends up
     dist_dir = None
+
     # where Android libs are cached after build
     # but before being placed in dists
     libs_dir = None
     aars_dir = None
+
+    # Whether setup.py or similar should be used if present:
+    use_setup_py = False
+
     ccache = None  # whether to use ccache
-    cython = None  # the cython interpreter name
 
-    ndk_platform = None  # the ndk platform directory
+    ndk = None
 
-    dist_name = None  # should be deprecated in favour of self.dist.dist_name
     bootstrap = None
     bootstrap_build_dir = None
 
     recipe_build_order = None  # Will hold the list of all built recipes
 
-    symlink_java_src = False  # If True, will symlink instead of copying during build
+    symlink_bootstrap_files = False  # If True, will symlink instead of copying during build
 
     java_build_tool = 'auto'
@@ -70,34 +110,33 @@ class Context(object):
 
     @property
     def libs_dir(self):
         # Was previously hardcoded as self.build_dir/libs
-        dir = join(self.build_dir, 'libs_collections',
-                   self.bootstrap.distribution.name)
-        ensure_dir(dir)
-        return dir
+        directory = join(self.build_dir, 'libs_collections',
+                         self.bootstrap.distribution.name)
+        ensure_dir(directory)
+        return directory
 
     @property
     def javaclass_dir(self):
         # Was previously hardcoded as self.build_dir/java
-        dir = join(self.build_dir, 'javaclasses',
-                   self.bootstrap.distribution.name)
-        ensure_dir(dir)
-        return dir
+        directory = join(self.build_dir, 'javaclasses',
+                         self.bootstrap.distribution.name)
+        ensure_dir(directory)
+        return directory
 
     @property
     def aars_dir(self):
-        dir = join(self.build_dir, 'aars', self.bootstrap.distribution.name)
-        ensure_dir(dir)
-        return dir
+        directory = join(self.build_dir, 'aars', self.bootstrap.distribution.name)
+        ensure_dir(directory)
+        return directory
 
     @property
     def python_installs_dir(self):
-        dir = join(self.build_dir, 'python-installs')
-        ensure_dir(dir)
-        return dir
+        directory = join(self.build_dir, 'python-installs')
+        ensure_dir(directory)
+        return directory
 
-    def get_python_install_dir(self):
-        dir = join(self.python_installs_dir, self.bootstrap.distribution.name)
-        return dir
+    def get_python_install_dir(self, arch):
+        return join(self.python_installs_dir, self.bootstrap.distribution.name, arch)
 
     def setup_dirs(self, storage_dir):
         '''Calculates all the storage and build dirs, and makes sure
@@ -185,8 +224,6 @@ class Context(object):
         if self._build_env_prepared:
             return
 
-        ok = True
-
         # Work out where the Android SDK is
         sdk_dir = None
         if user_sdk_dir:
@@ -202,10 +239,10 @@ class Context(object):
             possible_dirs = glob.glob(expanduser(join(
                 '~', '.buildozer', 'android', 'platform', 'android-sdk-*')))
             possible_dirs = [d for d in possible_dirs if not
-                             (d.endswith('.bz2') or d.endswith('.gz'))]
+                             d.endswith(('.bz2', '.gz'))]
             if possible_dirs:
                 info('Found possible SDK dirs in buildozer dir: {}'.format(
-                    ', '.join([d.split(os.sep)[-1] for d in possible_dirs])))
+                    ', '.join(d.split(os.sep)[-1] for d in possible_dirs)))
                 info('Will attempt to use SDK at {}'.format(possible_dirs[0]))
                 warning('This SDK lookup is intended for debug only, if you '
                         'use python-for-android much you should probably '
@@ -230,21 +267,10 @@ class Context(object):
             android_api = int(android_api)
         self.android_api = android_api
 
-        check_target_api(android_api, self.archs[0].arch)
-
-        if exists(join(sdk_dir, 'tools', 'bin', 'avdmanager')):
-            avdmanager = sh.Command(join(sdk_dir, 'tools', 'bin', 'avdmanager'))
-            targets = avdmanager('list', 'target').stdout.decode('utf-8').split('\n')
-        elif exists(join(sdk_dir, 'tools', 'android')):
-            android = sh.Command(join(sdk_dir, 'tools', 'android'))
-            targets = android('list').stdout.decode('utf-8').split('\n')
-        else:
-            raise BuildInterruptingException(
-                'Could not find `android` or `sdkmanager` binaries in Android SDK',
-                instructions='Make sure the path to the Android SDK is correct')
-        apis = [s for s in targets if re.match(r'^ *API level: ', s)]
-        apis = [re.findall(r'[0-9]+', s) for s in apis]
-        apis = [int(s[0]) for s in apis if s]
+        for arch in self.archs:
+            # Maybe we could remove this in the near future (ARMv5 is definitely old)
+            check_target_api(android_api, arch)
+        apis = get_available_apis(self.sdk_dir)
         info('Available Android APIs are ({})'.format(
             ', '.join(map(str, apis))))
         if android_api in apis:
@@ -279,7 +305,7 @@ class Context(object):
                 '~', '.buildozer', 'android', 'platform', 'android-ndk-r*')))
             if possible_dirs:
                 info('Found possible NDK dirs in buildozer dir: {}'.format(
-                    ', '.join([d.split(os.sep)[-1] for d in possible_dirs])))
+                    ', '.join(d.split(os.sep)[-1] for d in possible_dirs)))
                 info('Will attempt to use NDK at {}'.format(possible_dirs[0]))
                 warning('This NDK lookup is intended for debug only, if you '
                         'use python-for-android much you should probably '
@@ -288,11 +314,8 @@ class Context(object):
         if ndk_dir is None:
             raise BuildInterruptingException('Android NDK dir was not specified')
         self.ndk_dir = realpath(ndk_dir)
-
         check_ndk_version(ndk_dir)
 
-        self.ndk = 'crystax'  # force crystax detection
-
         ndk_api = None
         if user_ndk_api:
             ndk_api = user_ndk_api
@@ -310,103 +333,32 @@ class Context(object):
 
         check_ndk_api(ndk_api, self.android_api)
 
-        virtualenv = get_virtualenv_executable()
-        if virtualenv is None:
-            raise IOError('Couldn\'t find a virtualenv executable, '
-                          'you must install this to use p4a.')
-        self.virtualenv = virtualenv
-        info('Found virtualenv at {}'.format(virtualenv))
+        self.ndk = AndroidNDK(self.ndk_dir)
 
         # path to some tools
-        self.ccache = sh.which("ccache")
+        self.ccache = shutil.which("ccache")
         if not self.ccache:
             info('ccache is missing, future builds will not be optimized.')
-        for cython_fn in ("cython", "cython3", "cython2", "cython-2.7"):
-            cython = sh.which(cython_fn)
-            if cython:
-                self.cython = cython
-                break
-        else:
-            raise BuildInterruptingException('No cython binary found.')
-        if not self.cython:
-            ok = False
-            warning("Missing requirement: cython is not installed")
+        try:
+            subprocess.check_output([
+                "python3", "-m", "cython", "--help",
+            ])
+        except subprocess.CalledProcessError:
+            warning('Cython for python3 missing. 
If you are building for ' + ' a python 3 target (which is the default)' + ' then THINGS WILL BREAK.') - # This would need to be changed if supporting multiarch APKs - arch = self.archs[0] - platform_dir = arch.platform_dir - toolchain_prefix = arch.toolchain_prefix - toolchain_version = None - self.ndk_platform = join( - self.ndk_dir, - 'platforms', - 'android-{}'.format(self.ndk_api), - platform_dir) - if not exists(self.ndk_platform): - warning('ndk_platform doesn\'t exist: {}'.format( - self.ndk_platform)) - ok = False - - py_platform = sys.platform - if py_platform in ['linux2', 'linux3']: - py_platform = 'linux' - - toolchain_versions = [] - toolchain_path = join(self.ndk_dir, 'toolchains') - if isdir(toolchain_path): - toolchain_contents = glob.glob('{}/{}-*'.format(toolchain_path, - toolchain_prefix)) - toolchain_versions = [split(path)[-1][len(toolchain_prefix) + 1:] - for path in toolchain_contents] - else: - warning('Could not find toolchain subdirectory!') - ok = False - toolchain_versions.sort() - - toolchain_versions_gcc = [] - for toolchain_version in toolchain_versions: - if toolchain_version[0].isdigit(): - # GCC toolchains begin with a number - toolchain_versions_gcc.append(toolchain_version) - - if toolchain_versions: - info('Found the following toolchain versions: {}'.format( - toolchain_versions)) - info('Picking the latest gcc toolchain, here {}'.format( - toolchain_versions_gcc[-1])) - toolchain_version = toolchain_versions_gcc[-1] - else: - warning('Could not find any toolchain for {}!'.format( - toolchain_prefix)) - ok = False - - self.toolchain_prefix = toolchain_prefix - self.toolchain_version = toolchain_version - # Modify the path so that sh finds modules appropriately - environ['PATH'] = ( - '{ndk_dir}/toolchains/{toolchain_prefix}-{toolchain_version}/' - 'prebuilt/{py_platform}-x86/bin/:{ndk_dir}/toolchains/' - '{toolchain_prefix}-{toolchain_version}/prebuilt/' - '{py_platform}-x86_64/bin/:{ndk_dir}:{sdk_dir}/' - 'tools:{path}').format( - sdk_dir=self.sdk_dir, ndk_dir=self.ndk_dir, - toolchain_prefix=toolchain_prefix, - toolchain_version=toolchain_version, - py_platform=py_platform, path=environ.get('PATH')) - - for executable in ("pkg-config", "autoconf", "automake", "libtoolize", - "tar", "bzip2", "unzip", "make", "gcc", "g++"): - if not sh.which(executable): - warning("Missing executable: {} is not installed".format( - executable)) - - if not ok: - raise BuildInterruptingException( - 'python-for-android cannot continue due to the missing executables above') + self.env["PATH"] = ":".join( + [ + self.ndk.llvm_bin_dir, + self.ndk_dir, + f"{self.sdk_dir}/tools", + environ.get("PATH"), + ] + ) def __init__(self): - super(Context, self).__init__() self.include_dirs = [] self._build_env_prepared = False @@ -417,12 +369,12 @@ class Context(object): self._ndk_api = None self.ndk = None - self.toolchain_prefix = None - self.toolchain_version = None - self.local_recipes = None self.copy_libs = False + self.activity_class_name = u'org.kivy.android.PythonActivity' + self.service_class_name = u'org.kivy.android.PythonService' + # this list should contain all Archs, it is pruned later self.archs = ( ArchARM(self), @@ -452,26 +404,24 @@ class Context(object): if not self.archs: raise BuildInterruptingException('Asked to compile for no Archs, so failing.') info('Will compile for the following archs: {}'.format( - ', '.join([arch.arch for arch in self.archs]))) + ', '.join(arch.arch for arch in self.archs))) - def prepare_bootstrap(self, bs): - bs.ctx = self - self.bootstrap 
= bs + def prepare_bootstrap(self, bootstrap): + if not bootstrap: + raise TypeError("None is not allowed for bootstrap") + bootstrap.ctx = self + self.bootstrap = bootstrap self.bootstrap.prepare_build_dir() self.bootstrap_build_dir = self.bootstrap.build_dir - def prepare_dist(self, name): - self.dist_name = name - self.bootstrap.prepare_dist_dir(self.dist_name) + def prepare_dist(self): + self.bootstrap.prepare_dist_dir() - def get_site_packages_dir(self, arch=None): + def get_site_packages_dir(self, arch): '''Returns the location of site-packages in the python-install build dir. ''' - if self.python_recipe.name == 'python2legacy': - return join(self.get_python_install_dir(), - 'lib', 'python2.7', 'site-packages') - return self.get_python_install_dir() + return self.get_python_install_dir(arch.arch) def get_libs_dir(self, arch): '''The libs dir for a given arch.''' @@ -488,21 +438,10 @@ class Context(object): if not os.path.exists(name): # Non-existing dir, cannot look this up. return False - if os.path.exists(os.path.join(name, "setup.py")): - # Get name from setup.py: - name = subprocess.check_output([ - sys.executable, "setup.py", "--name"], - cwd=name) - try: - name = name.decode('utf-8', 'replace') - except AttributeError: - pass - name = name.strip() - if len(name) == 0: - # Failed to look up any meaningful name. - return False - else: - # A folder with whatever, cannot look this up. + try: + name = get_package_name(os.path.abspath(name)) + except ValueError: + # Failed to look up any meaningful name. return False # Try to look up recipe by name: @@ -517,7 +456,6 @@ class Context(object): return (exists(join(site_packages_dir, name)) or exists(join(site_packages_dir, name + '.py')) or exists(join(site_packages_dir, name + '.pyc')) or - exists(join(site_packages_dir, name + '.pyo')) or exists(join(site_packages_dir, name + '.so')) or glob.glob(join(site_packages_dir, name + '-*.egg'))) @@ -525,7 +463,9 @@ class Context(object): return not self.has_package(name, arch) -def build_recipes(build_order, python_modules, ctx): +def build_recipes(build_order, python_modules, ctx, project_dir, + ignore_project_setup_py=False + ): # Put recipes in correct build order info_notify("Recipe build order is {}".format(build_order)) if python_modules: @@ -565,13 +505,17 @@ def build_recipes(build_order, python_modules, ctx): else: info('{} said it is already built, skipping' .format(recipe.name)) + recipe.install_libraries(arch) # 4) biglink everything info_main('# Biglinking object files') - if not ctx.python_recipe or not ctx.python_recipe.from_crystax: + if not ctx.python_recipe: biglink(ctx, arch) else: - info('NDK is crystax, skipping biglink (will this work?)') + warning( + "Context's python recipe found, " + "skipping biglink (will this work?)" + ) # 5) postbuild packages info_main('# Postbuilding recipes') @@ -580,46 +524,185 @@ def build_recipes(build_order, python_modules, ctx): recipe.postbuild_arch(arch) info_main('# Installing pure Python modules') - run_pymodules_install(ctx, python_modules) - - return + for arch in ctx.archs: + run_pymodules_install( + ctx, arch, python_modules, project_dir, + ignore_setup_py=ignore_project_setup_py + ) -def run_pymodules_install(ctx, modules): - modules = list(filter(ctx.not_has_package, modules)) +def project_has_setup_py(project_dir): + return (project_dir is not None and + (exists(join(project_dir, "setup.py")) or + exists(join(project_dir, "pyproject.toml")) + )) - if not modules: - info('There are no Python modules to install, skipping') + +def 
run_setuppy_install(ctx, project_dir, env=None, arch=None): + env = env or {} + + with current_directory(project_dir): + info('got setup.py or similar, running project install. ' + + '(disable this behavior with --ignore-setup-py)') + + # Compute & output the constraints we will use: + info('Contents that will be used for constraints.txt:') + constraints = subprocess.check_output([ + join( + ctx.build_dir, "venv", "bin", "pip" + ), + "freeze" + ], env=copy.copy(env)) + with suppress(AttributeError): + constraints = constraints.decode("utf-8", "replace") + info(constraints) + + # Make sure all packages found are fixed in version + # by writing a constraint file, to avoid recipes being + # upgraded & reinstalled: + with open('._tmp_p4a_recipe_constraints.txt', 'wb') as fileh: + fileh.write(constraints.encode("utf-8", "replace")) + try: + + info('Populating venv\'s site-packages with ' + 'ctx.get_site_packages_dir()...') + + # Copy dist contents into site-packages for discovery. + # Why this is needed: + # --target is somewhat evil and messes with discovery of + # packages in PYTHONPATH if that also includes the target + # folder. So we need to use the regular virtualenv + # site-packages folder instead. + # Reference: + # https://github.com/pypa/pip/issues/6223 + ctx_site_packages_dir = os.path.normpath( + os.path.abspath(ctx.get_site_packages_dir(arch)) + ) + venv_site_packages_dir = os.path.normpath(os.path.join( + ctx.build_dir, "venv", "lib", [ + f for f in os.listdir(os.path.join( + ctx.build_dir, "venv", "lib" + )) if f.startswith("python") + ][0], "site-packages" + )) + copied_over_contents = [] + for f in os.listdir(ctx_site_packages_dir): + full_path = os.path.join(ctx_site_packages_dir, f) + if not os.path.exists(os.path.join( + venv_site_packages_dir, f + )): + if os.path.isdir(full_path): + shutil.copytree(full_path, os.path.join( + venv_site_packages_dir, f + )) + else: + shutil.copy2(full_path, os.path.join( + venv_site_packages_dir, f + )) + copied_over_contents.append(f) + + # Get listing of virtualenv's site-packages, to see the + # newly added things afterwards & copy them back into + # the distribution folder / build context site-packages: + previous_venv_contents = os.listdir( + venv_site_packages_dir + ) + + # Actually run setup.py: + info('Launching package install...') + shprint(sh.bash, '-c', ( + "'" + join( + ctx.build_dir, "venv", "bin", "pip" + ).replace("'", "'\"'\"'") + "' " + + "install -c ._tmp_p4a_recipe_constraints.txt -v ." + ).format(ctx.get_site_packages_dir(arch). 
+             replace("'", "'\"'\"'")),
+                _env=copy.copy(env))
+
+            # Go over all new additions and copy them back:
+            info('Copying additions resulting from setup.py back '
+                 'into ctx.get_site_packages_dir()...')
+            new_venv_additions = []
+            for f in (set(os.listdir(venv_site_packages_dir)) -
+                      set(previous_venv_contents)):
+                new_venv_additions.append(f)
+                full_path = os.path.join(venv_site_packages_dir, f)
+                if os.path.isdir(full_path):
+                    shutil.copytree(full_path, os.path.join(
+                        ctx_site_packages_dir, f
+                    ))
+                else:
+                    shutil.copy2(full_path, os.path.join(
+                        ctx_site_packages_dir, f
+                    ))
+
+            # Undo all the changes we did to the venv site-packages:
+            info('Reverting additions to '
+                 'virtualenv\'s site-packages...')
+            for f in set(copied_over_contents + new_venv_additions):
+                full_path = os.path.join(venv_site_packages_dir, f)
+                if os.path.isdir(full_path):
+                    shutil.rmtree(full_path)
+                else:
+                    os.remove(full_path)
+        finally:
+            os.remove("._tmp_p4a_recipe_constraints.txt")
+
+
+def run_pymodules_install(ctx, arch, modules, project_dir=None,
+                          ignore_setup_py=False):
+    """ This function will take care of all non-recipe things, by:
+
+        1. Processing them from --requirements (the modules argument)
+           and installing them
+
+        2. Installing the user project/app itself via setup.py,
+           unless ignore_setup_py is set
+
+    """
+
+    info('*** PYTHON PACKAGE / PROJECT INSTALL STAGE FOR ARCH: {} ***'.format(arch))
+
+    modules = [m for m in modules if ctx.not_has_package(m, arch)]
+
+    # We change the current working directory later, so this has to be an
+    # absolute path, or `None` in case the `project_dir` kwarg wasn't supplied
+    project_dir = abspath(project_dir) if project_dir else None
+
+    # Bail out if no python deps and no setup.py to process:
+    if not modules and (
+            ignore_setup_py or
+            project_dir is None or
+            not project_has_setup_py(project_dir)
+            ):
+        info('No Python modules and no setup.py to process, skipping')
         return
 
-    info('The requirements ({}) don\'t have recipes, attempting to install '
-         'them with pip'.format(', '.join(modules)))
-    info('If this fails, it may mean that the module has compiled '
-         'components and needs a recipe.')
+    # Output messages about what we're going to do:
+    if modules:
+        info(
+            "The requirements ({}) don\'t have recipes, attempting to "
+            "install them with pip".format(', '.join(modules))
+        )
+        info(
+            "If this fails, it may mean that the module has compiled "
+            "components and needs a recipe."
+        )
+    if project_dir is not None and \
+            project_has_setup_py(project_dir) and not ignore_setup_py:
+        info(
+            "Will process the project install; if it fails, the "
+            "project may not be compatible with Android install."
+ ) - venv = sh.Command(ctx.virtualenv) + # Use our hostpython to create the virtualenv + host_python = sh.Command(ctx.hostpython) with current_directory(join(ctx.build_dir)): - shprint(venv, - '--python=python{}.{}'.format( - ctx.python_recipe.major_minor_version_string.partition(".")[0], - ctx.python_recipe.major_minor_version_string.partition(".")[2] - ), - 'venv' - ) - - info('Creating a requirements.txt file for the Python modules') - with open('requirements.txt', 'w') as fileh: - for module in modules: - key = 'VERSION_' + module - if key in environ: - line = '{}=={}\n'.format(module, environ[key]) - else: - line = '{}\n'.format(module) - fileh.write(line) + shprint(host_python, '-m', 'venv', 'venv') # Prepare base environment and upgrade pip: - base_env = copy.copy(os.environ) - base_env["PYTHONPATH"] = ctx.get_site_packages_dir() + base_env = dict(copy.copy(os.environ)) + base_env["PYTHONPATH"] = ctx.get_site_packages_dir(arch) info('Upgrade pip to latest version') shprint(sh.bash, '-c', ( "source venv/bin/activate && pip install -U pip" @@ -640,39 +723,56 @@ def run_pymodules_install(ctx, modules): env = copy.copy(base_env) env.update(recipe_env) - info('Installing Python modules with pip') - info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. ' - 'A reason for this is often modules compiling ' - 'native code that is unaware of Android cross-compilation ' - 'and does not work without additional ' - 'changes / workarounds.') - # Make sure our build package dir is available, and the virtualenv # site packages come FIRST (so the proper pip version is used): - env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir() + env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir(arch) env["PYTHONPATH"] = os.path.abspath(join( ctx.build_dir, "venv", "lib", "python" + ctx.python_recipe.major_minor_version_string, "site-packages")) + ":" + env["PYTHONPATH"] - ''' - # Do actual install: - shprint(sh.bash, '-c', ( - "venv/bin/pip " + - "install -v --target '{0}' --no-deps -r requirements.txt" - ).format(ctx.get_site_packages_dir().replace("'", "'\"'\"'")), - _env=copy.copy(env)) - ''' + # Install the manually specified requirements first: + if not modules: + info('There are no Python modules to install, skipping') + else: + info('Creating a requirements.txt file for the Python modules') + with open('requirements.txt', 'w') as fileh: + for module in modules: + key = 'VERSION_' + module + if key in environ: + line = '{}=={}\n'.format(module, environ[key]) + else: + line = '{}\n'.format(module) + fileh.write(line) - # use old install script - shprint(sh.bash, '-c', ( - "source venv/bin/activate && env CC=/bin/false CXX=/bin/false " - "PYTHONPATH={0} pip install --target '{0}' --no-deps -r requirements.txt" - ).format(ctx.get_site_packages_dir())) + info('Installing Python modules with pip') + info( + "IF THIS FAILS, THE MODULES MAY NEED A RECIPE. " + "A reason for this is often modules compiling " + "native code that is unaware of Android cross-compilation " + "and does not work without additional " + "changes / workarounds." 
+ ) + + shprint(sh.bash, '-c', ( + "venv/bin/pip " + + "install -v --target '{0}' --no-deps -r requirements.txt" + ).format(ctx.get_site_packages_dir(arch).replace("'", "'\"'\"'")), + _env=copy.copy(env)) + + # Afterwards, run setup.py if present: + if project_dir is not None and ( + project_has_setup_py(project_dir) and not ignore_setup_py + ): + run_setuppy_install(ctx, project_dir, env, arch.arch) + elif not ignore_setup_py: + info("No setup.py found in project directory: " + str(project_dir)) # Strip object files after potential Cython or native code builds: - standard_recipe.strip_object_files(ctx.archs[0], env, - build_dir=ctx.build_dir) + if not ctx.with_debug_symbols: + standard_recipe.strip_object_files( + arch, env, build_dir=ctx.build_dir + ) def biglink(ctx, arch): @@ -711,7 +811,7 @@ def biglink(ctx, arch): # Move to the directory containing crtstart_so.o and crtend_so.o # This is necessary with newer NDKs? A gcc bug? - with current_directory(join(ctx.ndk_platform, 'usr', 'lib')): + with current_directory(arch.ndk_lib_dir): do_biglink( join(ctx.get_libs_dir(arch.arch), 'libpymodules.so'), obj_dir.split(' '), @@ -721,7 +821,9 @@ def biglink(ctx, arch): env=env) -def biglink_function(soname, objs_paths, extra_link_dirs=[], env=None): +def biglink_function(soname, objs_paths, extra_link_dirs=None, env=None): + if extra_link_dirs is None: + extra_link_dirs = [] print('objs_paths are', objs_paths) sofiles = [] @@ -768,7 +870,9 @@ def biglink_function(soname, objs_paths, extra_link_dirs=[], env=None): shprint(cc, '-shared', '-O3', '-o', soname, *unique_args, _env=env) -def copylibs_function(soname, objs_paths, extra_link_dirs=[], env=None): +def copylibs_function(soname, objs_paths, extra_link_dirs=None, env=None): + if extra_link_dirs is None: + extra_link_dirs = [] print('objs_paths are', objs_paths) re_needso = re.compile(r'^.*\(NEEDED\)\s+Shared library: \[lib(.*)\.so\]\s*$') @@ -800,7 +904,7 @@ def copylibs_function(soname, objs_paths, extra_link_dirs=[], env=None): elif 'READELF' in os.environ: readelf = os.environ['READELF'] else: - readelf = sh.which('readelf').strip() + readelf = shutil.which('readelf').strip() readelf = sh.Command(readelf).bake('-d') dest = dirname(soname) @@ -896,5 +1000,4 @@ def copylibs_function(soname, objs_paths, extra_link_dirs=[], env=None): '\n\t'.join(needed_libs)) print('Copying libraries') - for lib in sofiles: - shprint(sh.cp, lib, dest) + shprint(sh.cp, *sofiles, dest) diff --git a/p4a/pythonforandroid/distribution.py b/p4a/pythonforandroid/distribution.py index 9fa7b4c..ff97f92 100644 --- a/p4a/pythonforandroid/distribution.py +++ b/p4a/pythonforandroid/distribution.py @@ -7,7 +7,7 @@ from pythonforandroid.util import current_directory, BuildInterruptingException from shutil import rmtree -class Distribution(object): +class Distribution: '''State container for information about a distribution (i.e. an Android project). 
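Aside (an illustrative sketch, not part of the patch): the
`extra_link_dirs=[]` -> `extra_link_dirs=None` signature changes in
build.py above avoid Python's mutable-default pitfall, where a single
list is created once at function definition time and then shared by
every call that relies on the default:

    def buggy(item, acc=[]):
        acc.append(item)
        return acc

    buggy('a')   # ['a']
    buggy('b')   # ['a', 'b']  <- state leaked from the first call

    def fixed(item, acc=None):
        if acc is None:
            acc = []
        acc.append(item)
        return acc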
@@ -24,7 +24,7 @@ class Distribution(object): ndk_api = None archs = [] - '''The arch targets that the dist is built for.''' + '''The names of the arch targets that the dist is built for.''' recipes = [] @@ -42,12 +42,19 @@ class Distribution(object): return str(self) @classmethod - def get_distribution(cls, ctx, name=None, recipes=[], - ndk_api=None, - force_build=False, - extra_dist_dirs=[], - require_perfect_match=False, - allow_replace_dist=True): + def get_distribution( + cls, + ctx, + *, + archs, # required keyword argument: there is no sensible default + name=None, + recipes=[], + ndk_api=None, + force_build=False, + extra_dist_dirs=[], + require_perfect_match=False, + allow_replace_dist=True + ): '''Takes information about the distribution, and decides what kind of distribution it will be. @@ -60,6 +67,12 @@ class Distribution(object): name : str The name of the distribution. If a dist with this name already ' exists, it will be used. + ndk_api : int + The NDK API to compile against, included in the dist because it cannot + be changed later during APK packaging. + archs : list + The target architectures list to compile against, included in the dist because + it cannot be changed later during APK packaging. recipes : list The recipes that the distribution must contain. force_download: bool @@ -77,17 +90,24 @@ class Distribution(object): a new one with the current requirements. ''' - existing_dists = Distribution.get_distributions(ctx) + possible_dists = Distribution.get_distributions(ctx) - possible_dists = existing_dists + # Will hold dists that would be built in the same folder as an existing dist + folder_match_dist = None - name_match_dist = None - - # 0) Check if a dist with that name already exists + # 0) Check if a dist with that name and architecture already exists if name is not None and name: - possible_dists = [d for d in possible_dists if d.name == name] + possible_dists = [ + d for d in possible_dists if + (d.name == name) and all(arch_name in d.archs for arch_name in archs)] + if possible_dists: - name_match_dist = possible_dists[0] + # There should only be one folder with a given dist name *and* arch. + # We could check that here, but for compatibility let's let it slide + # and just record the details of one of them. We only use this data to + # possibly fail the build later, so it doesn't really matter if there + # was more than one clash. 
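+            # For example (illustrative): with archs=["arm64-v8a"], a dist
+            # whose dist_info.json lists ["arm64-v8a", "armeabi-v7a"]
+            # matches here, while a dist built only for "x86" does not.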
+ folder_match_dist = possible_dists[0] # 1) Check if any existing dists meet the requirements _possible_dists = [] @@ -110,12 +130,14 @@ class Distribution(object): else: info('No existing dists meet the given requirements!') - # If any dist has perfect recipes and ndk API, return it + # If any dist has perfect recipes, arch and NDK API, return it for dist in possible_dists: if force_build: continue if ndk_api is not None and dist.ndk_api != ndk_api: continue + if not all(arch_name in dist.archs for arch_name in archs): + continue if (set(dist.recipes) == set(recipes) or (set(recipes).issubset(set(dist.recipes)) and not require_perfect_match)): @@ -123,12 +145,10 @@ class Distribution(object): .format(dist.name)) return dist - assert len(possible_dists) < 2 - # If there was a name match but we didn't already choose it, # then the existing dist is incompatible with the requested # configuration and the build cannot continue - if name_match_dist is not None and not allow_replace_dist: + if folder_match_dist is not None and not allow_replace_dist: raise BuildInterruptingException( 'Asked for dist with name {name} with recipes ({req_recipes}) and ' 'NDK API {req_ndk_api}, but a dist ' @@ -136,9 +156,11 @@ class Distribution(object): '({dist_recipes}) or NDK API {dist_ndk_api}'.format( name=name, req_ndk_api=ndk_api, - dist_ndk_api=name_match_dist.ndk_api, + dist_ndk_api=folder_match_dist.ndk_api, req_recipes=', '.join(recipes), - dist_recipes=', '.join(name_match_dist.recipes))) + dist_recipes=', '.join(folder_match_dist.recipes))) + + assert len(possible_dists) < 2 # If we got this far, we need to build a new dist dist = Distribution(ctx) @@ -152,9 +174,12 @@ class Distribution(object): name = filen.format(i) dist.name = name - dist.dist_dir = join(ctx.dist_dir, dist.name) + dist.dist_dir = join( + ctx.dist_dir, + name) dist.recipes = recipes dist.ndk_api = ctx.ndk_api + dist.archs = archs return dist @@ -182,7 +207,7 @@ class Distribution(object): with open(join(folder, 'dist_info.json')) as fileh: dist_info = json.load(fileh) dist = cls(ctx) - dist.name = folder.split('/')[-1] + dist.name = dist_info['dist_name'] dist.dist_dir = folder dist.needs_build = False dist.recipes = dist_info['recipes'] @@ -210,10 +235,11 @@ class Distribution(object): with current_directory(dirn): info('Saving distribution info') with open('dist_info.json', 'w') as fileh: - json.dump({'dist_name': self.ctx.dist_name, + json.dump({'dist_name': self.name, 'bootstrap': self.ctx.bootstrap.name, 'archs': [arch.arch for arch in self.ctx.archs], 'ndk_api': self.ctx.ndk_api, + 'use_setup_py': self.ctx.use_setup_py, 'recipes': self.ctx.recipe_build_order + self.ctx.python_modules, 'hostpython': self.ctx.hostpython, 'python_version': self.ctx.python_recipe.major_minor_version_string}, diff --git a/p4a/pythonforandroid/entrypoints.py b/p4a/pythonforandroid/entrypoints.py new file mode 100644 index 0000000..1ba6a26 --- /dev/null +++ b/p4a/pythonforandroid/entrypoints.py @@ -0,0 +1,20 @@ +from pythonforandroid.recommendations import check_python_version +from pythonforandroid.util import BuildInterruptingException, handle_build_exception + + +def main(): + """ + Main entrypoint for running python-for-android as a script. + """ + + try: + # Check the Python version before importing anything heavier than + # the util functions. This lets us provide a nice message about + # incompatibility rather than having the interpreter crash if it + # reaches unsupported syntax from a newer Python version. 
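+        # (Illustrative sketch: the guard is assumed to boil down to
+        # something like
+        #     if sys.version_info < MIN_RECOMMENDED_PY:
+        #         raise BuildInterruptingException("p4a needs a newer Python")
+        # with the exact bound defined in pythonforandroid.recommendations.)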
+ check_python_version() + + from pythonforandroid.toolchain import ToolchainCL + ToolchainCL() + except BuildInterruptingException as exc: + handle_build_exception(exc) diff --git a/p4a/pythonforandroid/graph.py b/p4a/pythonforandroid/graph.py index 646a66e..bdaca43 100644 --- a/p4a/pythonforandroid/graph.py +++ b/p4a/pythonforandroid/graph.py @@ -45,7 +45,7 @@ def get_dependency_tuple_list_for_recipe(recipe, blacklist=None): """ if blacklist is None: blacklist = set() - assert(type(blacklist) == set) + assert type(blacklist) == set if recipe.depends is None: dependencies = [] else: @@ -130,7 +130,7 @@ def find_order(graph): ''' while graph: # Find all items without a parent - leftmost = [l for l, s in graph.items() if not s] + leftmost = [name for name, dep in graph.items() if not dep] if not leftmost: raise ValueError('Dependency cycle detected! %s' % graph) # If there is more than one, sort them for predictable order @@ -160,7 +160,7 @@ def obvious_conflict_checker(ctx, name_tuples, blacklist=None): current_to_be_added = list(to_be_added) to_be_added = [] for (added_tuple, adding_recipe) in current_to_be_added: - assert(type(added_tuple) == tuple) + assert type(added_tuple) == tuple if len(added_tuple) > 1: # No obvious commitment in what to add, don't check it itself # but throw it into deps for later comparing against @@ -293,7 +293,8 @@ def get_recipe_order_and_bootstrap(ctx, names, bs=None, blacklist=None): orders.append(list(order)) # prefer python3 and SDL2 if available - orders.sort(key=lambda order: -('python3' in order) - ('sdl2' in order)) + orders = sorted(orders, + key=lambda order: -('python3' in order) - ('sdl2' in order)) if not orders: raise BuildInterruptingException( diff --git a/p4a/pythonforandroid/logger.py b/p4a/pythonforandroid/logger.py index b25b94c..8bcf85c 100644 --- a/p4a/pythonforandroid/logger.py +++ b/p4a/pythonforandroid/logger.py @@ -6,18 +6,7 @@ from sys import stdout, stderr from math import log10 from collections import defaultdict from colorama import Style as Colo_Style, Fore as Colo_Fore -import six -# This codecs change fixes a bug with log output, but crashes under python3 -if not six.PY3: - import codecs - stdout = codecs.getwriter('utf8')(stdout) - stderr = codecs.getwriter('utf8')(stderr) - -if six.PY2: - unistr = unicode # noqa F821 -else: - unistr = str # monkey patch to show full output sh.ErrorReturnCode.truncate_cap = 999999 @@ -40,7 +29,7 @@ class LevelDifferentiatingFormatter(logging.Formatter): record.msg = '{}{}[DEBUG]{}{}: '.format( Err_Style.BRIGHT, Err_Fore.LIGHTBLACK_EX, Err_Fore.RESET, Err_Style.RESET_ALL) + record.msg - return super(LevelDifferentiatingFormatter, self).format(record) + return super().format(record) logger = logging.getLogger('p4a') @@ -59,7 +48,7 @@ warning = logger.warning error = logger.error -class colorama_shim(object): +class colorama_shim: def __init__(self, real): self._dict = defaultdict(str) @@ -112,12 +101,12 @@ def shorten_string(string, max_width): return string visible = max_width - 16 - int(log10(string_len)) # expected suffix len "...(and XXXXX more)" - if not isinstance(string, unistr): - visstring = unistr(string[:visible], errors='ignore') + if not isinstance(string, str): + visstring = str(string[:visible], errors='ignore') else: visstring = string[:visible] return u''.join((visstring, u'...(and ', - unistr(string_len - visible), u' more)')) + str(string_len - visible), u' more)')) def get_console_width(): @@ -159,7 +148,6 @@ def shprint(command, *args, **kwargs): columns = 
get_console_width()
     command_path = str(command).split('/')
     command_string = command_path[-1]
-
     string = ' '.join(['{}->{} running'.format(Out_Fore.LIGHTBLACK_EX,
                                                Out_Style.RESET_ALL),
                        command_string] + list(args))
@@ -211,9 +199,9 @@ def shprint(command, *args, **kwargs):
                   re_filter_in=None, re_filter_out=None):
         lines = out.splitlines()
         if re_filter_in is not None:
-            lines = [l for l in lines if re_filter_in.search(l)]
+            lines = [line for line in lines if re_filter_in.search(line)]
         if re_filter_out is not None:
-            lines = [l for l in lines if not re_filter_out.search(l)]
+            lines = [line for line in lines if not re_filter_out.search(line)]
         if tail_n == 0 or len(lines) <= tail_n:
             info('{}:\n{}\t{}{}'.format(
                 name, forecolor, '\t\n'.join(lines), Out_Fore.RESET))
@@ -226,17 +214,18 @@ def shprint(command, *args, **kwargs):
             re.compile(filter_in) if filter_in else None,
             re.compile(filter_out) if filter_out else None)
         printtail(err.stderr.decode('utf-8'), 'STDERR', Err_Fore.RED)
-        if is_critical:
-            env = kwargs.get("env")
+        if is_critical or full_debug:
+            env = kwargs.get("_env")
             if env is not None:
                 info("{}ENV:{}\n{}\n".format(
                     Err_Fore.YELLOW, Err_Fore.RESET, "\n".join(
-                        "set {}={}".format(n, v) for n, v in env.items())))
+                        "export {}='{}'".format(n, v) for n, v in env.items())))
             info("{}COMMAND:{}\ncd {} && {} {}\n".format(
                 Err_Fore.YELLOW, Err_Fore.RESET, os.getcwd(), command,
                 ' '.join(args)))
             warning("{}ERROR: {} failed!{}".format(
                 Err_Fore.RED, command, Err_Fore.RESET))
+        if is_critical:
             exit(1)
         else:
             raise
diff --git a/p4a/pythonforandroid/patching.py b/p4a/pythonforandroid/patching.py
index 2a47733..96a3b27 100644
--- a/p4a/pythonforandroid/patching.py
+++ b/p4a/pythonforandroid/patching.py
@@ -1,4 +1,5 @@
 from os import uname
+from distutils.version import LooseVersion
 
 
 def check_all(*callables):
@@ -69,3 +70,21 @@ def is_ndk(ndk):
     def is_x(recipe, **kwargs):
         return recipe.ctx.ndk == ndk
     return is_x
+
+
+def is_version_gt(version):
+    def is_x(recipe, **kwargs):
+        return LooseVersion(recipe.version) > version
+    return is_x
+
+
+def is_version_lt(version):
+    def is_x(recipe, **kwargs):
+        return LooseVersion(recipe.version) < version
+    return is_x
+
+
+def version_starts_with(version):
+    def is_x(recipe, **kwargs):
+        return recipe.version.startswith(version)
+    return is_x
diff --git a/p4a/pythonforandroid/prerequisites.py b/p4a/pythonforandroid/prerequisites.py
new file mode 100644
index 0000000..d85eb0b
--- /dev/null
+++ b/p4a/pythonforandroid/prerequisites.py
@@ -0,0 +1,415 @@
+#!/usr/bin/env python3
+
+import sys
+import platform
+import os
+import subprocess
+import shutil
+from pythonforandroid.logger import info, warning, error
+
+
+class Prerequisite:
+    name = "Default"
+    homebrew_formula_name = ""
+    mandatory = dict(linux=False, darwin=False)
+    installer_is_supported = dict(linux=False, darwin=False)
+
+    def is_valid(self):
+        if self.checker():
+            info(f"Prerequisite {self.name} is met")
+            return (True, "")
+        elif not self.mandatory[sys.platform]:
+            warning(
+                f"Prerequisite {self.name} is not met, but is marked as non-mandatory"
+            )
+        else:
+            error(f"Prerequisite {self.name} is not met")
+
+    def checker(self):
+        if sys.platform == "darwin":
+            return self.darwin_checker()
+        elif sys.platform == "linux":
+            return self.linux_checker()
+        else:
+            raise Exception("Unsupported platform")
+
+    def ask_to_install(self):
+        if (
+            os.environ.get("PYTHONFORANDROID_PREREQUISITES_INSTALL_INTERACTIVE", "1")
+            == "1"
+        ):
+            res = input(
+                f"Do you want to automatically install prerequisite {self.name}? 
[y/N] " + ) + if res.lower() == "y": + return True + else: + return False + else: + info( + "Session is not interactive (usually this happens during a CI run), so let's consider it as a YES" + ) + return True + + def install(self): + info(f"python-for-android can automatically install prerequisite: {self.name}") + if self.ask_to_install(): + if sys.platform == "darwin": + self.darwin_installer() + elif sys.platform == "linux": + self.linux_installer() + else: + raise Exception("Unsupported platform") + else: + info( + f"Skipping installation of prerequisite {self.name} as per user request" + ) + + def show_helper(self): + if sys.platform == "darwin": + self.darwin_helper() + elif sys.platform == "linux": + self.linux_helper() + else: + raise Exception("Unsupported platform") + + def install_is_supported(self): + return self.installer_is_supported[sys.platform] + + def linux_checker(self): + raise Exception(f"Unsupported prerequisite check on linux for {self.name}") + + def darwin_checker(self): + raise Exception(f"Unsupported prerequisite check on macOS for {self.name}") + + def linux_installer(self): + raise Exception(f"Unsupported prerequisite installer on linux for {self.name}") + + def darwin_installer(self): + raise Exception(f"Unsupported prerequisite installer on macOS for {self.name}") + + def darwin_helper(self): + info(f"No helper available for prerequisite: {self.name} on macOS") + + def linux_helper(self): + info(f"No helper available for prerequisite: {self.name} on linux") + + def _darwin_get_brew_formula_location_prefix(self, formula, installed=False): + opts = ["--installed"] if installed else [] + p = subprocess.Popen( + ["brew", "--prefix", formula, *opts], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + _stdout_res, _stderr_res = p.communicate() + + if p.returncode != 0: + error(_stderr_res.decode("utf-8").strip()) + return None + else: + return _stdout_res.decode("utf-8").strip() + + def darwin_pkg_config_location(self): + warning( + f"pkg-config location is not supported on macOS for prerequisite: {self.name}" + ) + return "" + + def linux_pkg_config_location(self): + warning( + f"pkg-config location is not supported on linux for prerequisite: {self.name}" + ) + return "" + + @property + def pkg_config_location(self): + if sys.platform == "darwin": + return self.darwin_pkg_config_location() + elif sys.platform == "linux": + return self.linux_pkg_config_location() + + +class HomebrewPrerequisite(Prerequisite): + name = "homebrew" + mandatory = dict(linux=False, darwin=True) + installer_is_supported = dict(linux=False, darwin=False) + + def darwin_checker(self): + return shutil.which("brew") is not None + + def darwin_helper(self): + info( + "Installer for homebrew is not yet supported on macOS," + "the nice news is that the installation process is easy!" + "See: https://brew.sh for further instructions." 
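+            # (Aside, illustrative: darwin_checker() above is just a PATH
+            # lookup via shutil.which("brew"), so a Homebrew installed
+            # outside of PATH will still be reported as missing.)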
+        )
+
+
+class JDKPrerequisite(Prerequisite):
+    name = "JDK"
+    mandatory = dict(linux=False, darwin=True)
+    installer_is_supported = dict(linux=False, darwin=True)
+    min_supported_version = 11
+
+    def darwin_checker(self):
+        if "JAVA_HOME" in os.environ:
+            info("Found JAVA_HOME environment variable, using it")
+            jdk_path = os.environ["JAVA_HOME"]
+        else:
+            jdk_path = self._darwin_get_libexec_jdk_path(version=None)
+        return self._darwin_jdk_is_supported(jdk_path)
+
+    def _darwin_get_libexec_jdk_path(self, version=None):
+        version_args = []
+        if version is not None:
+            version_args = ["-v", version]
+        return (
+            subprocess.run(
+                ["/usr/libexec/java_home", *version_args],
+                stdout=subprocess.PIPE,
+            )
+            .stdout.strip()
+            .decode()
+        )
+
+    def _darwin_jdk_is_supported(self, jdk_path):
+        if not jdk_path:
+            return False
+
+        javac_bin = os.path.join(jdk_path, "bin", "javac")
+        if not os.path.exists(javac_bin):
+            return False
+
+        p = subprocess.Popen(
+            [javac_bin, "-version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
+        _stdout_res, _stderr_res = p.communicate()
+
+        if p.returncode != 0:
+            error("Failed to run javac to check JDK version")
+            return False
+
+        if not _stdout_res:
+            _stdout_res = _stderr_res
+
+        res = _stdout_res.strip().decode()
+
+        major_version = int(res.split(" ")[-1].split(".")[0])
+        if major_version >= self.min_supported_version:
+            info(f"Found a valid JDK at {jdk_path}")
+            return True
+        else:
+            error(f"JDK {self.min_supported_version} or higher is required")
+            return False
+
+    def darwin_helper(self):
+        info(
+            "python-for-android requires JDK 11 or higher to be installed "
+            "on macOS, but it seems you don't have one installed."
+        )
+        info(
+            "If you think that a valid JDK is already installed, please verify that "
+            "you have a JDK 11 or higher installed and that `/usr/libexec/java_home` "
+            "shows the correct path."
+        )
+        info(
+            "If you have multiple JDK installations, please make sure that you have "
+            "the `JAVA_HOME` environment variable set to the correct JDK installation."
+        )
+
+    def darwin_installer(self):
+        info(
+            "Looking for a JDK 11 or higher installation which is not the default one ..."
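+            # (Aside, illustrative: `/usr/libexec/java_home -v 11+` prints
+            # the JAVA_HOME of the newest installed JDK satisfying the
+            # version spec, and produces no output when nothing matches,
+            # which _darwin_jdk_is_supported() then treats as unsupported.)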
+ ) + jdk_path = self._darwin_get_libexec_jdk_path(version="11+") + + if not self._darwin_jdk_is_supported(jdk_path): + info("We're unlucky, there's no JDK 11 or higher installation available") + + base_url = "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.2%2B8/" + if platform.machine() == "arm64": + filename = "OpenJDK17U-jdk_aarch64_mac_hotspot_17.0.2_8.tar.gz" + else: + filename = "OpenJDK17U-jdk_x64_mac_hotspot_17.0.2_8.tar.gz" + + info(f"Downloading {filename} from {base_url}") + subprocess.check_output( + [ + "curl", + "-L", + f"{base_url}{filename}", + "-o", + f"/tmp/{filename}", + ] + ) + + user_library_java_path = os.path.expanduser( + "~/Library/Java/JavaVirtualMachines" + ) + info(f"Extracting {filename} to {user_library_java_path}") + subprocess.check_output( + [ + "mkdir", + "-p", + user_library_java_path, + ], + ) + subprocess.check_output( + ["tar", "xzf", f"/tmp/{filename}", "-C", user_library_java_path], + ) + + jdk_path = self._darwin_get_libexec_jdk_path(version="17.0.2+8") + + info(f"Setting JAVA_HOME to {jdk_path}") + os.environ["JAVA_HOME"] = jdk_path + + +class OpenSSLPrerequisite(Prerequisite): + name = "openssl" + homebrew_formula_name = "openssl@1.1" + mandatory = dict(linux=False, darwin=True) + installer_is_supported = dict(linux=False, darwin=True) + + def darwin_checker(self): + return ( + self._darwin_get_brew_formula_location_prefix( + self.homebrew_formula_name, installed=True + ) + is not None + ) + + def darwin_pkg_config_location(self): + return os.path.join( + self._darwin_get_brew_formula_location_prefix(self.homebrew_formula_name), + "lib/pkgconfig", + ) + + def darwin_installer(self): + info("Installing OpenSSL ...") + subprocess.check_output(["brew", "install", self.homebrew_formula_name]) + + +class AutoconfPrerequisite(Prerequisite): + name = "autoconf" + mandatory = dict(linux=False, darwin=True) + installer_is_supported = dict(linux=False, darwin=True) + + def darwin_checker(self): + return ( + self._darwin_get_brew_formula_location_prefix("autoconf", installed=True) + is not None + ) + + def darwin_installer(self): + info("Installing Autoconf ...") + subprocess.check_output(["brew", "install", "autoconf"]) + + +class AutomakePrerequisite(Prerequisite): + name = "automake" + mandatory = dict(linux=False, darwin=True) + installer_is_supported = dict(linux=False, darwin=True) + + def darwin_checker(self): + return ( + self._darwin_get_brew_formula_location_prefix("automake", installed=True) + is not None + ) + + def darwin_installer(self): + info("Installing Automake ...") + subprocess.check_output(["brew", "install", "automake"]) + + +class LibtoolPrerequisite(Prerequisite): + name = "libtool" + mandatory = dict(linux=False, darwin=True) + installer_is_supported = dict(linux=False, darwin=True) + + def darwin_checker(self): + return ( + self._darwin_get_brew_formula_location_prefix("libtool", installed=True) + is not None + ) + + def darwin_installer(self): + info("Installing Libtool ...") + subprocess.check_output(["brew", "install", "libtool"]) + + +class PkgConfigPrerequisite(Prerequisite): + name = "pkg-config" + mandatory = dict(linux=False, darwin=True) + installer_is_supported = dict(linux=False, darwin=True) + + def darwin_checker(self): + return ( + self._darwin_get_brew_formula_location_prefix("pkg-config", installed=True) + is not None + ) + + def darwin_installer(self): + info("Installing Pkg-Config ...") + subprocess.check_output(["brew", "install", "pkg-config"]) + + +class 
CmakePrerequisite(Prerequisite):
+    name = "cmake"
+    mandatory = dict(linux=False, darwin=True)
+    installer_is_supported = dict(linux=False, darwin=True)
+
+    def darwin_checker(self):
+        return (
+            self._darwin_get_brew_formula_location_prefix("cmake", installed=True)
+            is not None
+        )
+
+    def darwin_installer(self):
+        info("Installing cmake ...")
+        subprocess.check_output(["brew", "install", "cmake"])
+
+
+def get_required_prerequisites(platform="linux"):
+    DEFAULT_PREREQUISITES = dict(
+        darwin=[
+            HomebrewPrerequisite(),
+            AutoconfPrerequisite(),
+            AutomakePrerequisite(),
+            LibtoolPrerequisite(),
+            PkgConfigPrerequisite(),
+            CmakePrerequisite(),
+            OpenSSLPrerequisite(),
+            JDKPrerequisite(),
+        ],
+        linux=[],
+        all_platforms=[],
+    )
+
+    return DEFAULT_PREREQUISITES["all_platforms"] + DEFAULT_PREREQUISITES[platform]
+
+
+def check_and_install_default_prerequisites():
+
+    prerequisites_not_met = []
+
+    warning(
+        "prerequisites.py is experimental and does not support all prerequisites yet."
+    )
+    warning("Please report any issues to the python-for-android issue tracker.")
+
+    # Phase 1: Check if all prerequisites are met and add the ones
+    # which are not to `prerequisites_not_met`
+    for prerequisite in get_required_prerequisites(sys.platform):
+        if not prerequisite.is_valid():
+            prerequisites_not_met.append(prerequisite)
+
+    # Phase 2: Set up/install all prerequisites that are not met
+    # (where possible), otherwise show a helper.
+    for prerequisite in prerequisites_not_met:
+        prerequisite.show_helper()
+        if prerequisite.install_is_supported():
+            prerequisite.install()
+
+
+if __name__ == "__main__":
+    check_and_install_default_prerequisites()
diff --git a/p4a/pythonforandroid/pythonpackage.py b/p4a/pythonforandroid/pythonpackage.py
new file mode 100644
index 0000000..3b03a51
--- /dev/null
+++ b/p4a/pythonforandroid/pythonpackage.py
@@ -0,0 +1,787 @@
+""" This module offers high-level functions to get package metadata
+    like the METADATA file, the name, or a list of dependencies.
+
+    Usage examples:
+
+    # Getting package name from pip reference:
+    from pythonforandroid.pythonpackage import get_package_name
+    print(get_package_name("pillow"))
+    # Outputs: "Pillow" (note the spelling!)
+
+    # Getting package dependencies:
+    from pythonforandroid.pythonpackage import get_package_dependencies
+    print(get_package_dependencies("pep517"))
+    # Outputs: "['pytoml']"
+
+    # Get package name from arbitrary package source:
+    from pythonforandroid.pythonpackage import get_package_name
+    print(get_package_name("/some/local/project/folder/"))
+    # Outputs package name
+
+    NOTE:
+
+    Yes, this module doesn't fit well into python-for-android, but this
+    functionality isn't available ANYWHERE ELSE, and upstream (pip, ...)
+    currently has no interest in taking this over, so it has no other place
+    to go.
+    (Unless someone reading this puts it into yet another packaging lib)
+
+    Reference discussion/upstream inclusion attempt:
+
+    https://github.com/pypa/packaging-problems/issues/247
+
+"""
+
+
+import functools
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+import time
+import zipfile
+from io import open  # needed for python 2
+from urllib.parse import unquote as urlunquote
+from urllib.parse import urlparse
+
+import toml
+from pep517.envbuild import BuildEnvironment
+from pep517.wrappers import Pep517HookCaller
+
+
+def transform_dep_for_pip(dependency):
+    if dependency.find("@") > 0 and (
+            dependency.find("@") < dependency.find("://") or
+            "://" not in dependency
+            ):
+        # WORKAROUND FOR UPSTREAM BUG:
+        # https://github.com/pypa/pip/issues/6097
+        # (Please REMOVE workaround once that is fixed & released upstream!)
+        #
+        # Basically, setup_requires() can contain a format pip won't install
+        # from a requirements.txt (PEP 508 URLs).
+        # To avoid this, translate to an #egg= reference:
+        if dependency.endswith("#"):
+            dependency = dependency[:-1]
+        url = (dependency.partition("@")[2].strip().partition("#egg")[0] +
+               "#egg=" +
+               dependency.partition("@")[0].strip()
+               )
+        return url
+    return dependency
+
+
+def extract_metainfo_files_from_package(
+        package,
+        output_folder,
+        debug=False
+        ):
+    """ Extracts metadata files from the given package to the given folder.
+        The package may be referenced in any way that is permitted in
+        a requirements.txt file or install_requires=[] listing.
+
+        Currently supported metadata files that will be extracted:
+
+        - pyproject.toml  (only if the package wasn't obtained as a wheel)
+        - METADATA
+    """
+
+    if package is None:
+        raise ValueError("package cannot be None")
+
+    if not os.path.exists(output_folder) or os.path.isfile(output_folder):
+        raise ValueError("output folder needs to be an existing folder")
+
+    if debug:
+        print("extract_metainfo_files_from_package: extracting for " +
+              "package: " + str(package))
+
+    # A temp folder for making a package copy in case it's a local folder,
+    # because extracting metadata might modify files
+    # (creating sdists/wheels...)
+    temp_folder = tempfile.mkdtemp(prefix="pythonpackage-package-copy-")
+    try:
+        # Package is indeed a folder! Get a temp copy to work on:
+        if is_filesystem_path(package):
+            shutil.copytree(
+                parse_as_folder_reference(package),
+                os.path.join(temp_folder, "package"),
+                ignore=shutil.ignore_patterns(".tox")
+            )
+            package = os.path.join(temp_folder, "package")
+
+        # Because PEP517 can be noisy and contextlib.redirect_* fails to
+        # contain it, we will run the actual analysis in a separate process:
+        try:
+            subprocess.check_output([
+                sys.executable,
+                "-c",
+                "import importlib\n"
+                "import json\n"
+                "import os\n"
+                "import sys\n"
+                "sys.path = [os.path.dirname(sys.argv[3])] + sys.path\n"
+                "m = importlib.import_module(\n"
+                "    os.path.basename(sys.argv[3]).partition('.')[0]\n"
+                ")\n"
+                "m._extract_metainfo_files_from_package_unsafe("
+                "    sys.argv[1],"
+                "    sys.argv[2],"
+                ")",
+                package, output_folder, os.path.abspath(__file__)],
+                stderr=subprocess.STDOUT,  # make sure stderr is muted.
+                cwd=os.path.join(os.path.dirname(__file__), "..")
+            )
+        except subprocess.CalledProcessError as e:
+            output = e.output.decode("utf-8", "replace")
+            if debug:
+                print("Got error obtaining meta info.")
+                print("Detail output:")
+                print(output)
+                print("End of Detail output.")
+            raise ValueError(
+                "failed to obtain meta info - "
+                "is '{}' a valid package? 
" + "Detailed output:\n{}".format(package, output) + ) + finally: + shutil.rmtree(temp_folder) + + +def _get_system_python_executable(): + """ Returns the path the system-wide python binary. + (In case we're running in a virtualenv or venv) + """ + # This function is required by get_package_as_folder() to work + # inside a virtualenv, since venv creation will fail with + # the virtualenv's local python binary. + # (venv/virtualenv incompatibility) + + # Abort if not in virtualenv or venv: + if not hasattr(sys, "real_prefix") and ( + not hasattr(sys, "base_prefix") or + os.path.normpath(sys.base_prefix) == + os.path.normpath(sys.prefix)): + return sys.executable + + # Extract prefix we need to look in: + if hasattr(sys, "real_prefix"): + search_prefix = sys.real_prefix # virtualenv + else: + search_prefix = sys.base_prefix # venv + + def python_binary_from_folder(path): + def binary_is_usable(python_bin): + """ Helper function to see if a given binary name refers + to a usable python interpreter binary + """ + + # Abort if path isn't present at all or a directory: + if not os.path.exists( + os.path.join(path, python_bin) + ) or os.path.isdir(os.path.join(path, python_bin)): + return + # We should check file not found anyway trying to run it, + # since it might be a dead symlink: + try: + filenotfounderror = FileNotFoundError + except NameError: # Python 2 + filenotfounderror = OSError + try: + # Run it and see if version output works with no error: + subprocess.check_output([ + os.path.join(path, python_bin), "--version" + ], stderr=subprocess.STDOUT) + return True + except (subprocess.CalledProcessError, filenotfounderror): + return False + + python_name = "python" + sys.version + while (not binary_is_usable(python_name) and + python_name.find(".") > 0): + # Try less specific binary name: + python_name = python_name.rpartition(".")[0] + if binary_is_usable(python_name): + return os.path.join(path, python_name) + return None + + # Return from sys.real_prefix if present: + result = python_binary_from_folder(search_prefix) + if result is not None: + return result + + # Check out all paths in $PATH: + bad_candidates = [] + good_candidates = [] + ever_had_nonvenv_path = False + ever_had_path_starting_with_prefix = False + for p in os.environ.get("PATH", "").split(":"): + # Skip if not possibly the real system python: + if not os.path.normpath(p).startswith( + os.path.normpath(search_prefix) + ): + continue + + ever_had_path_starting_with_prefix = True + + # First folders might be virtualenv/venv we want to avoid: + if not ever_had_nonvenv_path: + sep = os.path.sep + if ( + ("system32" not in p.lower() and + "usr" not in p and + not p.startswith("/opt/python")) or + {"home", ".tox"}.intersection(set(p.split(sep))) or + "users" in p.lower() + ): + # Doesn't look like bog-standard system path. + if (p.endswith(os.path.sep + "bin") or + p.endswith(os.path.sep + "bin" + os.path.sep)): + # Also ends in "bin" -> likely virtualenv/venv. + # Add as unfavorable / end of candidates: + bad_candidates.append(p) + continue + ever_had_nonvenv_path = True + + good_candidates.append(p) + + # If we have a bad env with PATH not containing any reference to our + # real python (travis, why would you do that to me?) then just guess + # based from the search prefix location itself: + if not ever_had_path_starting_with_prefix: + # ... 
and yes we're scanning all the folders for that, it's dumb + # but i'm not aware of a better way: (@JonasT) + for root, dirs, files in os.walk(search_prefix, topdown=True): + for name in dirs: + bad_candidates.append(os.path.join(root, name)) + + # Sort candidates by length (to prefer shorter ones): + def candidate_cmp(a, b): + return len(a) - len(b) + good_candidates = sorted( + good_candidates, key=functools.cmp_to_key(candidate_cmp) + ) + bad_candidates = sorted( + bad_candidates, key=functools.cmp_to_key(candidate_cmp) + ) + + # See if we can now actually find the system python: + for p in good_candidates + bad_candidates: + result = python_binary_from_folder(p) + if result is not None: + return result + + raise RuntimeError( + "failed to locate system python in: {}" + " - checked candidates were: {}, {}" + .format(sys.real_prefix, good_candidates, bad_candidates) + ) + + +def get_package_as_folder(dependency): + """ This function downloads the given package / dependency and extracts + the raw contents into a folder. + + Afterwards, it returns a tuple with the type of distribution obtained, + and the temporary folder it extracted to. It is the caller's + responsibility to delete the returned temp folder after use. + + Examples of returned values: + + ("source", "/tmp/pythonpackage-venv-e84toiwjw") + ("wheel", "/tmp/pythonpackage-venv-85u78uj") + + What the distribution type will be depends on what pip decides to + download. + """ + + venv_parent = tempfile.mkdtemp( + prefix="pythonpackage-venv-" + ) + try: + # Create a venv to install into: + try: + if int(sys.version.partition(".")[0]) < 3: + # Python 2.x has no venv. + subprocess.check_output([ + sys.executable, # no venv conflict possible, + # -> no need to use system python + "-m", "virtualenv", + "--python=" + _get_system_python_executable(), + os.path.join(venv_parent, 'venv') + ], cwd=venv_parent) + else: + # On modern Python 3, use venv. + subprocess.check_output([ + _get_system_python_executable(), "-m", "venv", + os.path.join(venv_parent, 'venv') + ], cwd=venv_parent) + except subprocess.CalledProcessError as e: + output = e.output.decode('utf-8', 'replace') + raise ValueError( + 'venv creation unexpectedly ' + + 'failed. error output: ' + str(output) + ) + venv_path = os.path.join(venv_parent, "venv") + + # Update pip and wheel in venv for latest feature support: + try: + filenotfounderror = FileNotFoundError + except NameError: # Python 2. + filenotfounderror = OSError + try: + subprocess.check_output([ + os.path.join(venv_path, "bin", "pip"), + "install", "-U", "pip", "wheel", + ]) + except filenotfounderror: + raise RuntimeError( + "venv appears to be missing pip. " + "did we fail to use a proper system python??\n" + "system python path detected: {}\n" + "os.environ['PATH']: {}".format( + _get_system_python_executable(), + os.environ.get("PATH", "") + ) + ) + + # Create download subfolder: + os.mkdir(os.path.join(venv_path, "download")) + + # Write a requirements.txt with our package and download: + with open(os.path.join(venv_path, "requirements.txt"), + "w", encoding="utf-8" + ) as f: + def to_unicode(s): # Needed for Python 2. 
+ try: + return s.decode("utf-8") + except AttributeError: + return s + f.write(to_unicode(transform_dep_for_pip(dependency))) + try: + subprocess.check_output( + [ + os.path.join(venv_path, "bin", "pip"), + "download", "--no-deps", "-r", "../requirements.txt", + "-d", os.path.join(venv_path, "download") + ], + stderr=subprocess.STDOUT, + cwd=os.path.join(venv_path, "download") + ) + except subprocess.CalledProcessError as e: + raise RuntimeError("package download failed: " + str(e.output)) + + if len(os.listdir(os.path.join(venv_path, "download"))) == 0: + # No download. This can happen if the dependency has a condition + # which prohibits install in our environment. + # (the "package ; ... conditional ... " type of condition) + return (None, None) + + # Get the result and make sure it's an extracted directory: + result_folder_or_file = os.path.join( + venv_path, "download", + os.listdir(os.path.join(venv_path, "download"))[0] + ) + dl_type = "source" + if not os.path.isdir(result_folder_or_file): + # Must be an archive. + if result_folder_or_file.endswith((".zip", ".whl")): + if result_folder_or_file.endswith(".whl"): + dl_type = "wheel" + with zipfile.ZipFile(result_folder_or_file) as f: + f.extractall(os.path.join(venv_path, + "download", "extracted" + )) + result_folder_or_file = os.path.join( + venv_path, "download", "extracted" + ) + elif result_folder_or_file.find(".tar.") > 0: + # Probably a tarball. + with tarfile.open(result_folder_or_file) as f: + f.extractall(os.path.join(venv_path, + "download", "extracted" + )) + result_folder_or_file = os.path.join( + venv_path, "download", "extracted" + ) + else: + raise RuntimeError( + "unknown archive or download " + + "type: " + str(result_folder_or_file) + ) + + # If the result is hidden away in an additional subfolder, + # descend into it: + while os.path.isdir(result_folder_or_file) and \ + len(os.listdir(result_folder_or_file)) == 1 and \ + os.path.isdir(os.path.join( + result_folder_or_file, + os.listdir(result_folder_or_file)[0] + )): + result_folder_or_file = os.path.join( + result_folder_or_file, + os.listdir(result_folder_or_file)[0] + ) + + # Copy result to new dedicated folder so we can throw away + # our entire virtualenv nonsense after returning: + result_path = tempfile.mkdtemp() + shutil.rmtree(result_path) + shutil.copytree(result_folder_or_file, result_path) + return (dl_type, result_path) + finally: + shutil.rmtree(venv_parent) + + +def _extract_metainfo_files_from_package_unsafe( + package, + output_path + ): + # This is the unwrapped function that will + # 1. make lots of stdout/stderr noise + # 2. possibly modify files (if the package source is a local folder) + # Use extract_metainfo_files_from_package_folder instead which avoids + # these issues. + + clean_up_path = False + path_type = "source" + path = parse_as_folder_reference(package) + if path is None: + # This is not a path. Download it: + (path_type, path) = get_package_as_folder(package) + if path_type is None: + # Download failed. + raise ValueError( + "cannot get info for this package, " + + "pip says it has no downloads (conditional dependency?)" + ) + clean_up_path = True + + try: + build_requires = [] + metadata_path = None + + if path_type != "wheel": + # We need to process this first to get the metadata. 
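+            # (Aside, illustrative: a wheel already ships a ready-made
+            # <name>.dist-info/METADATA, whereas an sdist or source tree
+            # only gains one after PEP 517's
+            # prepare_metadata_for_build_wheel() runs, hence the
+            # build-backend setup below.)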
+ + # Ensure pyproject.toml is available (pep517 expects it) + if not os.path.exists(os.path.join(path, "pyproject.toml")): + with open(os.path.join(path, "pyproject.toml"), "w") as f: + f.write(textwrap.dedent(u"""\ + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + """)) + + # Copy the pyproject.toml: + shutil.copyfile( + os.path.join(path, 'pyproject.toml'), + os.path.join(output_path, 'pyproject.toml') + ) + + # Get build backend and requirements from pyproject.toml: + with open(os.path.join(path, 'pyproject.toml')) as f: + build_sys = toml.load(f)['build-system'] + backend = build_sys["build-backend"] + build_requires.extend(build_sys["requires"]) + + # Get a virtualenv with build requirements and get all metadata: + env = BuildEnvironment() + metadata = None + with env: + hooks = Pep517HookCaller(path, backend) + env.pip_install( + [transform_dep_for_pip(req) for req in build_requires] + ) + reqs = hooks.get_requires_for_build_wheel({}) + env.pip_install([transform_dep_for_pip(req) for req in reqs]) + try: + metadata = hooks.prepare_metadata_for_build_wheel(path) + except Exception: # sadly, pep517 has no good error here + pass + if metadata is not None: + metadata_path = os.path.join( + path, metadata, "METADATA" + ) + else: + # This is a wheel, so metadata should be in *.dist-info folder: + metadata_path = os.path.join( + path, + [f for f in os.listdir(path) if f.endswith(".dist-info")][0], + "METADATA" + ) + + # Store type of metadata source. Can be "wheel", "source" for source + # distribution, and others get_package_as_folder() may support + # in the future. + with open(os.path.join(output_path, "metadata_source"), "w") as f: + try: + f.write(path_type) + except TypeError: # in python 2 path_type may be str/bytes: + f.write(path_type.decode("utf-8", "replace")) + + # Copy the metadata file: + shutil.copyfile(metadata_path, os.path.join(output_path, "METADATA")) + finally: + if clean_up_path: + shutil.rmtree(path) + + +def is_filesystem_path(dep): + """ Convenience function around parse_as_folder_reference() to + check if a dependency refers to a folder path or something remote. + + Returns True if local, False if remote. + """ + return (parse_as_folder_reference(dep) is not None) + + +def parse_as_folder_reference(dep): + """ See if a dependency reference refers to a folder path. + If it does, return the folder path (which parses and + resolves file:// urls in the process). + If it doesn't, return None. + """ + # Special case: pep508 urls + if dep.find("@") > 0 and ( + (dep.find("@") < dep.find("/") or "/" not in dep) and + (dep.find("@") < dep.find(":") or ":" not in dep) + ): + # This should be a 'pkgname @ https://...' style path, or + # 'pkname @ /local/file/path'. + return parse_as_folder_reference(dep.partition("@")[2].lstrip()) + + # Check if this is either not an url, or a file URL: + if dep.startswith(("/", "file://")) or ( + dep.find("/") > 0 and + dep.find("://") < 0) or (dep in ["", "."]): + if dep.startswith("file://"): + dep = urlunquote(urlparse(dep).path) + return dep + return None + + +def _extract_info_from_package(dependency, + extract_type=None, + debug=False, + include_build_requirements=False + ): + """ Internal function to extract metainfo from a package. 
+ Currently supported info types: + + - name + - dependencies (a list of dependencies) + """ + if debug: + print("_extract_info_from_package called with " + "extract_type={} include_build_requirements={}".format( + extract_type, include_build_requirements, + )) + output_folder = tempfile.mkdtemp(prefix="pythonpackage-metafolder-") + try: + extract_metainfo_files_from_package( + dependency, output_folder, debug=debug + ) + + # Extract the type of data source we used to get the metadata: + with open(os.path.join(output_folder, + "metadata_source"), "r") as f: + metadata_source_type = f.read().strip() + + # Extract main METADATA file: + with open(os.path.join(output_folder, "METADATA"), + "r", encoding="utf-8" + ) as f: + # Get metadata and cut away description (is after 2 linebreaks) + metadata_entries = f.read().partition("\n\n")[0].splitlines() + + if extract_type == "name": + name = None + for meta_entry in metadata_entries: + if meta_entry.lower().startswith("name:"): + return meta_entry.partition(":")[2].strip() + if name is None: + raise ValueError("failed to obtain package name") + return name + elif extract_type == "dependencies": + # First, make sure we don't attempt to return build requirements + # for wheels since they usually come without pyproject.toml + # and we haven't implemented another way to get them: + if include_build_requirements and \ + metadata_source_type == "wheel": + if debug: + print("_extract_info_from_package: was called " + "with include_build_requirements=True on " + "package obtained as wheel, raising error...") + raise NotImplementedError( + "fetching build requirements for " + "wheels is not implemented" + ) + + # Get build requirements from pyproject.toml if requested: + requirements = [] + if os.path.exists(os.path.join(output_folder, + 'pyproject.toml') + ) and include_build_requirements: + # Read build system from pyproject.toml file: (PEP518) + with open(os.path.join(output_folder, 'pyproject.toml')) as f: + build_sys = toml.load(f)['build-system'] + if "requires" in build_sys: + requirements += build_sys["requires"] + elif include_build_requirements: + # For legacy packages with no pyproject.toml, we have to + # add setuptools as default build system. + requirements.append("setuptools") + + # Add requirements from metadata: + requirements += [ + entry.rpartition("Requires-Dist:")[2].strip() + for entry in metadata_entries + if entry.startswith("Requires-Dist") + ] + + return list(set(requirements)) # remove duplicates + finally: + shutil.rmtree(output_folder) + + +package_name_cache = dict() + + +def get_package_name(dependency, + use_cache=True): + def timestamp(): + try: + return time.monotonic() + except AttributeError: + return time.time() # Python 2. + try: + value = package_name_cache[dependency] + if value[0] + 600.0 > timestamp() and use_cache: + return value[1] + except KeyError: + pass + result = _extract_info_from_package(dependency, extract_type="name") + package_name_cache[dependency] = (timestamp(), result) + return result + + +def get_package_dependencies(package, + recursive=False, + verbose=False, + include_build_requirements=False): + """ Obtain the dependencies from a package. Please note this + function is possibly SLOW, especially if you enable + the recursive mode. 
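+
+        Example (illustrative, mirroring the module docstring):
+
+            get_package_dependencies("pep517")
+            # -> a set of PEP 508 requirement strings, e.g. {'pytoml'}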
+ """ + packages_processed = set() + package_queue = [package] + reqs = set() + reqs_as_names = set() + while len(package_queue) > 0: + current_queue = package_queue + package_queue = [] + for package_dep in current_queue: + new_reqs = set() + if verbose: + print("get_package_dependencies: resolving dependency " + f"to package name: {package_dep}") + package = get_package_name(package_dep) + if package.lower() in packages_processed: + continue + if verbose: + print("get_package_dependencies: " + "processing package: {}".format(package)) + print("get_package_dependencies: " + "Packages seen so far: {}".format( + packages_processed + )) + packages_processed.add(package.lower()) + + # Use our regular folder processing to examine: + new_reqs = new_reqs.union(_extract_info_from_package( + package_dep, extract_type="dependencies", + debug=verbose, + include_build_requirements=include_build_requirements, + )) + + # Process new requirements: + if verbose: + print('get_package_dependencies: collected ' + "deps of '{}': {}".format( + package_dep, str(new_reqs), + )) + for new_req in new_reqs: + try: + req_name = get_package_name(new_req) + except ValueError as e: + if new_req.find(";") >= 0: + # Conditional dep where condition isn't met? + # --> ignore it + continue + if verbose: + print("get_package_dependencies: " + + "unexpected failure to get name " + + "of '" + str(new_req) + "': " + + str(e)) + raise RuntimeError( + "failed to get " + + "name of dependency: " + str(e) + ) + if req_name.lower() in reqs_as_names: + continue + if req_name.lower() not in packages_processed: + package_queue.append(new_req) + reqs.add(new_req) + reqs_as_names.add(req_name.lower()) + + # Bail out here if we're not scanning recursively: + if not recursive: + package_queue[:] = [] # wipe queue + break + if verbose: + print("get_package_dependencies: returning result: {}".format(reqs)) + return reqs + + +def get_dep_names_of_package( + package, + keep_version_pins=False, + recursive=False, + verbose=False, + include_build_requirements=False + ): + """ Gets the dependencies from the package in the given folder, + then attempts to deduce the actual package name resulting + from each dependency line, stripping away everything else. + """ + + # First, obtain the dependencies: + dependencies = get_package_dependencies( + package, recursive=recursive, verbose=verbose, + include_build_requirements=include_build_requirements, + ) + if verbose: + print("get_dep_names_of_package_folder: " + + "processing dependency list to names: " + + str(dependencies)) + + # Transform dependencies to their stripped down names: + # (they can still have version pins/restrictions, conditionals, ...) + dependency_names = set() + for dep in dependencies: + # If we are supposed to keep exact version pins, extract first: + pin_to_append = "" + if keep_version_pins and "(==" in dep and dep.endswith(")"): + # This is a dependency of the format: 'pkg (==1.0)' + pin_to_append = "==" + dep.rpartition("==")[2][:-1] + elif keep_version_pins and "==" in dep and not dep.endswith(")"): + # This is a dependency of the format: 'pkg==1.0' + pin_to_append = "==" + dep.rpartition("==")[2] + # Now get true (and e.g. 
case-corrected) dependency name: + dep_name = get_package_name(dep) + pin_to_append + dependency_names.add(dep_name) + return dependency_names diff --git a/p4a/pythonforandroid/recipe.py b/p4a/pythonforandroid/recipe.py index 071aa22..67c309e 100644 --- a/p4a/pythonforandroid/recipe.py +++ b/p4a/pythonforandroid/recipe.py @@ -1,8 +1,6 @@ from os.path import basename, dirname, exists, isdir, isfile, join, realpath, split -import importlib import glob from shutil import rmtree -from six import PY2, with_metaclass import hashlib from re import match @@ -10,6 +8,8 @@ from re import match import sh import shutil import fnmatch +import urllib.request +from urllib.request import urlretrieve from os import listdir, unlink, environ, mkdir, curdir, walk from sys import stdout import time @@ -18,26 +18,14 @@ try: except ImportError: from urllib.parse import urlparse from pythonforandroid.logger import (logger, info, warning, debug, shprint, info_main) -from pythonforandroid.util import (urlretrieve, current_directory, ensure_dir, +from pythonforandroid.util import (current_directory, ensure_dir, BuildInterruptingException) +from pythonforandroid.util import load_source as import_recipe -# this import is necessary to keep imp.load_source from complaining :) -if PY2: - import imp - import_recipe = imp.load_source -else: - import importlib.util - if hasattr(importlib.util, 'module_from_spec'): - def import_recipe(module, filename): - spec = importlib.util.spec_from_file_location(module, filename) - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) - return mod - else: - from importlib.machinery import SourceFileLoader - def import_recipe(module, filename): - return SourceFileLoader(module, filename).load_module() +url_opener = urllib.request.build_opener() +url_orig_headers = url_opener.addheaders +urllib.request.install_opener(url_opener) class RecipeMeta(type): @@ -48,10 +36,10 @@ class RecipeMeta(type): if 'version' in dct: dct['_version'] = dct.pop('version') - return super(RecipeMeta, cls).__new__(cls, name, bases, dct) + return super().__new__(cls, name, bases, dct) -class Recipe(with_metaclass(RecipeMeta)): +class Recipe(metaclass=RecipeMeta): _url = None '''The address from which the recipe may be downloaded. This is not essential, it may be omitted if the source is available some other @@ -76,6 +64,18 @@ class Recipe(with_metaclass(RecipeMeta)): finished correctly. ''' + sha512sum = None + '''The sha512sum of the source from the :attr:`url`. Non-essential, but + you should try to include this, it is used to check that the download + finished correctly. + ''' + + blake2bsum = None + '''The blake2bsum of the source from the :attr:`url`. Non-essential, but + you should try to include this, it is used to check that the download + finished correctly. + ''' + depends = [] '''A list containing the names of any recipes that this recipe depends on. ''' @@ -102,6 +102,50 @@ class Recipe(with_metaclass(RecipeMeta)): archs = ['armeabi'] # Not currently implemented properly + built_libraries = {} + """Each recipe that builds a system library (e.g.:libffi, openssl, etc...) + should contain a dict holding the relevant information of the library. The + keys should be the generated libraries and the values the relative path of + the library inside his build folder. 
This dict will be used to perform + different operations: + - copy the library into the right location, depending on if it's shared + or static) + - check if we have to rebuild the library + + Here an example of how it would look like for `libffi` recipe: + + - `built_libraries = {'libffi.so': '.libs'}` + + .. note:: in case that the built library resides in recipe's build + directory, you can set the following values for the relative + path: `'.', None or ''` + """ + + need_stl_shared = False + '''Some libraries or python packages may need the c++_shared in APK. + We can automatically do this for any recipe if we set this property to + `True`''' + + stl_lib_name = 'c++_shared' + ''' + The default STL shared lib to use: `c++_shared`. + + .. note:: Android NDK version > 17 only supports 'c++_shared', because + starting from NDK r18 the `gnustl_shared` lib has been deprecated. + ''' + + def get_stl_library(self, arch): + return join( + arch.ndk_lib_dir, + 'lib{name}.so'.format(name=self.stl_lib_name), + ) + + def install_stl_lib(self, arch): + if not self.ctx.has_lib( + arch.arch, 'lib{name}.so'.format(name=self.stl_lib_name) + ): + self.install_libs(arch, self.get_stl_library(arch)) + @property def version(self): key = 'VERSION_' + self.name @@ -149,35 +193,46 @@ class Recipe(with_metaclass(RecipeMeta)): # Download item with multiple attempts (for bad connections): attempts = 0 + seconds = 1 while True: try: + # jqueryui.com returns a 403 w/ the default user agent + # Mozilla/5.0 doesnt handle redirection for liblzma + url_opener.addheaders = [('User-agent', 'Wget/1.0')] urlretrieve(url, target, report_hook) except OSError as e: attempts += 1 if attempts >= 5: - raise e - stdout.write('Download failed retrying in a second...') - time.sleep(1) + raise + stdout.write('Download failed: {}; retrying in {} second(s)...'.format(e, seconds)) + time.sleep(seconds) + seconds *= 2 continue + finally: + url_opener.addheaders = url_orig_headers break return target elif parsed_url.scheme in ('git', 'git+file', 'git+ssh', 'git+http', 'git+https'): - if isdir(target): - with current_directory(target): - shprint(sh.git, 'fetch', '--tags') - if self.version: - shprint(sh.git, 'checkout', self.version) - shprint(sh.git, 'pull') - shprint(sh.git, 'pull', '--recurse-submodules') - shprint(sh.git, 'submodule', 'update', '--recursive') - else: + if not isdir(target): if url.startswith('git+'): url = url[4:] - shprint(sh.git, 'clone', '--recursive', url, target) + # if 'version' is specified, do a shallow clone if self.version: + shprint(sh.mkdir, '-p', target) with current_directory(target): - shprint(sh.git, 'checkout', self.version) - shprint(sh.git, 'submodule', 'update', '--recursive') + shprint(sh.git, 'init') + shprint(sh.git, 'remote', 'add', 'origin', url) + else: + shprint(sh.git, 'clone', '--recursive', url, target) + with current_directory(target): + if self.version: + shprint(sh.git, 'fetch', '--depth', '1', 'origin', self.version) + shprint(sh.git, 'checkout', self.version) + branch = sh.git('branch', '--show-current') + if branch: + shprint(sh.git, 'pull') + shprint(sh.git, 'pull', '--recurse-submodules') + shprint(sh.git, 'submodule', 'update', '--recursive', '--init', '--depth', '1') return target def apply_patch(self, filename, arch, build_dir=None): @@ -298,16 +353,19 @@ class Recipe(with_metaclass(RecipeMeta)): return url = self.versioned_url - ma = match(u'^(.+)#md5=([0-9a-f]{32})$', url) - if ma: # fragmented URL? 
@@ -298,16 +353,19 @@ class Recipe(with_metaclass(RecipeMeta)):
             return
 
         url = self.versioned_url
-        ma = match(u'^(.+)#md5=([0-9a-f]{32})$', url)
-        if ma:                  # fragmented URL?
-            if self.md5sum:
-                raise ValueError(
-                    ('Received md5sum from both the {} recipe '
-                     'and its url').format(self.name))
-            url = ma.group(1)
-            expected_md5 = ma.group(2)
-        else:
-            expected_md5 = self.md5sum
+        expected_digests = {}
+        for alg in set(hashlib.algorithms_guaranteed) | set(('md5', 'sha512', 'blake2b')):
+            expected_digest = getattr(self, alg + 'sum') if hasattr(self, alg + 'sum') else None
+            ma = match(u'^(.+)#' + alg + u'=([0-9a-f]{32,})$', url)
+            if ma:                  # fragmented URL?
+                if expected_digest:
+                    raise ValueError(
+                        ('Received {}sum from both the {} recipe '
+                         'and its url').format(alg, self.name))
+                url = ma.group(1)
+                expected_digest = ma.group(2)
+            if expected_digest:
+                expected_digests[alg] = expected_digest
 
         shprint(sh.mkdir, '-p', join(self.ctx.packages_path, self.name))
 
@@ -319,16 +377,17 @@ class Recipe(with_metaclass(RecipeMeta)):
             if exists(filename) and isfile(filename):
                 if not exists(marker_filename):
                     shprint(sh.rm, filename)
-                elif expected_md5:
-                    current_md5 = md5sum(filename)
-                    if current_md5 != expected_md5:
-                        debug('* Generated md5sum: {}'.format(current_md5))
-                        debug('* Expected md5sum: {}'.format(expected_md5))
-                        raise ValueError(
-                            ('Generated md5sum does not match expected md5sum '
-                             'for {} recipe').format(self.name))
-                    do_download = False
                 else:
+                    for alg, expected_digest in expected_digests.items():
+                        current_digest = algsum(alg, filename)
+                        if current_digest != expected_digest:
+                            debug('* Generated {}sum: {}'.format(alg,
+                                                                 current_digest))
+                            debug('* Expected {}sum: {}'.format(alg,
+                                                                expected_digest))
+                            raise ValueError(
+                                ('Generated {0}sum does not match expected {0}sum '
+                                 'for {1} recipe').format(alg, self.name))
                     do_download = False
 
             # If we got this far, we will download
@@ -339,15 +398,17 @@ class Recipe(with_metaclass(RecipeMeta)):
                     self.download_file(self.versioned_url, filename)
                     shprint(sh.touch, marker_filename)
 
-                if exists(filename) and isfile(filename) and expected_md5:
-                    current_md5 = md5sum(filename)
-                    if expected_md5 is not None:
-                        if current_md5 != expected_md5:
-                            debug('* Generated md5sum: {}'.format(current_md5))
-                            debug('* Expected md5sum: {}'.format(expected_md5))
+                if exists(filename) and isfile(filename):
+                    for alg, expected_digest in expected_digests.items():
+                        current_digest = algsum(alg, filename)
+                        if current_digest != expected_digest:
+                            debug('* Generated {}sum: {}'.format(alg,
+                                                                 current_digest))
+                            debug('* Expected {}sum: {}'.format(alg,
+                                                                expected_digest))
                             raise ValueError(
-                                ('Generated md5sum does not match expected md5sum '
-                                 'for {} recipe').format(self.name))
+                                ('Generated {0}sum does not match expected {0}sum '
+                                 'for {1} recipe').format(alg, self.name))
             else:
                 info('{} download already cached, skipping'.format(self.name))
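For clarity, the digest handling above boils down to a tiny name-driven helper plus a map from algorithm name to expected hexdigest; any algorithm guaranteed by hashlib can drive the check, whether pinned as a recipe attribute or appended to the url as a fragment. A runnable sketch (the file contents and digest values are illustrative):

    import hashlib
    import tempfile

    def algsum(alg, filen):
        # Mirrors the helper this diff adds at the bottom of recipe.py: look
        # up the hashlib constructor by name and digest the whole file.
        with open(filen, 'rb') as fileh:
            return getattr(hashlib, alg)(fileh.read()).hexdigest()

    # A recipe can now pin md5sum/sha512sum/blake2bsum as class attributes,
    # or any guaranteed algorithm via a url fragment such as
    # 'https://example.com/pkg.tar.gz#sha256=...' (illustrative url).
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b'hello')
    assert algsum('sha256', f.name) == hashlib.sha256(b'hello').hexdigest()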
@@ -375,7 +436,7 @@ class Recipe(with_metaclass(RecipeMeta)):
             filename = shprint(
                 sh.basename, self.versioned_url).stdout[:-1].decode('utf-8')
 
-            ma = match(u'^(.+)#md5=([0-9a-f]{32})$', filename)
+            ma = match(u'^(.+)#[a-z0-9_]{3,}=([0-9a-f]{32,})$', filename)
             if ma:                  # fragmented URL?
                 filename = ma.group(1)
@@ -405,7 +466,7 @@ class Recipe(with_metaclass(RecipeMeta)):
                     sh.tar('xf', extraction_filename)
                     root_directory = sh.tar('tf', extraction_filename).stdout.decode(
                         'utf-8').split('\n')[0].split('/')[0]
-                    if root_directory != directory_name:
+                    if root_directory != basename(directory_name):
                         shprint(sh.mv, root_directory, directory_name)
                 else:
                     raise Exception(
@@ -426,12 +487,13 @@ class Recipe(with_metaclass(RecipeMeta)):
         else:
             info('{} is already unpacked, skipping'.format(self.name))
 
-    def get_recipe_env(self, arch=None, with_flags_in_cc=True, clang=False):
+    def get_recipe_env(self, arch=None, with_flags_in_cc=True):
         """Return the env specialized for the recipe
         """
         if arch is None:
             arch = self.filtered_archs[0]
-        return arch.get_env(with_flags_in_cc=with_flags_in_cc, clang=clang)
+        env = arch.get_env(with_flags_in_cc=with_flags_in_cc)
+        return env
 
     def prebuild_arch(self, arch):
         '''Run any pre-build tasks for the Recipe. By default, this checks if
@@ -475,9 +537,14 @@ class Recipe(with_metaclass(RecipeMeta)):
 
     def should_build(self, arch):
         '''Should perform any necessary test and return True only if it needs
-        building again.
+        building again. By default we implement a library test for recipes
+        that define `built_libraries`.
 
         '''
+        if self.built_libraries:
+            return not all(
+                exists(lib) for lib in self.get_libraries(arch.arch)
+            )
         return True
 
     def build_arch(self, arch):
@@ -488,6 +555,19 @@ class Recipe(with_metaclass(RecipeMeta)):
             if hasattr(self, build):
                 getattr(self, build)()
 
+    def install_libraries(self, arch):
+        '''This method is always called after `build_arch`. If we detect a
+        library recipe, i.e. one defining the class attribute
+        `built_libraries`, we copy all the defined libraries into the
+        right location.
+        '''
+        if not self.built_libraries:
+            return
+        shared_libs = [
+            lib for lib in self.get_libraries(arch) if lib.endswith(".so")
+        ]
+        self.install_libs(arch, *shared_libs)
+
     def postbuild_arch(self, arch):
         '''Run any post-build tasks for the Recipe. By default, this checks if
         any postbuild_archname methods exist for the archname of the
@@ -497,6 +577,9 @@ class Recipe(with_metaclass(RecipeMeta)):
             if hasattr(self, postbuild):
                 getattr(self, postbuild)()
 
+        if self.need_stl_shared:
+            self.install_stl_lib(arch)
+
     def prepare_build_dir(self, arch):
         '''Copies the recipe data into a build dir for the given arch. By
         default, this unpacks a downloaded recipe. You should override
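With `built_libraries`, `should_build` and `install_libraries` in place, a library recipe reduces to declaring its outputs: the skip-rebuild check and the copy into the libs collection come for free. A hypothetical sketch (the `libbar` name, url and paths are invented for illustration; only `build_arch` remains recipe-specific):

    import sh
    from pythonforandroid.logger import shprint
    from pythonforandroid.recipe import Recipe
    from pythonforandroid.util import current_directory

    class LibBarRecipe(Recipe):
        version = '1.0'
        url = 'https://example.com/libbar-{version}.tar.gz'  # invented url
        # Each built library, mapped to its location relative to the build
        # dir; should_build() now skips rebuilds when all of them exist.
        built_libraries = {'libbar.so': 'src/.libs'}
        # Also package libc++_shared.so, assuming libbar links the STL.
        need_stl_shared = True

        def build_arch(self, arch):
            env = self.get_recipe_env(arch)
            with current_directory(self.get_build_dir(arch.arch)):
                # Hypothetical upstream build steps:
                shprint(sh.Command('./configure'), _env=env)
                shprint(sh.make, '-j4', _env=env)

    recipe = LibBarRecipe()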
@@ -548,7 +631,28 @@ class Recipe(with_metaclass(RecipeMeta)):
             shprint(sh.cp, *args)
 
     def has_libs(self, arch, *libs):
-        return all(map(lambda l: self.ctx.has_lib(arch.arch, l), libs))
+        return all(map(lambda lib: self.ctx.has_lib(arch.arch, lib), libs))
+
+    def get_libraries(self, arch_name, in_context=False):
+        """Return the full path of the library depending on the architecture.
+        By default, the library path inside the build directory is returned,
+        unless `get_libraries` has been called with kwarg `in_context` set
+        to True.
+
+        .. note:: this method should be used for library recipes only
+        """
+        recipe_libs = set()
+        if not self.built_libraries:
+            return recipe_libs
+        for lib, rel_path in self.built_libraries.items():
+            if not in_context:
+                abs_path = join(self.get_build_dir(arch_name), rel_path, lib)
+                if rel_path in {".", "", None}:
+                    abs_path = join(self.get_build_dir(arch_name), lib)
+            else:
+                abs_path = join(self.ctx.get_libs_dir(arch_name), lib)
+            recipe_libs.add(abs_path)
+        return recipe_libs
 
     @classmethod
     def recipe_dirs(cls, ctx):
@@ -596,7 +700,7 @@ class Recipe(with_metaclass(RecipeMeta)):
             if recipe_file is not None:
                 break
 
-        if not recipe_file:
+        else:
             raise ValueError('Recipe does not exist: {}'.format(name))
 
         mod = import_recipe('pythonforandroid.recipes.{}'.format(name), recipe_file)
@@ -649,17 +753,14 @@ class BootstrapNDKRecipe(Recipe):
         return join(self.ctx.bootstrap.build_dir, 'jni')
 
     def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=False):
-        env = super(BootstrapNDKRecipe, self).get_recipe_env(
-            arch, with_flags_in_cc)
+        env = super().get_recipe_env(arch, with_flags_in_cc)
         if not with_python:
             return env
 
         env['PYTHON_INCLUDE_ROOT'] = self.ctx.python_recipe.include_root(arch.arch)
         env['PYTHON_LINK_ROOT'] = self.ctx.python_recipe.link_root(arch.arch)
         env['EXTRA_LDLIBS'] = ' -lpython{}'.format(
-            self.ctx.python_recipe.major_minor_version_string)
-        #if 'python3' in self.ctx.python_recipe.name:
-        #    env['EXTRA_LDLIBS'] += 'm'
+            self.ctx.python_recipe.link_version)
         return env
 
 
@@ -684,13 +785,14 @@ class NDKRecipe(Recipe):
         return join(self.get_build_dir(arch.arch), 'jni')
 
     def build_arch(self, arch, *extra_args):
-        super(NDKRecipe, self).build_arch(arch)
+        super().build_arch(arch)
 
         env = self.get_recipe_env(arch)
        with current_directory(self.get_build_dir(arch.arch)):
             shprint(
-                sh.ndk_build,
+                sh.Command(join(self.ctx.ndk_dir, "ndk-build")),
                 'V=1',
+                'NDK_DEBUG=' + ("1" if self.ctx.build_as_debuggable else "0"),
                 'APP_PLATFORM=android-' + str(self.ctx.ndk_api),
                 'APP_ABI=' + arch.arch,
                 *extra_args, _env=env
@@ -721,17 +823,35 @@ class PythonRecipe(Recipe):
     This is almost always what you want to do.'''
 
     setup_extra_args = []
-    '''List of extra arugments to pass to setup.py'''
+    '''List of extra arguments to pass to setup.py'''
+
+    depends = ['python3']
+    '''
+    .. note:: it's important to keep this `depends` as a class attribute
+        outside `__init__` because sometimes we only initialize the class,
+        so `__init__` won't be called and the deps would be missing
+        (which breaks the dependency graph computation)
+
+    .. warning:: don't forget to call `super().__init__()` in any recipe's
+        `__init__`, otherwise we cannot ensure that the recipe depends on
+        python3, which can break the dependency graph
+    '''
 
     def __init__(self, *args, **kwargs):
-        super(PythonRecipe, self).__init__(*args, **kwargs)
-        depends = self.depends
-        depends.append(('python2', 'python2legacy', 'python3', 'python3crystax'))
-        depends = list(set(depends))
-        self.depends = depends
+        super().__init__(*args, **kwargs)
+
+        if 'python3' not in self.depends:
+            # We ensure here that the recipe depends on python even if it overrode
+            # 
We only do this if it doesn't already depend on any + # python, since some recipes intentionally don't depend on/work + # with all python variants + depends = self.depends + depends.append('python3') + depends = list(set(depends)) + self.depends = depends def clean_build(self, arch=None): - super(PythonRecipe, self).clean_build(arch=arch) + super().clean_build(arch=arch) name = self.folder_name python_install_dirs = glob.glob(join(self.ctx.python_installs_dir, '*')) for python_install in python_install_dirs: @@ -746,11 +866,9 @@ class PythonRecipe(Recipe): @property def real_hostpython_location(self): host_name = 'host{}'.format(self.ctx.python_recipe.name) - host_build = Recipe.get_recipe(host_name, self.ctx).get_build_dir() - if host_name in ['hostpython2', 'hostpython3']: - return join(host_build, 'native-build', 'python') - elif host_name in ['hostpython3crystax', 'hostpython2legacy']: - return join(host_build, 'hostpython') + if host_name == 'hostpython3': + python_recipe = Recipe.get_recipe(host_name, self.ctx) + return python_recipe.python_exe else: python_recipe = self.ctx.python_recipe return 'python{}'.format(python_recipe.version) @@ -770,7 +888,7 @@ class PythonRecipe(Recipe): return name def get_recipe_env(self, arch=None, with_flags_in_cc=True): - env = super(PythonRecipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) env['PYTHONNOUSERSITE'] = '1' @@ -779,31 +897,13 @@ class PythonRecipe(Recipe): env['LANG'] = "en_GB.UTF-8" if not self.call_hostpython_via_targetpython: - # sets python headers/linkages...depending on python's recipe - python_name = self.ctx.python_recipe.name - python_version = self.ctx.python_recipe.version - python_short_version = '.'.join(python_version.split('.')[:2]) - if not self.ctx.python_recipe.from_crystax: - env['CFLAGS'] += ' -I{}'.format( - self.ctx.python_recipe.include_root(arch.arch)) - env['LDFLAGS'] += ' -L{} -lpython{}'.format( - self.ctx.python_recipe.link_root(arch.arch), - self.ctx.python_recipe.major_minor_version_string) - if python_name == 'python3': - env['LDFLAGS'] += 'm' - elif python_name == 'python2legacy': - env['PYTHON_ROOT'] = join( - self.ctx.python_recipe.get_build_dir( - arch.arch), 'python-install') - else: - ndk_dir_python = join(self.ctx.ndk_dir, 'sources', - 'python', python_version) - env['CFLAGS'] += ' -I{} '.format( - join(ndk_dir_python, 'include', - 'python')) - env['LDFLAGS'] += ' -L{}'.format( - join(ndk_dir_python, 'libs', arch.arch)) - env['LDFLAGS'] += ' -lpython{}'.format(python_short_version) + env['CFLAGS'] += ' -I{}'.format( + self.ctx.python_recipe.include_root(arch.arch) + ) + env['LDFLAGS'] += ' -L{} -lpython{}'.format( + self.ctx.python_recipe.link_root(arch.arch), + self.ctx.python_recipe.link_version, + ) hppath = [] hppath.append(join(dirname(self.hostpython_location), 'Lib')) @@ -821,7 +921,7 @@ class PythonRecipe(Recipe): def should_build(self, arch): name = self.folder_name - if self.ctx.has_package(name): + if self.ctx.has_package(name, arch): info('Python package already exists in site-packages') return False info('{} apparently isn\'t already in site-packages'.format(name)) @@ -830,7 +930,7 @@ class PythonRecipe(Recipe): def build_arch(self, arch): '''Install the Python module by calling setup.py install with the target Python dir.''' - super(PythonRecipe, self).build_arch(arch) + super().build_arch(arch) self.install_python_package(arch) def install_python_package(self, arch, name=None, env=None, is_dir=True): @@ -844,29 +944,13 @@ class 
PythonRecipe(Recipe): info('Installing {} into site-packages'.format(self.name)) + hostpython = sh.Command(self.hostpython_location) + hpenv = env.copy() with current_directory(self.get_build_dir(arch.arch)): - hostpython = sh.Command(self.hostpython_location) - - if self.ctx.python_recipe.name != 'python2legacy': - hpenv = env.copy() - shprint(hostpython, 'setup.py', 'install', '-O2', - '--root={}'.format(self.ctx.get_python_install_dir()), - '--install-lib=.', - _env=hpenv, *self.setup_extra_args) - elif self.call_hostpython_via_targetpython: - shprint(hostpython, 'setup.py', 'install', '-O2', _env=env, - *self.setup_extra_args) - else: # python2legacy - hppath = join(dirname(self.hostpython_location), 'Lib', 'site-packages') - hpenv = env.copy() - if 'PYTHONPATH' in hpenv: - hpenv['PYTHONPATH'] = ':'.join([hppath] + hpenv['PYTHONPATH'].split(':')) - else: - hpenv['PYTHONPATH'] = hppath - shprint(hostpython, 'setup.py', 'install', '-O2', - '--root={}'.format(self.ctx.get_python_install_dir()), - '--install-lib=lib/python2.7/site-packages', - _env=hpenv, *self.setup_extra_args) + shprint(hostpython, 'setup.py', 'install', '-O2', + '--root={}'.format(self.ctx.get_python_install_dir(arch.arch)), + '--install-lib=.', + _env=hpenv, *self.setup_extra_args) # If asked, also install in the hostpython build dir if self.install_in_hostpython: @@ -903,8 +987,8 @@ class CompiledComponentsPythonRecipe(PythonRecipe): info('Building compiled components in {}'.format(self.name)) env = self.get_recipe_env(arch) + hostpython = sh.Command(self.hostpython_location) with current_directory(self.get_build_dir(arch.arch)): - hostpython = sh.Command(self.hostpython_location) if self.install_in_hostpython: shprint(hostpython, 'setup.py', 'clean', '--all', _env=env) shprint(hostpython, 'setup.py', self.build_cmd, '-v', @@ -916,7 +1000,7 @@ class CompiledComponentsPythonRecipe(PythonRecipe): def install_hostpython_package(self, arch): env = self.get_hostrecipe_env(arch) self.rebuild_compiled_components(arch, env) - super(CompiledComponentsPythonRecipe, self).install_hostpython_package(arch) + super().install_hostpython_package(arch) def rebuild_compiled_components(self, arch, env): info('Rebuilding compiled components in {}'.format(self.name)) @@ -930,35 +1014,7 @@ class CompiledComponentsPythonRecipe(PythonRecipe): class CppCompiledComponentsPythonRecipe(CompiledComponentsPythonRecipe): """ Extensions that require the cxx-stl """ call_hostpython_via_targetpython = False - - def get_recipe_env(self, arch): - env = super(CppCompiledComponentsPythonRecipe, self).get_recipe_env(arch) - keys = dict( - ctx=self.ctx, - arch=arch, - arch_noeabi=arch.arch.replace('eabi', '') - ) - env['LDSHARED'] = env['CC'] + ' -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions' - env['CFLAGS'] += ( - " -I{ctx.ndk_dir}/platforms/android-{ctx.android_api}/arch-{arch_noeabi}/usr/include" + - " -I{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/include" + - " -I{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/include").format(**keys) - env['CXXFLAGS'] = env['CFLAGS'] + ' -frtti -fexceptions' - env['LDFLAGS'] += ( - " -L{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}" + - " -lgnustl_shared").format(**keys) - - return env - - def build_compiled_components(self, arch): - super(CppCompiledComponentsPythonRecipe, self).build_compiled_components(arch) - - # Copy libgnustl_shared.so - with current_directory(self.get_build_dir(arch.arch)): - sh.cp( - 
"{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/libgnustl_shared.so".format(ctx=self.ctx, arch=arch), - self.ctx.get_libs_dir(arch.arch) - ) + need_stl_shared = True class CythonRecipe(PythonRecipe): @@ -967,13 +1023,6 @@ class CythonRecipe(PythonRecipe): cython_args = [] call_hostpython_via_targetpython = False - def __init__(self, *args, **kwargs): - super(CythonRecipe, self).__init__(*args, **kwargs) - depends = self.depends - depends.append(('python2', 'python2legacy', 'python3', 'python3crystax')) - depends = list(set(depends)) - self.depends = depends - def build_arch(self, arch): '''Build any cython components, then install the Python module by calling setup.py install with the target Python dir. @@ -1011,25 +1060,20 @@ class CythonRecipe(PythonRecipe): info('First build appeared to complete correctly, skipping manual' 'cythonising.') - self.strip_object_files(arch, env) + if not self.ctx.with_debug_symbols: + self.strip_object_files(arch, env) def strip_object_files(self, arch, env, build_dir=None): if build_dir is None: build_dir = self.get_build_dir(arch.arch) with current_directory(build_dir): info('Stripping object files') - if self.ctx.python_recipe.name == 'python2legacy': - info('Stripping object files') - build_lib = glob.glob('./build/lib*') - shprint(sh.find, build_lib[0], '-name', '*.o', '-exec', - env['STRIP'], '{}', ';', _env=env) - else: - shprint(sh.find, '.', '-iname', '*.so', '-exec', - '/usr/bin/echo', '{}', ';', _env=env) - shprint(sh.find, '.', '-iname', '*.so', '-exec', - env['STRIP'].split(' ')[0], '--strip-unneeded', - # '/usr/bin/strip', '--strip-unneeded', - '{}', ';', _env=env) + shprint(sh.find, '.', '-iname', '*.so', '-exec', + '/usr/bin/echo', '{}', ';', _env=env) + shprint(sh.find, '.', '-iname', '*.so', '-exec', + env['STRIP'].split(' ')[0], '--strip-unneeded', + # '/usr/bin/strip', '--strip-unneeded', + '{}', ';', _env=env) def cythonize_file(self, env, build_dir, filename): short_filename = filename @@ -1043,9 +1087,12 @@ class CythonRecipe(PythonRecipe): del cyenv['PYTHONPATH'] if 'PYTHONNOUSERSITE' in cyenv: cyenv.pop('PYTHONNOUSERSITE') - cython = 'cython' if self.ctx.python_recipe.from_crystax else self.ctx.cython - cython_command = sh.Command(cython) - shprint(cython_command, filename, *self.cython_args, _env=cyenv) + python_command = sh.Command("python{}".format( + self.ctx.python_recipe.major_minor_version_string.split(".")[0] + )) + shprint(python_command, "-c" + "import sys; from Cython.Compiler.Main import setuptools_main; sys.exit(setuptools_main());", + filename, *self.cython_args, _env=cyenv) def cythonize_build(self, env, build_dir="."): if not self.cythonize: @@ -1057,23 +1104,16 @@ class CythonRecipe(PythonRecipe): self.cythonize_file(env, build_dir, join(root, filename)) def get_recipe_env(self, arch, with_flags_in_cc=True): - env = super(CythonRecipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) env['LDFLAGS'] = env['LDFLAGS'] + ' -L{} '.format( self.ctx.get_libs_dir(arch.arch) + ' -L{} '.format(self.ctx.libs_dir) + ' -L{}'.format(join(self.ctx.bootstrap.build_dir, 'obj', 'local', arch.arch))) - if self.ctx.python_recipe.from_crystax: - env['LDFLAGS'] = (env['LDFLAGS'] + - ' -L{}'.format(join(self.ctx.bootstrap.build_dir, 'libs', arch.arch))) - if self.ctx.python_recipe.name == 'python2legacy': - env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink.sh') - else: - env['LDSHARED'] = env['CC'] + ' -shared' + env['LDSHARED'] = env['CC'] 
+ ' -shared'
         # shprint(sh.whereis, env['LDSHARED'], _env=env)
         env['LIBLINK'] = 'NOTNONE'
-        env['NDKPLATFORM'] = self.ctx.ndk_platform
         if self.ctx.copy_libs:
             env['COPYLIBS'] = '1'
 
@@ -1084,24 +1124,6 @@ class CythonRecipe(PythonRecipe):
             env['LIBLINK_PATH'] = liblink_path
             ensure_dir(liblink_path)
 
-        # Add crystax-specific site packages:
-        if self.ctx.python_recipe.from_crystax:
-            command = sh.Command('python{}'.format(self.ctx.python_recipe.version))
-            site_packages_dirs = command(
-                '-c', 'import site; print("\\n".join(site.getsitepackages()))')
-            site_packages_dirs = site_packages_dirs.stdout.decode('utf-8').split('\n')
-            if 'PYTHONPATH' in env:
-                env['PYTHONPATH'] = env['PYTHONPATH'] +\
-                    ':{}'.format(':'.join(site_packages_dirs))
-            else:
-                env['PYTHONPATH'] = ':'.join(site_packages_dirs)
-            while env['PYTHONPATH'].find("::") > 0:
-                env['PYTHONPATH'] = env['PYTHONPATH'].replace("::", ":")
-            if env['PYTHONPATH'].endswith(":"):
-                env['PYTHONPATH'] = env['PYTHONPATH'][:-1]
-            if env['PYTHONPATH'].startswith(":"):
-                env['PYTHONPATH'] = env['PYTHONPATH'][1:]
-
         return env
 
 
@@ -1109,20 +1131,12 @@ class TargetPythonRecipe(Recipe):
     '''Class for target python recipes. Sets ctx.python_recipe to point to
     itself, so as to know later what kind of Python was built or used.'''
 
-    from_crystax = False
-    '''True if the python is used from CrystaX, False otherwise (i.e. if
-    it is built by p4a).'''
-
    def __init__(self, *args, **kwargs):
         self._ctx = None
-        super(TargetPythonRecipe, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     def prebuild_arch(self, arch):
-        super(TargetPythonRecipe, self).prebuild_arch(arch)
-        if self.from_crystax and self.ctx.ndk != 'crystax':
-            raise BuildInterruptingException(
-                'The {} recipe can only be built when '
-                'using the CrystaX NDK. Exiting.'.format(self.name))
+        super().prebuild_arch(arch)
         self.ctx.python_recipe = self
 
     def include_root(self, arch):
@@ -1160,10 +1174,10 @@ class TargetPythonRecipe(Recipe):
             shprint(sh.mv, filen, join(file_dirname, parts[0] + '.so'))
 
 
-def md5sum(filen):
-    '''Calculate the md5sum of a file.
+def algsum(alg, filen):
    '''Calculate the digest of a file.
     '''
     with open(filen, 'rb') as fileh:
-        md5 = hashlib.md5(fileh.read())
+        digest = getattr(hashlib, alg)(fileh.read())
 
-    return md5.hexdigest()
+    return digest.hexdigest()
diff --git a/p4a/pythonforandroid/recipes/Pillow/__init__.py b/p4a/pythonforandroid/recipes/Pillow/__init__.py
index 14c9d2b..f8f6929 100644
--- a/p4a/pythonforandroid/recipes/Pillow/__init__.py
+++ b/p4a/pythonforandroid/recipes/Pillow/__init__.py
@@ -1,28 +1,43 @@
-from pythonforandroid.recipe import CompiledComponentsPythonRecipe
 from os.path import join
 
+from pythonforandroid.recipe import CompiledComponentsPythonRecipe
+
 
 class PillowRecipe(CompiledComponentsPythonRecipe):
+    """
+    A recipe for Pillow (previously known as PIL).
 
-    version = '5.2.0'
+    This recipe allows us to build Pillow with support for different types
+    of images and fonts. Be aware that in order to use some of the features
+    of Pillow, we must build some libraries. By default we automatically
+    trigger the build of the libraries below::
+
+        - freetype: rendering fonts support.
+        - harfbuzz: a text shaping library.
+        - jpeg: reading and writing JPEG image files.
+        - png: support for PNG images.
+
+    You can also enable the build of some extra image types by requesting
+    the relevant libraries via the `requirements` argument::
+
+        - libwebp: library to encode and decode images in WebP format.
+ """ + + version = '8.4.0' url = 'https://github.com/python-pillow/Pillow/archive/{version}.tar.gz' site_packages_name = 'Pillow' depends = ['png', 'jpeg', 'freetype', 'setuptools'] - patches = [join('patches', 'fix-docstring.patch'), - join('patches', 'fix-setup.patch')] + opt_depends = ['libwebp'] + patches = [join('patches', 'fix-setup.patch')] call_hostpython_via_targetpython = False def get_recipe_env(self, arch=None, with_flags_in_cc=True): - env = super(PillowRecipe, self).get_recipe_env(arch, with_flags_in_cc) - - env['ANDROID_ROOT'] = join(self.ctx.ndk_platform, 'usr') - ndk_lib_dir = join(self.ctx.ndk_platform, 'usr', 'lib') - ndk_include_dir = join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include') + env = super().get_recipe_env(arch, with_flags_in_cc) png = self.get_recipe('png', self.ctx) - png_lib_dir = png.get_lib_dir(arch) - png_jni_dir = png.get_jni_dir(arch) + png_lib_dir = join(png.get_build_dir(arch.arch), '.libs') + png_inc_dir = png.get_build_dir(arch) jpeg = self.get_recipe('jpeg', self.ctx) jpeg_inc_dir = jpeg_lib_dir = jpeg.get_build_dir(arch.arch) @@ -37,22 +52,38 @@ class PillowRecipe(CompiledComponentsPythonRecipe): harf_lib_dir = join(harfbuzz.get_build_dir(arch.arch), 'src', '.libs') harf_inc_dir = harfbuzz.get_build_dir(arch.arch) - env['JPEG_ROOT'] = '{}|{}'.format(jpeg_lib_dir, jpeg_inc_dir) - env['FREETYPE_ROOT'] = '{}|{}'.format(free_lib_dir, free_inc_dir) - env['ZLIB_ROOT'] = '{}|{}'.format(ndk_lib_dir, ndk_include_dir) + # libwebp is an optional dependency, so we add the + # flags if we have it in our `ctx.recipe_build_order` + build_with_webp_support = 'libwebp' in self.ctx.recipe_build_order + if build_with_webp_support: + webp = self.get_recipe('libwebp', self.ctx) + webp_install = join( + webp.get_build_dir(arch.arch), 'installation' + ) - cflags = ' -I{}'.format(png_jni_dir) - cflags += ' -I{} -I{}'.format(harf_inc_dir, join(harf_inc_dir, 'src')) - cflags += ' -I{}'.format(free_inc_dir) - cflags += ' -I{}'.format(jpeg_inc_dir) - cflags += ' -I{}'.format(ndk_include_dir) + # Add libraries includes to CFLAGS + cflags = f' -I{png_inc_dir}' + cflags += f' -I{harf_inc_dir} -I{join(harf_inc_dir, "src")}' + cflags += f' -I{free_inc_dir}' + cflags += f' -I{jpeg_inc_dir}' + if build_with_webp_support: + cflags += f' -I{join(webp_install, "include")}' + cflags += f' -I{self.ctx.ndk.sysroot_include_dir}' - env['LIBS'] = ' -lpng -lfreetype -lharfbuzz -ljpeg -lturbojpeg' + # Link the basic Pillow libraries...no need to add webp's libraries + # since it seems that the linkage is properly made without it :) + env['LIBS'] = ' -lpng -lfreetype -lharfbuzz -ljpeg -lturbojpeg -lm' - env['LDFLAGS'] += ' -L{} -L{} -L{} -L{}'.format( - png_lib_dir, harf_lib_dir, jpeg_lib_dir, ndk_lib_dir) + # Add libraries locations to LDFLAGS + env['LDFLAGS'] += f' -L{png_lib_dir}' + env['LDFLAGS'] += f' -L{free_lib_dir}' + env['LDFLAGS'] += f' -L{harf_lib_dir}' + env['LDFLAGS'] += f' -L{jpeg_lib_dir}' + if build_with_webp_support: + env['LDFLAGS'] += f' -L{join(webp_install, "lib")}' + env['LDFLAGS'] += f' -L{arch.ndk_lib_dir_versioned}' if cflags not in env['CFLAGS']: - env['CFLAGS'] += cflags + env['CFLAGS'] += cflags + " -lm" return env diff --git a/p4a/pythonforandroid/recipes/Pillow/patches/fix-setup.patch b/p4a/pythonforandroid/recipes/Pillow/patches/fix-setup.patch index 3b0ccef..5c5a3d0 100644 --- a/p4a/pythonforandroid/recipes/Pillow/patches/fix-setup.patch +++ b/p4a/pythonforandroid/recipes/Pillow/patches/fix-setup.patch @@ -1,148 +1,196 @@ -diff --git a/setup.py b/setup.py 
-index 761d552..4ddc598 100755 ---- a/setup.py -+++ b/setup.py -@@ -136,12 +136,12 @@ except (ImportError, OSError): +--- Pillow.orig/setup.py 2021-11-01 14:50:48.000000000 +0100 ++++ Pillow/setup.py 2021-11-01 14:51:31.000000000 +0100 +@@ -125,7 +125,7 @@ + "codec_fd", + ) - NAME = 'Pillow' - PILLOW_VERSION = get_version() --JPEG_ROOT = None -+JPEG_ROOT = tuple(os.environ['JPEG_ROOT'].split('|')) if 'JPEG_ROOT' in os.environ else None - JPEG2K_ROOT = None --ZLIB_ROOT = None -+ZLIB_ROOT = tuple(os.environ['ZLIB_ROOT'].split('|')) if 'ZLIB_ROOT' in os.environ else None - IMAGEQUANT_ROOT = None - TIFF_ROOT = None --FREETYPE_ROOT = None -+FREETYPE_ROOT = tuple(os.environ['FREETYPE_ROOT'].split('|')) if 'FREETYPE_ROOT' in os.environ else None - LCMS_ROOT = None +-DEBUG = False ++DEBUG = True # So we can easely triage user issues. -@@ -194,7 +194,7 @@ class pil_build_ext(build_ext): - ] + class DependencyException(Exception): +@@ -411,46 +411,6 @@ + include_dirs = [] - def initialize_options(self): -- self.disable_platform_guessing = None -+ self.disable_platform_guessing = True - build_ext.initialize_options(self) - for x in self.feature: - setattr(self, 'disable_%s' % x, None) -@@ -466,61 +466,6 @@ class pil_build_ext(build_ext): - feature.jpeg = "libjpeg" # alternative name - - feature.openjpeg_version = None -- if feature.want('jpeg2000'): -- _dbg('Looking for jpeg2000') -- best_version = None -- best_path = None + pkg_config = None +- if _cmd_exists(os.environ.get("PKG_CONFIG", "pkg-config")): +- pkg_config = _pkg_config - -- # Find the best version -- for directory in self.compiler.include_dirs: -- _dbg('Checking for openjpeg-#.# in %s', directory) +- # +- # add configured kits +- for root_name, lib_name in dict( +- JPEG_ROOT="libjpeg", +- JPEG2K_ROOT="libopenjp2", +- TIFF_ROOT=("libtiff-5", "libtiff-4"), +- ZLIB_ROOT="zlib", +- FREETYPE_ROOT="freetype2", +- HARFBUZZ_ROOT="harfbuzz", +- FRIBIDI_ROOT="fribidi", +- LCMS_ROOT="lcms2", +- IMAGEQUANT_ROOT="libimagequant", +- ).items(): +- root = globals()[root_name] +- +- if root is None and root_name in os.environ: +- prefix = os.environ[root_name] +- root = (os.path.join(prefix, "lib"), os.path.join(prefix, "include")) +- +- if root is None and pkg_config: +- if isinstance(lib_name, tuple): +- for lib_name2 in lib_name: +- _dbg(f"Looking for `{lib_name2}` using pkg-config.") +- root = pkg_config(lib_name2) +- if root: +- break +- else: +- _dbg(f"Looking for `{lib_name}` using pkg-config.") +- root = pkg_config(lib_name) +- +- if isinstance(root, tuple): +- lib_root, include_root = root +- else: +- lib_root = include_root = root +- +- _add_directory(library_dirs, lib_root) +- _add_directory(include_dirs, include_root) + + # respect CFLAGS/CPPFLAGS/LDFLAGS + for k in ("CFLAGS", "CPPFLAGS", "LDFLAGS"): +@@ -471,137 +431,6 @@ + for d in os.environ[k].split(os.path.pathsep): + _add_directory(library_dirs, d) + +- _add_directory(library_dirs, os.path.join(sys.prefix, "lib")) +- _add_directory(include_dirs, os.path.join(sys.prefix, "include")) +- +- # +- # add platform directories +- +- if self.disable_platform_guessing: +- pass +- +- elif sys.platform == "cygwin": +- # pythonX.Y.dll.a is in the /usr/lib/pythonX.Y/config directory +- _add_directory( +- library_dirs, +- os.path.join( +- "/usr/lib", "python{}.{}".format(*sys.version_info), "config" +- ), +- ) +- +- elif sys.platform == "darwin": +- # attempt to make sure we pick freetype2 over other versions +- _add_directory(include_dirs, "/sw/include/freetype2") +- _add_directory(include_dirs, 
"/sw/lib/freetype2/include") +- # fink installation directories +- _add_directory(library_dirs, "/sw/lib") +- _add_directory(include_dirs, "/sw/include") +- # darwin ports installation directories +- _add_directory(library_dirs, "/opt/local/lib") +- _add_directory(include_dirs, "/opt/local/include") +- +- # if Homebrew is installed, use its lib and include directories +- try: +- prefix = ( +- subprocess.check_output(["brew", "--prefix"]) +- .strip() +- .decode("latin1") +- ) +- except Exception: +- # Homebrew not installed +- prefix = None +- +- ft_prefix = None +- +- if prefix: +- # add Homebrew's include and lib directories +- _add_directory(library_dirs, os.path.join(prefix, "lib")) +- _add_directory(include_dirs, os.path.join(prefix, "include")) +- _add_directory( +- include_dirs, os.path.join(prefix, "opt", "zlib", "include") +- ) +- ft_prefix = os.path.join(prefix, "opt", "freetype") +- +- if ft_prefix and os.path.isdir(ft_prefix): +- # freetype might not be linked into Homebrew's prefix +- _add_directory(library_dirs, os.path.join(ft_prefix, "lib")) +- _add_directory(include_dirs, os.path.join(ft_prefix, "include")) +- else: +- # fall back to freetype from XQuartz if +- # Homebrew's freetype is missing +- _add_directory(library_dirs, "/usr/X11/lib") +- _add_directory(include_dirs, "/usr/X11/include") +- +- # SDK install path +- sdk_path = "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk" +- if not os.path.exists(sdk_path): - try: -- listdir = os.listdir(directory) +- sdk_path = ( +- subprocess.check_output(["xcrun", "--show-sdk-path"]) +- .strip() +- .decode("latin1") +- ) - except Exception: -- # WindowsError, FileNotFoundError -- continue -- for name in listdir: -- if name.startswith('openjpeg-') and \ -- os.path.isfile(os.path.join(directory, name, -- 'openjpeg.h')): -- _dbg('Found openjpeg.h in %s/%s', (directory, name)) -- version = tuple(int(x) for x in name[9:].split('.')) -- if best_version is None or version > best_version: -- best_version = version -- best_path = os.path.join(directory, name) -- _dbg('Best openjpeg version %s so far in %s', -- (best_version, best_path)) +- sdk_path = None +- if sdk_path: +- _add_directory(library_dirs, os.path.join(sdk_path, "usr", "lib")) +- _add_directory(include_dirs, os.path.join(sdk_path, "usr", "include")) +- elif ( +- sys.platform.startswith("linux") +- or sys.platform.startswith("gnu") +- or sys.platform.startswith("freebsd") +- ): +- for dirname in _find_library_dirs_ldconfig(): +- _add_directory(library_dirs, dirname) +- if sys.platform.startswith("linux") and os.environ.get( +- "ANDROID_ROOT", None +- ): +- # termux support for android. +- # system libraries (zlib) are installed in /system/lib +- # headers are at $PREFIX/include +- # user libs are at $PREFIX/lib +- _add_directory( +- library_dirs, os.path.join(os.environ["ANDROID_ROOT"], "lib") +- ) - -- if best_version and _find_library_file(self, 'openjp2'): -- # Add the directory to the include path so we can include -- # rather than having to cope with the versioned -- # include path -- # FIXME (melvyn-sopacua): -- # At this point it's possible that best_path is already in -- # self.compiler.include_dirs. Should investigate how that is -- # possible. 
-- _add_directory(self.compiler.include_dirs, best_path, 0) -- feature.jpeg2000 = 'openjp2' -- feature.openjpeg_version = '.'.join(str(x) for x in best_version) +- elif sys.platform.startswith("netbsd"): +- _add_directory(library_dirs, "/usr/pkg/lib") +- _add_directory(include_dirs, "/usr/pkg/include") - -- if feature.want('imagequant'): -- _dbg('Looking for imagequant') -- if _find_include_file(self, 'libimagequant.h'): -- if _find_library_file(self, "imagequant"): -- feature.imagequant = "imagequant" -- elif _find_library_file(self, "libimagequant"): -- feature.imagequant = "libimagequant" +- elif sys.platform.startswith("sunos5"): +- _add_directory(library_dirs, "/opt/local/lib") +- _add_directory(include_dirs, "/opt/local/include") - -- if feature.want('tiff'): -- _dbg('Looking for tiff') -- if _find_include_file(self, 'tiff.h'): -- if _find_library_file(self, "tiff"): -- feature.tiff = "tiff" -- if sys.platform == "win32" and _find_library_file(self, "libtiff"): -- feature.tiff = "libtiff" -- if (sys.platform == "darwin" and -- _find_library_file(self, "libtiff")): -- feature.tiff = "libtiff" - - if feature.want('freetype'): - _dbg('Looking for freetype') -@@ -546,36 +491,6 @@ class pil_build_ext(build_ext): - if subdir: - _add_directory(self.compiler.include_dirs, subdir, 0) - -- if feature.want('lcms'): -- _dbg('Looking for lcms') -- if _find_include_file(self, "lcms2.h"): -- if _find_library_file(self, "lcms2"): -- feature.lcms = "lcms2" -- elif _find_library_file(self, "lcms2_static"): -- # alternate Windows name. -- feature.lcms = "lcms2_static" +- # FIXME: check /opt/stuff directories here? - -- if feature.want('webp'): -- _dbg('Looking for webp') -- if (_find_include_file(self, "webp/encode.h") and -- _find_include_file(self, "webp/decode.h")): -- # In Google's precompiled zip it is call "libwebp": -- if _find_library_file(self, "webp"): -- feature.webp = "webp" -- elif _find_library_file(self, "libwebp"): -- feature.webp = "libwebp" +- # standard locations +- if not self.disable_platform_guessing: +- _add_directory(library_dirs, "/usr/local/lib") +- _add_directory(include_dirs, "/usr/local/include") - -- if feature.want('webpmux'): -- _dbg('Looking for webpmux') -- if (_find_include_file(self, "webp/mux.h") and -- _find_include_file(self, "webp/demux.h")): -- if (_find_library_file(self, "webpmux") and -- _find_library_file(self, "webpdemux")): -- feature.webpmux = "webpmux" -- if (_find_library_file(self, "libwebpmux") and -- _find_library_file(self, "libwebpdemux")): -- feature.webpmux = "libwebpmux" +- _add_directory(library_dirs, "/usr/lib") +- _add_directory(include_dirs, "/usr/include") +- # alpine, at least +- _add_directory(library_dirs, "/lib") - - for f in feature: - if not getattr(feature, f) and feature.require(f): - if f in ('jpeg', 'zlib'): -@@ -612,8 +527,6 @@ class pil_build_ext(build_ext): - defs.append(("HAVE_LIBTIFF", None)) - if sys.platform == "win32": - libs.extend(["kernel32", "user32", "gdi32"]) -- if struct.unpack("h", "\0\1".encode('ascii'))[0] == 1: -- defs.append(("WORDS_BIGENDIAN", None)) - - if sys.platform == "win32" and not (PLATFORM_PYPY or PLATFORM_MINGW): - defs.append(("PILLOW_VERSION", '"\\"%s\\""' % PILLOW_VERSION)) -@@ -658,10 +571,6 @@ class pil_build_ext(build_ext): - define_macros=defs)) - - tk_libs = ['psapi'] if sys.platform == 'win32' else [] -- exts.append(Extension("PIL._imagingtk", -- ["src/_imagingtk.c", "src/Tk/tkImaging.c"], -- include_dirs=['src/Tk'], -- libraries=tk_libs)) - - exts.append(Extension("PIL._imagingmath", 
["src/_imagingmath.c"])) - exts.append(Extension("PIL._imagingmorph", ["src/_imagingmorph.c"])) +- if sys.platform == "win32": +- # on Windows, look for the OpenJPEG libraries in the location that +- # the official installer puts them +- program_files = os.environ.get("ProgramFiles", "") +- best_version = (0, 0) +- best_path = None +- for name in os.listdir(program_files): +- if name.startswith("OpenJPEG "): +- version = tuple(int(x) for x in name[9:].strip().split(".")) +- if version > best_version: +- best_version = version +- best_path = os.path.join(program_files, name) +- +- if best_path: +- _dbg("Adding %s to search list", best_path) +- _add_directory(library_dirs, os.path.join(best_path, "lib")) +- _add_directory(include_dirs, os.path.join(best_path, "include")) +- + # + # insert new dirs *before* default libs, to avoid conflicts + # between Python PYD stub libs and real libraries \ No newline at end of file diff --git a/p4a/pythonforandroid/recipes/aiohttp/__init__.py b/p4a/pythonforandroid/recipes/aiohttp/__init__.py new file mode 100644 index 0000000..f32c653 --- /dev/null +++ b/p4a/pythonforandroid/recipes/aiohttp/__init__.py @@ -0,0 +1,20 @@ +"""Build AIOHTTP""" +from typing import List +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe + + +class AIOHTTPRecipe(CppCompiledComponentsPythonRecipe): # type: ignore # pylint: disable=R0903 + version = "3.8.3" + url = "https://pypi.python.org/packages/source/a/aiohttp/aiohttp-{version}.tar.gz" + name = "aiohttp" + depends: List[str] = ["setuptools"] + call_hostpython_via_targetpython = False + install_in_hostpython = True + + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + env['LDFLAGS'] += ' -lc++_shared' + return env + + +recipe = AIOHTTPRecipe() diff --git a/p4a/pythonforandroid/recipes/android/__init__.py b/p4a/pythonforandroid/recipes/android/__init__.py index 4a06ca8..e568ac8 100644 --- a/p4a/pythonforandroid/recipes/android/__init__.py +++ b/p4a/pythonforandroid/recipes/android/__init__.py @@ -1,7 +1,5 @@ -from __future__ import unicode_literals from pythonforandroid.recipe import CythonRecipe, IncludedFilesBehaviour from pythonforandroid.util import current_directory -from pythonforandroid.patching import will_build from pythonforandroid import logger from os.path import join @@ -14,18 +12,17 @@ class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe): src_filename = 'src' - depends = [('pygame', 'sdl2', 'genericndkbuild'), - 'pyjnius'] + depends = [('sdl2', 'genericndkbuild'), 'pyjnius'] config_env = {} def get_recipe_env(self, arch): - env = super(AndroidRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) env.update(self.config_env) return env def prebuild_arch(self, arch): - super(AndroidRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) ctx_bootstrap = self.ctx.bootstrap.name # define macros for Cython, C, Python @@ -37,19 +34,10 @@ class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe): if isinstance(ctx_bootstrap, bytes): ctx_bootstrap = ctx_bootstrap.decode('utf-8') bootstrap = bootstrap_name = ctx_bootstrap - - is_sdl2 = bootstrap_name in ('sdl2', 'sdl2python3', 'sdl2_gradle') - is_pygame = bootstrap_name in ('pygame',) - is_webview = bootstrap_name in ('webview',) - - if is_sdl2 or is_webview: - if is_sdl2: - bootstrap = 'sdl2' + is_sdl2 = (bootstrap_name == "sdl2") + if bootstrap_name in ["sdl2", "webview", "service_only", "service_library"]: java_ns = u'org.kivy.android' jni_ns = u'org/kivy/android' - elif is_pygame: - java_ns = 
u'org.renpy.android' - jni_ns = u'org/renpy/android' else: logger.error(( 'unsupported bootstrap for android recipe: {}' @@ -60,10 +48,12 @@ class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe): config = { 'BOOTSTRAP': bootstrap, 'IS_SDL2': int(is_sdl2), - 'IS_PYGAME': int(is_pygame), - 'PY2': int(will_build('python2')(self)), + 'PY2': 0, 'JAVA_NAMESPACE': java_ns, 'JNI_NAMESPACE': jni_ns, + 'ACTIVITY_CLASS_NAME': self.ctx.activity_class_name, + 'ACTIVITY_CLASS_NAMESPACE': self.ctx.activity_class_name.replace('.', '/'), + 'SERVICE_CLASS_NAME': self.ctx.service_class_name, } # create config files for Cython, C and Python @@ -88,8 +78,11 @@ class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe): fh.write( '#define SDL_ANDROID_GetJNIEnv SDL_AndroidGetJNIEnv\n' ) - elif is_pygame: - fh.write('JNIEnv *SDL_ANDROID_GetJNIEnv(void);\n') + else: + fh.write('JNIEnv *WebView_AndroidGetJNIEnv(void);\n') + fh.write( + '#define SDL_ANDROID_GetJNIEnv WebView_AndroidGetJNIEnv\n' + ) recipe = AndroidRecipe() diff --git a/p4a/pythonforandroid/recipes/android/src/android/_android.pyx b/p4a/pythonforandroid/recipes/android/src/android/_android.pyx index d332eed..6708b84 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/_android.pyx +++ b/p4a/pythonforandroid/recipes/android/src/android/_android.pyx @@ -2,22 +2,6 @@ include "config.pxi" -IF BOOTSTRAP == 'pygame': - cdef extern int SDL_ANDROID_CheckPause() - cdef extern void SDL_ANDROID_WaitForResume() nogil - cdef extern void SDL_ANDROID_MapKey(int scancode, int keysym) - - def check_pause(): - return SDL_ANDROID_CheckPause() - - def wait_for_resume(): - android_accelerometer_enable(False) - SDL_ANDROID_WaitForResume() - android_accelerometer_enable(accelerometer_enabled) - - def map_key(scancode, keysym): - SDL_ANDROID_MapKey(scancode, keysym) - # Android keycodes. KEYCODE_UNKNOWN = 0 KEYCODE_SOFT_LEFT = 1 @@ -175,12 +159,10 @@ api_version = autoclass('android.os.Build$VERSION').SDK_INT version_codes = autoclass('android.os.Build$VERSION_CODES') -python_act = autoclass(JAVA_NAMESPACE + u'.PythonActivity') +python_act = autoclass(ACTIVITY_CLASS_NAME) Rect = autoclass(u'android.graphics.Rect') mActivity = python_act.mActivity if mActivity: - # PyGame backend already has the listener so adding - # one here leads to a crash/too much cpu usage. # SDL2 now does not need the listener so there is # no point adding a processor intensive layout listenere here. height = 0 @@ -274,42 +256,6 @@ def get_buildinfo(): binfo.VERSION_RELEASE = BUILD_VERSION_RELEASE return binfo -IF IS_PYGAME: - # Activate input - required to receive input events. 
- cdef extern void android_activate_input() - - def init(): - android_activate_input() - - # Action send - cdef extern void android_action_send(char*, char*, char*, char*, char*) - def action_send(mimetype, filename=None, subject=None, text=None, - chooser_title=None): - cdef char *j_mimetype = mimetype - cdef char *j_filename = NULL - cdef char *j_subject = NULL - cdef char *j_text = NULL - cdef char *j_chooser_title = NULL - if filename is not None: - j_filename = filename - if subject is not None: - j_subject = subject - if text is not None: - j_text = text - if chooser_title is not None: - j_chooser_title = chooser_title - android_action_send(j_mimetype, j_filename, j_subject, j_text, - j_chooser_title) - - cdef extern int android_checkstop() - cdef extern void android_ackstop() - - def check_stop(): - return android_checkstop() - - def ack_stop(): - android_ackstop() - # ------------------------------------------------------------------- # URL Opening. def open_url(url): @@ -334,17 +280,29 @@ class AndroidBrowser(object): import webbrowser webbrowser.register('android', AndroidBrowser) -cdef extern void android_start_service(char *, char *, char *) -def start_service(title=None, description=None, arg=None): - cdef char *j_title = NULL - cdef char *j_description = NULL - if title is not None: - j_title = title - if description is not None: - j_description = description - if arg is not None: - j_arg = arg - android_start_service(j_title, j_description, j_arg) + +def start_service(title="Background Service", + description="", arg="", + as_foreground=True): + # Legacy None value support (for old function signature style): + if title is None: + title = "Background Service" + if description is None: + description = "" + if arg is None: + arg = "" + + # Start service: + mActivity = autoclass(ACTIVITY_CLASS_NAME).mActivity + if as_foreground: + mActivity.start_service( + title, description, arg + ) + else: + mActivity.start_service_not_as_foreground( + title, description, arg + ) + cdef extern void android_stop_service() def stop_service(): diff --git a/p4a/pythonforandroid/recipes/android/src/android/_android_billing.pyx b/p4a/pythonforandroid/recipes/android/src/android/_android_billing.pyx index bd6bb2e..d5ed2a0 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/_android_billing.pyx +++ b/p4a/pythonforandroid/recipes/android/src/android/_android_billing.pyx @@ -15,7 +15,7 @@ class BillingService(object): BILLING_TYPE_SUBSCRIPTION = 'subs' def __init__(self, callback): - super(BillingService, self).__init__() + super().__init__() self.callback = callback self.purchased_items = None android_billing_service_start() diff --git a/p4a/pythonforandroid/recipes/android/src/android/_android_jni.c b/p4a/pythonforandroid/recipes/android/src/android/_android_jni.c index 8eee770..cf1b1bf 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/_android_jni.c +++ b/p4a/pythonforandroid/recipes/android/src/android/_android_jni.c @@ -201,146 +201,6 @@ void android_get_buildinfo() { } } -#if IS_PYGAME -void android_activate_input(void) { - static JNIEnv *env = NULL; - static jclass *cls = NULL; - static jmethodID mid = NULL; - - if (env == NULL) { - env = SDL_ANDROID_GetJNIEnv(); - aassert(env); - cls = (*env)->FindClass(env, "org/renpy/android/SDLSurfaceView"); - aassert(cls); - mid = (*env)->GetStaticMethodID(env, cls, "activateInput", "()V"); - aassert(mid); - } - - (*env)->CallStaticVoidMethod(env, cls, mid); -} - -int android_checkstop(void) { - static JNIEnv *env = NULL; - static 
jclass *cls = NULL; - static jmethodID mid = NULL; - - if (env == NULL) { - env = SDL_ANDROID_GetJNIEnv(); - aassert(env); - cls = (*env)->FindClass(env, "org/renpy/android/SDLSurfaceView"); - aassert(cls); - mid = (*env)->GetStaticMethodID(env, cls, "checkStop", "()I"); - aassert(mid); - } - - return (*env)->CallStaticIntMethod(env, cls, mid); -} - -void android_ackstop(void) { - static JNIEnv *env = NULL; - static jclass *cls = NULL; - static jmethodID mid = NULL; - - if (env == NULL) { - env = SDL_ANDROID_GetJNIEnv(); - aassert(env); - cls = (*env)->FindClass(env, "org/renpy/android/SDLSurfaceView"); - aassert(cls); - mid = (*env)->GetStaticMethodID(env, cls, "ackStop", "()I"); - aassert(mid); - } - - (*env)->CallStaticIntMethod(env, cls, mid); -} - -void android_action_send(char *mimeType, char *filename, char *subject, char *text, char *chooser_title) { - static JNIEnv *env = NULL; - static jclass *cls = NULL; - static jmethodID mid = NULL; - - if (env == NULL) { - env = SDL_ANDROID_GetJNIEnv(); - aassert(env); - cls = (*env)->FindClass(env, "org/renpy/android/Action"); - aassert(cls); - mid = (*env)->GetStaticMethodID(env, cls, "send", - "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); - aassert(mid); - } - - jstring j_mimeType = (*env)->NewStringUTF(env, mimeType); - jstring j_filename = NULL; - jstring j_subject = NULL; - jstring j_text = NULL; - jstring j_chooser_title = NULL; - if ( filename != NULL ) - j_filename = (*env)->NewStringUTF(env, filename); - if ( subject != NULL ) - j_subject = (*env)->NewStringUTF(env, subject); - if ( text != NULL ) - j_text = (*env)->NewStringUTF(env, text); - if ( chooser_title != NULL ) - j_chooser_title = (*env)->NewStringUTF(env, text); - - (*env)->CallStaticVoidMethod( - env, cls, mid, - j_mimeType, j_filename, j_subject, j_text, - j_chooser_title); -} - -void android_open_url(char *url) { - static JNIEnv *env = NULL; - static jclass *cls = NULL; - static jmethodID mid = NULL; - - if (env == NULL) { - env = SDL_ANDROID_GetJNIEnv(); - aassert(env); - cls = (*env)->FindClass(env, "org/renpy/android/SDLSurfaceView"); - aassert(cls); - mid = (*env)->GetStaticMethodID(env, cls, "openUrl", "(Ljava/lang/String;)V"); - aassert(mid); - } - - PUSH_FRAME; - - (*env)->CallStaticVoidMethod( - env, cls, mid, - (*env)->NewStringUTF(env, url) - ); - - POP_FRAME; -} -#endif // IS_PYGAME - -void android_start_service(char *title, char *description, char *arg) { - static JNIEnv *env = NULL; - static jclass *cls = NULL; - static jmethodID mid = NULL; - - if (env == NULL) { - env = SDL_ANDROID_GetJNIEnv(); - aassert(env); - cls = (*env)->FindClass(env, JNI_NAMESPACE "/PythonActivity"); - aassert(cls); - mid = (*env)->GetStaticMethodID(env, cls, "start_service", - "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); - aassert(mid); - } - - jstring j_title = NULL; - jstring j_description = NULL; - jstring j_arg = NULL; - if ( title != 0 ) - j_title = (*env)->NewStringUTF(env, title); - if ( description != 0 ) - j_description = (*env)->NewStringUTF(env, description); - if ( arg != 0 ) - j_arg = (*env)->NewStringUTF(env, arg); - - (*env)->CallStaticVoidMethod(env, cls, mid, j_title, j_description, j_arg); -} - void android_stop_service() { static JNIEnv *env = NULL; static jclass *cls = NULL; diff --git a/p4a/pythonforandroid/recipes/android/src/android/_ctypes_library_finder.py b/p4a/pythonforandroid/recipes/android/src/android/_ctypes_library_finder.py new file mode 100644 index 0000000..a03512e --- 
/dev/null +++ b/p4a/pythonforandroid/recipes/android/src/android/_ctypes_library_finder.py @@ -0,0 +1,67 @@ + +import sys +import os + + +def get_activity_lib_dir(activity_name): + from jnius import autoclass + + # Get the actual activity instance: + activity_class = autoclass(activity_name) + if activity_class is None: + return None + activity = None + if hasattr(activity_class, "mActivity") and \ + activity_class.mActivity is not None: + activity = activity_class.mActivity + elif hasattr(activity_class, "mService") and \ + activity_class.mService is not None: + activity = activity_class.mService + if activity is None: + return None + + # Extract the native lib dir from the activity instance: + package_name = activity.getApplicationContext().getPackageName() + manager = activity.getApplicationContext().getPackageManager() + manager_class = autoclass("android.content.pm.PackageManager") + native_lib_dir = manager.getApplicationInfo( + package_name, manager_class.GET_SHARED_LIBRARY_FILES + ).nativeLibraryDir + return native_lib_dir + + +def does_libname_match_filename(search_name, file_path): + # Filter file names so given search_name="mymodule" we match one of: + # mymodule.so (direct name + .so) + # libmymodule.so (added lib prefix) + # mymodule.arm64.so (added dot-separated middle parts) + # mymodule.so.1.3.4 (added dot-separated version tail) + # and all above (all possible combinations) + import re + file_name = os.path.basename(file_path) + return (re.match(r"^(lib)?" + re.escape(search_name) + + r"\.(.*\.)?so(\.[0-9]+)*$", file_name) is not None) + + +def find_library(name): + # Obtain all places for native libraries: + if sys.maxsize > 2**32: # 64bit-build + lib_search_dirs = ["/system/lib64", "/system/lib"] + else: + lib_search_dirs = ["/system/lib"] + lib_dir_1 = get_activity_lib_dir("org.kivy.android.PythonActivity") + if lib_dir_1 is not None: + lib_search_dirs.insert(0, lib_dir_1) + lib_dir_2 = get_activity_lib_dir("org.kivy.android.PythonService") + if lib_dir_2 is not None and lib_dir_2 not in lib_search_dirs: + lib_search_dirs.insert(0, lib_dir_2) + + # Now scan the lib dirs: + for lib_dir in [ldir for ldir in lib_search_dirs if os.path.exists(ldir)]: + filelist = [ + f for f in os.listdir(lib_dir) + if does_libname_match_filename(name, f) + ] + if len(filelist) > 0: + return os.path.join(lib_dir, filelist[0]) + return None diff --git a/p4a/pythonforandroid/recipes/android/src/android/activity.py b/p4a/pythonforandroid/recipes/android/src/android/activity.py index cafbbda..78d068c 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/activity.py +++ b/p4a/pythonforandroid/recipes/android/src/android/activity.py @@ -1,7 +1,7 @@ from jnius import PythonJavaClass, autoclass, java_method -from android.config import JAVA_NAMESPACE, JNI_NAMESPACE +from android.config import ACTIVITY_CLASS_NAME, ACTIVITY_CLASS_NAMESPACE -_activity = autoclass(JAVA_NAMESPACE + '.PythonActivity').mActivity +_activity = autoclass(ACTIVITY_CLASS_NAME).mActivity _callbacks = { 'on_new_intent': [], @@ -10,11 +10,11 @@ _callbacks = { class NewIntentListener(PythonJavaClass): - __javainterfaces__ = [JNI_NAMESPACE + '/PythonActivity$NewIntentListener'] + __javainterfaces__ = [ACTIVITY_CLASS_NAMESPACE + '$NewIntentListener'] __javacontext__ = 'app' def __init__(self, callback, **kwargs): - super(NewIntentListener, self).__init__(**kwargs) + super().__init__(**kwargs) self.callback = callback @java_method('(Landroid/content/Intent;)V') @@ -23,11 +23,11 @@ class NewIntentListener(PythonJavaClass): 
class ActivityResultListener(PythonJavaClass): - __javainterfaces__ = [JNI_NAMESPACE + '/PythonActivity$ActivityResultListener'] + __javainterfaces__ = [ACTIVITY_CLASS_NAMESPACE + '$ActivityResultListener'] __javacontext__ = 'app' def __init__(self, callback): - super(ActivityResultListener, self).__init__() + super().__init__() self.callback = callback @java_method('(IILandroid/content/Intent;)V') @@ -61,3 +61,154 @@ def unbind(**kwargs): _activity.unregisterNewIntentListener(listener) elif event == 'on_activity_result': _activity.unregisterActivityResultListener(listener) + + +# Keep a reference to all the registered classes so that python doesn't +# garbage collect them. +_lifecycle_callbacks = set() + + +class ActivityLifecycleCallbacks(PythonJavaClass): + """Callback class for handling PythonActivity lifecycle transitions""" + + __javainterfaces__ = ['android/app/Application$ActivityLifecycleCallbacks'] + + def __init__(self, callbacks): + super().__init__() + + # It would be nice to use keyword arguments, but PythonJavaClass + # doesn't allow that in its __cinit__ method. + if not isinstance(callbacks, dict): + raise ValueError('callbacks must be a dict instance') + self.callbacks = callbacks + + def _callback(self, name, *args): + func = self.callbacks.get(name) + if func: + return func(*args) + + @java_method('(Landroid/app/Activity;Landroid/os/Bundle;)V') + def onActivityCreated(self, activity, savedInstanceState): + self._callback('onActivityCreated', activity, savedInstanceState) + + @java_method('(Landroid/app/Activity;)V') + def onActivityDestroyed(self, activity): + self._callback('onActivityDestroyed', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPaused(self, activity): + self._callback('onActivityPaused', activity) + + @java_method('(Landroid/app/Activity;Landroid/os/Bundle;)V') + def onActivityPostCreated(self, activity, savedInstanceState): + self._callback('onActivityPostCreated', activity, savedInstanceState) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPostDestroyed(self, activity): + self._callback('onActivityPostDestroyed', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPostPaused(self, activity): + self._callback('onActivityPostPaused', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPostResumed(self, activity): + self._callback('onActivityPostResumed', activity) + + @java_method('(Landroid/app/Activity;Landroid/os/Bundle;)V') + def onActivityPostSaveInstanceState(self, activity, outState): + self._callback('onActivityPostSaveInstanceState', activity, outState) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPostStarted(self, activity): + self._callback('onActivityPostStarted', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPostStopped(self, activity): + self._callback('onActivityPostStopped', activity) + + @java_method('(Landroid/app/Activity;Landroid/os/Bundle;)V') + def onActivityPreCreated(self, activity, savedInstanceState): + self._callback('onActivityPreCreated', activity, savedInstanceState) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPreDestroyed(self, activity): + self._callback('onActivityPreDestroyed', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPrePaused(self, activity): + self._callback('onActivityPrePaused', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPreResumed(self, activity): + self._callback('onActivityPreResumed', activity) + + 
@java_method('(Landroid/app/Activity;Landroid/os/Bundle;)V') + def onActivityPreSaveInstanceState(self, activity, outState): + self._callback('onActivityPreSaveInstanceState', activity, outState) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPreStarted(self, activity): + self._callback('onActivityPreStarted', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityPreStopped(self, activity): + self._callback('onActivityPreStopped', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityResumed(self, activity): + self._callback('onActivityResumed', activity) + + @java_method('(Landroid/app/Activity;Landroid/os/Bundle;)V') + def onActivitySaveInstanceState(self, activity, outState): + self._callback('onActivitySaveInstanceState', activity, outState) + + @java_method('(Landroid/app/Activity;)V') + def onActivityStarted(self, activity): + self._callback('onActivityStarted', activity) + + @java_method('(Landroid/app/Activity;)V') + def onActivityStopped(self, activity): + self._callback('onActivityStopped', activity) + + +def register_activity_lifecycle_callbacks(**callbacks): + """Register ActivityLifecycleCallbacks instance + + The callbacks are supplied as keyword arguments corresponding to the + Application.ActivityLifecycleCallbacks methods such as + onActivityStarted. See the ActivityLifecycleCallbacks documentation + for the signature of each method. + + The ActivityLifecycleCallbacks instance is returned so it can be + supplied to unregister_activity_lifecycle_callbacks if needed. + """ + instance = ActivityLifecycleCallbacks(callbacks) + _lifecycle_callbacks.add(instance) + + # Use the registerActivityLifecycleCallbacks method from the + # Activity class if it's available (API 29) since it guarantees the + # callbacks will only be run for that activity. Otherwise, fall back + # to the method on the Application class (API 14). In practice there + # should be no difference since p4a applications only have a single + # activity.
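+ # + # A minimal usage sketch (the handler name below is illustrative only, + # not part of the API): + # + # def on_paused(activity): + # ... # e.g. persist app state + # + # register_activity_lifecycle_callbacks(onActivityPaused=on_paused)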
+ if hasattr(_activity, 'registerActivityLifecycleCallbacks'): + _activity.registerActivityLifecycleCallbacks(instance) + else: + app = _activity.getApplication() + app.registerActivityLifecycleCallbacks(instance) + return instance + + +def unregister_activity_lifecycle_callbacks(instance): + """Unregister ActivityLifecycleCallbacks instance""" + if hasattr(_activity, 'unregisterActivityLifecycleCallbacks'): + _activity.unregisterActivityLifecycleCallbacks(instance) + else: + app = _activity.getApplication() + app.unregisterActivityLifecycleCallbacks(instance) + + try: + _lifecycle_callbacks.remove(instance) + except KeyError: + pass diff --git a/p4a/pythonforandroid/recipes/android/src/android/broadcast.py b/p4a/pythonforandroid/recipes/android/src/android/broadcast.py index cb34cd9..3232d83 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/broadcast.py +++ b/p4a/pythonforandroid/recipes/android/src/android/broadcast.py @@ -2,7 +2,7 @@ # Broadcast receiver bridge from jnius import autoclass, PythonJavaClass, java_method -from android.config import JAVA_NAMESPACE, JNI_NAMESPACE +from android.config import JAVA_NAMESPACE, JNI_NAMESPACE, ACTIVITY_CLASS_NAME, SERVICE_CLASS_NAME class BroadcastReceiver(object): @@ -20,7 +20,7 @@ class BroadcastReceiver(object): self.callback(context, intent) def __init__(self, callback, actions=None, categories=None): - super(BroadcastReceiver, self).__init__() + super().__init__() self.callback = callback if not actions and not categories: @@ -72,7 +72,7 @@ class BroadcastReceiver(object): def context(self): from os import environ if 'PYTHON_SERVICE_ARGUMENT' in environ: - PythonService = autoclass(JAVA_NAMESPACE + '.PythonService') + PythonService = autoclass(SERVICE_CLASS_NAME) return PythonService.mService - PythonActivity = autoclass(JAVA_NAMESPACE + '.PythonActivity') + PythonActivity = autoclass(ACTIVITY_CLASS_NAME) return PythonActivity.mActivity diff --git a/p4a/pythonforandroid/recipes/android/src/android/loadingscreen.py b/p4a/pythonforandroid/recipes/android/src/android/loadingscreen.py index 1dc1b67..a18162e 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/loadingscreen.py +++ b/p4a/pythonforandroid/recipes/android/src/android/loadingscreen.py @@ -1,7 +1,9 @@ from jnius import autoclass +from android.config import ACTIVITY_CLASS_NAME + def hide_loading_screen(): - python_activity = autoclass('org.kivy.android.PythonActivity') - python_activity.removeLoadingScreen() + mActivity = autoclass(ACTIVITY_CLASS_NAME).mActivity + mActivity.removeLoadingScreen() diff --git a/p4a/pythonforandroid/recipes/android/src/android/mixer.py b/p4a/pythonforandroid/recipes/android/src/android/mixer.py index 334f696..303a953 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/mixer.py +++ b/p4a/pythonforandroid/recipes/android/src/android/mixer.py @@ -93,7 +93,9 @@ def find_channel(force=False): if not force: return None - return min(busy, key=lambda x: x.play_time) + busy.sort(key=lambda x: x.play_time) + + return busy[0] class ChannelImpl(object): diff --git a/p4a/pythonforandroid/recipes/android/src/android/permissions.py b/p4a/pythonforandroid/recipes/android/src/android/permissions.py index 6c2d384..0ce568f 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/permissions.py +++ b/p4a/pythonforandroid/recipes/android/src/android/permissions.py @@ -1,6 +1,7 @@ +import threading try: - from jnius import autoclass + from jnius import autoclass, PythonJavaClass, java_method except ImportError: # To allow importing by 
build/manifest-creating code without # pyjnius being present: @@ -8,9 +9,14 @@ except ImportError: raise RuntimeError("pyjnius not available") +from android.config import ACTIVITY_CLASS_NAME, ACTIVITY_CLASS_NAMESPACE + + class Permission: ACCEPT_HANDOVER = "android.permission.ACCEPT_HANDOVER" + ACCESS_BACKGROUND_LOCATION = "android.permission.ACCESS_BACKGROUND_LOCATION" ACCESS_COARSE_LOCATION = "android.permission.ACCESS_COARSE_LOCATION" + ACCESS_FINE_LOCATION = "android.permission.ACCESS_FINE_LOCATION" ACCESS_LOCATION_EXTRA_COMMANDS = ( "android.permission.ACCESS_LOCATION_EXTRA_COMMANDS" ) @@ -92,6 +98,15 @@ class Permission: BLUETOOTH = ( "android.permission.BLUETOOTH" ) + BLUETOOTH_ADVERTISE = ( + "android.permission.BLUETOOTH_ADVERTISE" + ) + BLUETOOTH_CONNECT = ( + "android.permission.BLUETOOTH_CONNECT" + ) + BLUETOOTH_SCAN = ( + "android.permission.BLUETOOTH_SCAN" + ) BLUETOOTH_ADMIN = ( "android.permission.BLUETOOTH_ADMIN" ) @@ -227,6 +242,9 @@ class Permission: MOUNT_UNMOUNT_FILESYSTEMS = ( "android.permission.MOUNT_UNMOUNT_FILESYSTEMS" ) + NEARBY_WIFI_DEVICES = ( + "android.permission.NEARBY_WIFI_DEVICES" + ) NFC = ( "android.permission.NFC" ) @@ -239,6 +257,9 @@ class Permission: PERSISTENT_ACTIVITY = ( "android.permission.PERSISTENT_ACTIVITY" ) + POST_NOTIFICATIONS = ( + "android.permission.POST_NOTIFICATIONS" + ) PROCESS_OUTGOING_CALLS = ( "android.permission.PROCESS_OUTGOING_CALLS" ) @@ -263,6 +284,15 @@ class Permission: READ_LOGS = ( "android.permission.READ_LOGS" ) + READ_MEDIA_AUDIO = ( + "android.permission.READ_MEDIA_AUDIO" + ) + READ_MEDIA_IMAGES = ( + "android.permission.READ_MEDIA_IMAGES" + ) + READ_MEDIA_VIDEO = ( + "android.permission.READ_MEDIA_VIDEO" + ) READ_PHONE_NUMBERS = ( "android.permission.READ_PHONE_NUMBERS" ) @@ -421,18 +451,168 @@ class Permission: ) -def request_permissions(permissions): - python_activity = autoclass('org.kivy.android.PythonActivity') - python_activity.requestPermissions(permissions) +PERMISSION_GRANTED = 0 +PERMISSION_DENIED = -1 -def request_permission(permission): - request_permissions([permission]) +class _onRequestPermissionsCallback(PythonJavaClass): + """Callback class for registering a Python callback from + onRequestPermissionsResult in PythonActivity. + """ + __javainterfaces__ = [ACTIVITY_CLASS_NAMESPACE + '$PermissionsCallback'] + __javacontext__ = 'app' + + def __init__(self, func): + self.func = func + super().__init__() + + @java_method('(I[Ljava/lang/String;[I)V') + def onRequestPermissionsResult(self, requestCode, + permissions, grantResults): + self.func(requestCode, permissions, grantResults) + + +class _RequestPermissionsManager: + """Internal class for requesting Android permissions. + + Permissions are requested through the method 'request_permissions' which + accepts a list of permissions and an optional callback. + + Any callback will asynchronously receive arguments from + onRequestPermissionsResult on PythonActivity after requestPermissions is + called. + + The callback supplied must accept two arguments: 'permissions' and + 'grantResults' (as supplied to onPermissionsCallbackResult). + + Note that for SDK_INT < 23, run-time permissions are not required, and so + the callback will be called immediately. + + The attribute '_java_callback' is initially None, but is set when the first + permissions request is made. It is set to an instance of + onRequestPermissionsCallback, which allows the Java callback to be + propagated to the class method 'python_callback'. 
This is then, in turn, + used to call an application callback if provided to request_permissions. + + The attribute '_callback_id' is incremented with each call to + request_permissions which has a callback (the value '1' is used for any + call which does not pass a callback). This is passed to requestCode in + the Java call, and used to identify (via the _callbacks dictionary) + the matching call. + """ + _SDK_INT = None + _java_callback = None + _callbacks = {1: None} + _callback_id = 1 + # Lock to prevent multiple calls to request_permissions being handled + # simultaneously (as incrementing _callback_id is not atomic) + _lock = threading.Lock() + + @classmethod + def register_callback(cls): + """Register Java callback for requestPermissions.""" + cls._java_callback = _onRequestPermissionsCallback(cls.python_callback) + mActivity = autoclass(ACTIVITY_CLASS_NAME).mActivity + mActivity.addPermissionsCallback(cls._java_callback) + + @classmethod + def request_permissions(cls, permissions, callback=None): + """Requests Android permissions from PythonActivity. + If 'callback' is supplied, the request is made with a new requestCode + and the callback is stored in the _callbacks dict. When a Java callback + with the matching requestCode is received, callback will be called + with arguments of 'permissions' and 'grant_results'. + """ + if not cls._SDK_INT: + # Get the Android build version and store it + VERSION = autoclass('android.os.Build$VERSION') + cls._SDK_INT = VERSION.SDK_INT + if cls._SDK_INT < 23: + # No run-time permissions needed, return immediately. + if callback: + callback(permissions, [True for x in permissions]) + return + # Request permissions + with cls._lock: + if not cls._java_callback: + cls.register_callback() + mActivity = autoclass(ACTIVITY_CLASS_NAME).mActivity + if not callback: + mActivity.requestPermissions(permissions) + else: + cls._callback_id += 1 + mActivity.requestPermissionsWithRequestCode( + permissions, cls._callback_id) + cls._callbacks[cls._callback_id] = callback + + @classmethod + def python_callback(cls, requestCode, permissions, grantResults): + """Calls the relevant callback with arguments of 'permissions' + and 'grantResults'.""" + # Convert from Android codes to True/False + grant_results = [x == PERMISSION_GRANTED for x in grantResults] + if cls._callbacks.get(requestCode): + cls._callbacks[requestCode](permissions, grant_results) + + +# Public API methods for requesting permissions + +def request_permissions(permissions, callback=None): + """Requests Android permissions. + + Args: + permissions (list of str): A list of permissions to request + callback (callable, optional): A function to call when the request + is completed + + Returns: + None + + Notes: + + Permission strings can be imported from the 'Permission' class in this + module. For example: + + from android import Permission + permissions_list = [Permission.CAMERA, + Permission.WRITE_EXTERNAL_STORAGE] + + See the p4a source file 'permissions.py' for a list of valid permission + strings (pythonforandroid/recipes/android/src/android/permissions.py). + + Any callback supplied must accept two arguments: + permissions (list of str): A list of permission strings + grant_results (list of bool): A list of bools indicating whether the + respective permission was granted. + See Android documentation for onPermissionsCallbackResult for + further information.
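+ + For example, a callback might look like this (the handler name below + is illustrative only): + + def on_permissions_result(permissions, grant_results): + if all(grant_results): + print('all requested permissions granted') + + request_permissions([Permission.CAMERA], on_permissions_result)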
+ + Note that if the request is interrupted the callback may contain an empty + list of permissions, without permissions being granted; the app should + check that each permission requested has been granted. + + Also note that when calling request_permission on SDK_INT < 23, the + callback will be called immediately as requesting permissions is not + required. + """ + _RequestPermissionsManager.request_permissions(permissions, callback) + + +def request_permission(permission, callback=None): + request_permissions([permission], callback) def check_permission(permission): - python_activity = autoclass('org.kivy.android.PythonActivity') - result = bool(python_activity.checkCurrentPermission( + """Checks if an app holds the passed permission. + + Args: + permission (str): An Android permission. + + Returns: + bool: True if the app holds the permission given, False otherwise. + """ + mActivity = autoclass(ACTIVITY_CLASS_NAME).mActivity + result = bool(mActivity.checkCurrentPermission( + permission + "" )) return result diff --git a/p4a/pythonforandroid/recipes/android/src/android/runnable.py b/p4a/pythonforandroid/recipes/android/src/android/runnable.py index 8d2d116..b20f6cc 100644 --- a/p4a/pythonforandroid/recipes/android/src/android/runnable.py +++ b/p4a/pythonforandroid/recipes/android/src/android/runnable.py @@ -1,14 +1,17 @@ ''' Runnable ======== - ''' from jnius import PythonJavaClass, java_method, autoclass -from android.config import JAVA_NAMESPACE +from android.config import ACTIVITY_CLASS_NAME -# reference to the activity -_PythonActivity = autoclass(JAVA_NAMESPACE + '.PythonActivity') +# Reference to the activity +_PythonActivity = autoclass(ACTIVITY_CLASS_NAME) + +# Cache of wrapped functions. Older Android versions limit the number of JNI +# references, so caching the wrappers avoids running out. +__functionstable__ = {} class Runnable(PythonJavaClass): @@ -20,7 +23,7 @@ class Runnable(PythonJavaClass): __runnables__ = [] def __init__(self, func): - super(Runnable, self).__init__() + super().__init__() self.func = func def __call__(self, *args, **kwargs): @@ -44,6 +47,12 @@ def run_on_ui_thread(f): '''Decorator to create automatically a :class:`Runnable` object with the function. The function will be delayed and call into the Activity thread. ''' + if f not in __functionstable__: + rfunction = Runnable(f) # store the runnable function + __functionstable__[f] = {"rfunction": rfunction} + rfunction = __functionstable__[f]["rfunction"] + def f2(*args, **kwargs): - Runnable(f)(*args, **kwargs) + rfunction(*args, **kwargs) + return f2 diff --git a/p4a/pythonforandroid/recipes/android/src/android/storage.py b/p4a/pythonforandroid/recipes/android/src/android/storage.py new file mode 100644 index 0000000..aa6d781 --- /dev/null +++ b/p4a/pythonforandroid/recipes/android/src/android/storage.py @@ -0,0 +1,117 @@ +from jnius import autoclass, cast +import os + +from android.config import ACTIVITY_CLASS_NAME, SERVICE_CLASS_NAME + + +Environment = autoclass('android.os.Environment') +File = autoclass('java.io.File') + + +def _android_has_is_removable_func(): + VERSION = autoclass('android.os.Build$VERSION') + return (VERSION.SDK_INT >= 24) + + +def _get_sdcard_path(): + """ Internal function to return getExternalStorageDirectory() + path. This is internal because it may either return the internal, + or an external sd card, depending on the device. + Use primary_external_storage_path() + or secondary_external_storage_path() instead, which try to + distinguish this properly.
+ """ + return ( + Environment.getExternalStorageDirectory().getAbsolutePath() + ) + + +def _get_activity(): + """ + Retrieves the activity from `PythonActivity`, falling back to + `PythonService`. + """ + PythonActivity = autoclass(ACTIVITY_CLASS_NAME) + activity = PythonActivity.mActivity + if activity is None: + # assume we're running from the background service + PythonService = autoclass(SERVICE_CLASS_NAME) + activity = PythonService.mService + return activity + + +def app_storage_path(): + """ Locate the built-in device storage used for this app only. + + This storage is APP-SPECIFIC, and not visible to other apps. + It will be wiped when your app is uninstalled. + + Returns directory path to storage. + """ + activity = _get_activity() + currentActivity = cast('android.app.Activity', activity) + context = cast('android.content.ContextWrapper', + currentActivity.getApplicationContext()) + file_p = cast('java.io.File', context.getFilesDir()) + return os.path.normpath(os.path.abspath( + file_p.getAbsolutePath().replace("/", os.path.sep))) + + +def primary_external_storage_path(): + """ Locate the built-in device storage that the user can see via a + file browser. Often found at: /sdcard/ + + This storage is SHARED, and visible to other apps and the user. + It will remain untouched when your app is uninstalled. + + Returns directory path to storage. + + WARNING: You need storage permissions to access this storage. + """ + if _android_has_is_removable_func(): + sdpath = _get_sdcard_path() + # Apparently this can both return primary (built-in) or + # secondary (removable) external storage depending on the device, + # therefore check that we got what we wanted: + if not Environment.isExternalStorageRemovable(File(sdpath)): + return sdpath + if "EXTERNAL_STORAGE" in os.environ: + return os.environ["EXTERNAL_STORAGE"] + raise RuntimeError( + "unexpectedly failed to determine " + + "primary external storage path" + ) + + +def secondary_external_storage_path(): + """ Locate the external SD Card storage, which may not be present. + Often found at: /sdcard/External_SD/ + + This storage is SHARED, visible to other apps, and may not be + available if the user didn't put in an external SD card. + It will remain untouched when your app is uninstalled. + + Returns None if not found, otherwise path to storage. + + WARNING: You need storage permissions to access this storage. + If it is not writable and presents as empty even with + permissions, then the external sd card may not be present.
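+ + For example, a common pattern is to prefer the removable card and + fall back to the primary storage (both helpers are defined in this + module): + + path = (secondary_external_storage_path() or + primary_external_storage_path())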
+ """ + if _android_has_is_removable_func(): + # See if getExternalStorageDirectory() returns secondary ext storage: + sdpath = _get_sdcard_path() + # Apparently this can both return primary (built-in) or + # secondary (removable) external storage depending on the device, + # therefore check that we got what we wanted: + if Environment.isExternalStorageRemovable(File(sdpath)): + if os.path.exists(sdpath): + return sdpath + + # See if we can take a guess based on environment variables: + p = None + if "SECONDARY_STORAGE" in os.environ: + p = os.environ["SECONDARY_STORAGE"] + elif "EXTERNAL_SDCARD_STORAGE" in os.environ: + p = os.environ["EXTERNAL_SDCARD_STORAGE"] + if p is not None and os.path.exists(p): + return p + return None diff --git a/p4a/pythonforandroid/recipes/android/src/setup.py b/p4a/pythonforandroid/recipes/android/src/setup.py index 2e95a86..bcd411f 100755 --- a/p4a/pythonforandroid/recipes/android/src/setup.py +++ b/p4a/pythonforandroid/recipes/android/src/setup.py @@ -3,15 +3,9 @@ import os library_dirs = ['libs/' + os.environ['ARCH']] lib_dict = { - 'pygame': ['sdl'], 'sdl2': ['SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf'] } -sdl_libs = lib_dict.get(os.environ['BOOTSTRAP'], []) - -renpy_sound = Extension('android._android_sound', - ['android/_android_sound.c', 'android/_android_sound_jni.c', ], - libraries=sdl_libs + ['log'], - library_dirs=library_dirs) +sdl_libs = lib_dict.get(os.environ['BOOTSTRAP'], ['main']) modules = [Extension('android._android', ['android/_android.c', 'android/_android_jni.c'], @@ -22,10 +16,6 @@ modules = [Extension('android._android', libraries=['log'], library_dirs=library_dirs)] -if int(os.environ['IS_PYGAME']): - modules.append(renpy_sound) - - setup(name='android', version='1.0', packages=['android'], diff --git a/p4a/pythonforandroid/recipes/apsw/__init__.py b/p4a/pythonforandroid/recipes/apsw/__init__.py index 6098e4b..42ad3ba 100644 --- a/p4a/pythonforandroid/recipes/apsw/__init__.py +++ b/p4a/pythonforandroid/recipes/apsw/__init__.py @@ -6,7 +6,7 @@ import sh class ApswRecipe(PythonRecipe): version = '3.15.0-r1' url = 'https://github.com/rogerbinns/apsw/archive/{version}.tar.gz' - depends = ['sqlite3', ('python2', 'python3'), 'setuptools'] + depends = ['sqlite3', 'setuptools'] call_hostpython_via_targetpython = False site_packages_name = 'apsw' @@ -20,10 +20,10 @@ class ApswRecipe(PythonRecipe): 'build_ext', '--enable=fts4', _env=env) # Install python bindings - super(ApswRecipe, self).build_arch(arch) + super().build_arch(arch) def get_recipe_env(self, arch): - env = super(ApswRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) sqlite_recipe = self.get_recipe('sqlite3', self.ctx) env['CFLAGS'] += ' -I' + sqlite_recipe.get_build_dir(arch.arch) env['LDFLAGS'] += ' -L' + sqlite_recipe.get_lib_dir(arch) diff --git a/p4a/pythonforandroid/recipes/argon2-cffi/__init__.py b/p4a/pythonforandroid/recipes/argon2-cffi/__init__.py new file mode 100644 index 0000000..0450d78 --- /dev/null +++ b/p4a/pythonforandroid/recipes/argon2-cffi/__init__.py @@ -0,0 +1,17 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class Argon2Recipe(CompiledComponentsPythonRecipe): + version = '20.1.0' + url = 'git+https://github.com/hynek/argon2-cffi' + depends = ['setuptools', 'cffi'] + call_hostpython_via_targetpython = False + build_cmd = 'build' + + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + env['ARGON2_CFFI_USE_SSE2'] = '0' + return env + + +recipe = Argon2Recipe() diff --git
a/p4a/pythonforandroid/recipes/audiostream/__init__.py b/p4a/pythonforandroid/recipes/audiostream/__init__.py index 4197abd..00c92b3 100644 --- a/p4a/pythonforandroid/recipes/audiostream/__init__.py +++ b/p4a/pythonforandroid/recipes/audiostream/__init__.py @@ -1,32 +1,51 @@ from pythonforandroid.recipe import CythonRecipe +from pythonforandroid.toolchain import shprint, current_directory, info +import sh from os.path import join class AudiostreamRecipe(CythonRecipe): - version = 'master' + # audiostream has no tagged versions; this is the latest commit to master as of 2020-12-22 + # it includes a fix for the dynamic-library loading issue on android that was preventing use + version = '69f6b100f1ea4e3982a1acf6bbb0804e31a2cd50' url = 'https://github.com/kivy/audiostream/archive/{version}.zip' + sha256sum = '4d415c91706fd76865d0d22f1945f87900dc42125ff5a6c8d77898ccdf613c21' name = 'audiostream' - depends = [('python2', 'python3'), ('sdl', 'sdl2'), 'pyjnius'] + depends = ['python3', 'sdl2', 'pyjnius'] def get_recipe_env(self, arch): - env = super(AudiostreamRecipe, self).get_recipe_env(arch) - if 'sdl' in self.ctx.recipe_build_order: - sdl_include = 'sdl' - sdl_mixer_include = 'sdl_mixer' - elif 'sdl2' in self.ctx.recipe_build_order: - sdl_include = 'SDL2' - sdl_mixer_include = 'SDL2_mixer' - env['USE_SDL2'] = 'True' - env['SDL2_INCLUDE_DIR'] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include') + env = super().get_recipe_env(arch) + sdl_include = 'SDL2' - env['CFLAGS'] += ' -I{jni_path}/{sdl_include}/include -I{jni_path}/{sdl_mixer_include}'.format( + env['USE_SDL2'] = 'True' + env['SDL2_INCLUDE_DIR'] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include') + + env['CFLAGS'] += ' -I{jni_path}/{sdl_include}/include'.format( jni_path=join(self.ctx.bootstrap.build_dir, 'jni'), - sdl_include=sdl_include, - sdl_mixer_include=sdl_mixer_include) - env['NDKPLATFORM'] = self.ctx.ndk_platform + sdl_include=sdl_include) + + sdl2_mixer_recipe = self.get_recipe('sdl2_mixer', self.ctx) + for include_dir in sdl2_mixer_recipe.get_include_dirs(arch): + env['CFLAGS'] += ' -I{include_dir}'.format(include_dir=include_dir) + + # NDKPLATFORM is our switch for detecting Android platform, so can't be None + env['NDKPLATFORM'] = "NOTNONE" + env['LIBLINK'] = 'NOTNONE' # Hacky fix. Needed by audiostream setup.py return env + def postbuild_arch(self, arch): + # TODO: This code was copied from pyjnius, but judging by the + # audiostream history, it looks like this step might have + # happened automatically in the past. + # Given the goal of migrating off of recipes, it would + # be good to repair or build infrastructure for doing this + # automatically, for when including a java class is + # the best solution to a problem.
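+ # The copy below places audiostream's Java sources into the + # bootstrap's Java classes dir so they are compiled into the app.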
+ super().postbuild_arch(arch) + info('Copying audiostream java files to classes build dir') + with current_directory(self.get_build_dir(arch.arch)): + shprint(sh.cp, '-a', join('audiostream', 'platform', 'android'), self.ctx.javaclass_dir) + recipe = AudiostreamRecipe() diff --git a/p4a/pythonforandroid/recipes/bcrypt/__init__.py b/p4a/pythonforandroid/recipes/bcrypt/__init__.py new file mode 100644 index 0000000..da220ff --- /dev/null +++ b/p4a/pythonforandroid/recipes/bcrypt/__init__.py @@ -0,0 +1,22 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe + + +class BCryptRecipe(CompiledComponentsPythonRecipe): + name = 'bcrypt' + version = '3.1.7' + url = 'https://github.com/pyca/bcrypt/archive/{version}.tar.gz' + depends = ['openssl', 'cffi'] + call_hostpython_via_targetpython = False + + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + + openssl_recipe = Recipe.get_recipe('openssl', self.ctx) + env['CFLAGS'] += openssl_recipe.include_flags(arch) + env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) + env['LIBS'] = openssl_recipe.link_libs_flags() + + return env + + +recipe = BCryptRecipe() diff --git a/p4a/pythonforandroid/recipes/boost/__init__.py b/p4a/pythonforandroid/recipes/boost/__init__.py index 53d9388..aa386c9 100644 --- a/p4a/pythonforandroid/recipes/boost/__init__.py +++ b/p4a/pythonforandroid/recipes/boost/__init__.py @@ -1,10 +1,13 @@ -from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory +from pythonforandroid.util import current_directory +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import shprint from os.path import join, exists from os import environ +import shutil import sh """ -This recipe creates a custom toolchain and bootstraps Boost from source to build Boost.Build +This recipe bootstraps Boost from source to build Boost.Build including python bindings """ @@ -12,7 +15,8 @@ including python bindings class BoostRecipe(Recipe): # Todo: make recipe compatible with all p4a architectures ''' - .. note:: This recipe can be built only against API 21+ and arch armeabi-v7a + .. note:: This recipe can be built only against API 21+ and an android + ndk >= r19 .. versionchanged:: 0.6.0 Rewrote recipe to support clang's build. The following changes have been made: - Default compiler for ndk's toolchain set to clang - Python version will be detected via user-config.jam - Changed stl's lib from ``gnustl_shared`` to ``c++_shared`` + + .. 
versionchanged:: 2019.08.09.1.dev0 + + - Bumped version number to 1.68.0 + - Adapted to work with ndk-r19+ ''' - version = '1.68.0' - url = 'http://downloads.sourceforge.net/project/boost/' \ - 'boost/{version}/boost_{version_underscore}.tar.bz2' - depends = [('python2', 'python3')] - patches = ['disable-so-version.patch', - 'use-android-libs.patch', - 'fix-android-issues.patch'] + version = '1.69.0' + url = ( + 'https://downloads.sourceforge.net/project/boost/' + 'boost/{version}/boost_{version_underscore}.tar.bz2' + ) + depends = ['python3'] + patches = [ + 'disable-so-version.patch', + 'use-android-libs.patch', + 'fix-android-issues.patch', + ] + need_stl_shared = True @property def versioned_url(self): @@ -39,65 +53,51 @@ class BoostRecipe(Recipe): return None return self.url.format( version=self.version, - version_underscore=self.version.replace('.', '_')) + version_underscore=self.version.replace('.', '_'), + ) def should_build(self, arch): return not exists(join(self.get_build_dir(arch.arch), 'b2')) def prebuild_arch(self, arch): - super(BoostRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) env = self.get_recipe_env(arch) with current_directory(self.get_build_dir(arch.arch)): - if not exists(env['CROSSHOME']): - # Make custom toolchain - bash = sh.Command('bash') - shprint(bash, join(self.ctx.ndk_dir, 'build/tools/make-standalone-toolchain.sh'), - '--arch=' + env['ARCH'], - '--platform=android-' + str(self.ctx.android_api), - '--toolchain=' + env['CROSSHOST'] + '-' + self.ctx.toolchain_version + ':-llvm', - '--use-llvm', - '--stl=libc++', - '--install-dir=' + env['CROSSHOME'] - ) # Set custom configuration - shutil.copyfile(join(self.get_recipe_dir(), 'user-config.jam'), - join(env['BOOST_BUILD_PATH'], 'user-config.jam')) + shutil.copyfile( + join(self.get_recipe_dir(), 'user-config.jam'), + join(env['BOOST_BUILD_PATH'], 'user-config.jam'), + ) def build_arch(self, arch): - super(BoostRecipe, self).build_arch(arch) + super().build_arch(arch) env = self.get_recipe_env(arch) env['PYTHON_HOST'] = self.ctx.hostpython with current_directory(self.get_build_dir(arch.arch)): - # Compile Boost.Build engine with this custom toolchain - bash = sh.Command('bash') - shprint(bash, 'bootstrap.sh') # Do not pass env - # Install app stl - shutil.copyfile( - join(self.ctx.ndk_dir, 'sources/cxx-stl/llvm-libc++/libs/' - 'armeabi-v7a/libc++_shared.so'), - join(self.ctx.get_libs_dir(arch.arch), 'libc++_shared.so')) - - def select_build_arch(self, arch): - return arch.arch.replace('eabi-v7a', '').replace('eabi', '') + if not exists('b2'): + # Compile Boost.Build engine with this custom toolchain + bash = sh.Command('bash') + shprint(bash, 'bootstrap.sh') # Do not pass env def get_recipe_env(self, arch): # We don't use the normal env because we # are building with a standalone toolchain env = environ.copy() - env['BOOST_BUILD_PATH'] = self.get_build_dir(arch.arch) # find user-config.jam - env['BOOST_ROOT'] = env['BOOST_BUILD_PATH'] # find boost source + # find user-config.jam + env['BOOST_BUILD_PATH'] = self.get_build_dir(arch.arch) + # find boost source + env['BOOST_ROOT'] = env['BOOST_BUILD_PATH'] env['PYTHON_ROOT'] = self.ctx.python_recipe.link_root(arch.arch) env['PYTHON_INCLUDE'] = self.ctx.python_recipe.include_root(arch.arch) env['PYTHON_MAJOR_MINOR'] = self.ctx.python_recipe.version[:3] - env['PYTHON_LINK_VERSION'] = self.ctx.python_recipe.major_minor_version_string - if 'python3' in self.ctx.python_recipe.name: - env['PYTHON_LINK_VERSION'] += 'm' + env['PYTHON_LINK_VERSION'] = 
self.ctx.python_recipe.link_version - env['ARCH'] = self.select_build_arch(arch) - env['CROSSHOST'] = env['ARCH'] + '-linux-androideabi' - env['CROSSHOME'] = join(env['BOOST_ROOT'], 'standalone-' + env['ARCH'] + '-toolchain') + env['ARCH'] = arch.arch.replace('-', '') + env['TARGET_TRIPLET'] = arch.target + env['CROSSHOST'] = arch.command_prefix + env['CROSSHOME'] = self.ctx.ndk.llvm_prebuilt_dir return env diff --git a/p4a/pythonforandroid/recipes/boost/fix-android-issues.patch b/p4a/pythonforandroid/recipes/boost/fix-android-issues.patch index 5413480..40bdea4 100644 --- a/p4a/pythonforandroid/recipes/boost/fix-android-issues.patch +++ b/p4a/pythonforandroid/recipes/boost/fix-android-issues.patch @@ -1,10 +1,26 @@ -diff -u -r boost_1_68_0.orig/boost/config/user.hpp boost_1_68_0/boost/config/user.hpp ---- boost_1_68_0.orig/boost/config/user.hpp 2018-08-01 22:50:46.000000000 +0200 -+++ boost_1_68_0/boost/config/user.hpp 2018-08-27 15:43:38.000000000 +0200 +diff -u -r boost_1_69_0.orig/boost/asio/detail/config.hpp boost_1_69_0/boost/asio/detail/config.hpp +--- boost_1_69_0.orig/boost/asio/detail/config.hpp 2018-12-05 20:58:15.000000000 +0100 ++++ boost_1_69_0/boost/asio/detail/config.hpp 2018-12-13 14:52:06.000000000 +0100 +@@ -815,7 +815,11 @@ + # if (_LIBCPP_VERSION < 7000) + # if (__cplusplus >= 201402) + # if __has_include() +-# define BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW 1 ++# if __clang_major__ >= 7 ++# undef BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW ++# else ++# define BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW 1 ++# endif // __clang_major__ >= 7 + # endif // __has_include() + # endif // (__cplusplus >= 201402) + # endif // (_LIBCPP_VERSION < 7000) +diff -u -r boost_1_69_0.orig/boost/config/user.hpp boost_1_69_0/boost/config/user.hpp +--- boost_1_69_0.orig/boost/config/user.hpp 2018-12-05 20:58:16.000000000 +0100 ++++ boost_1_69_0/boost/config/user.hpp 2018-12-13 14:35:29.000000000 +0100 @@ -13,6 +13,12 @@ // configuration policy: // - + +// Android defines +// There is problem with std::atomic on android (and some other platforms). 
+// See this link for more info: @@ -13,41 +29,25 @@ diff -u -r boost_1_68_0.orig/boost/config/user.hpp boost_1_68_0/boost/config/use + // define this to locate a compiler config file: // #define BOOST_COMPILER_CONFIG - -diff -u -r boost_1_68_0.orig/boost/asio/detail/config.hpp boost_1_68_0/boost/asio/detail/config.hpp ---- boost_1_68_0.orig/boost/asio/detail/config.hpp 2018-08-01 22:50:46.000000000 +0200 -+++ boost_1_68_0/boost/asio/detail/config.hpp 2018-09-19 12:39:56.000000000 +0200 -@@ -804,7 +804,11 @@ - # if defined(__clang__) - # if (__cplusplus >= 201402) - # if __has_include() --# define BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW 1 -+# if __clang_major__ >= 7 -+# undef BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW -+# else -+# define BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW 1 -+# endif // __clang_major__ >= 7 - # endif // __has_include() - # endif // (__cplusplus >= 201402) - # endif // defined(__clang__) -diff -u -r boost_1_68_0.orig/boost/system/error_code.hpp boost_1_68_0/boost/system/error_code.hpp ---- boost_1_68_0.orig/boost/system/error_code.hpp 2018-08-01 22:50:53.000000000 +0200 -+++ boost_1_68_0/boost/system/error_code.hpp 2018-08-27 15:44:29.000000000 +0200 -@@ -17,6 +17,7 @@ - #include - #include - #include + +diff -u -r boost_1_69_0.orig/boost/system/error_code.hpp boost_1_69_0/boost/system/error_code.hpp +--- boost_1_69_0.orig/boost/system/error_code.hpp 2018-12-05 20:58:23.000000000 +0100 ++++ boost_1_69_0/boost/system/error_code.hpp 2018-12-13 14:53:33.000000000 +0100 +@@ -14,6 +14,7 @@ + #include + #include + #include +#include #include #include - #include -diff -u -r boost_1_68_0.orig/libs/filesystem/src/operations.cpp boost_1_68_0/libs/filesystem/src/operations.cpp ---- boost_1_68_0.orig/libs/filesystem/src/operations.cpp 2018-08-01 22:50:47.000000000 +0200 -+++ boost_1_68_0/libs/filesystem/src/operations.cpp 2018-08-27 15:47:15.000000000 +0200 + #include +diff -u -r boost_1_69_0.orig/libs/filesystem/src/operations.cpp boost_1_69_0/libs/filesystem/src/operations.cpp +--- boost_1_69_0.orig/libs/filesystem/src/operations.cpp 2018-12-05 20:58:17.000000000 +0100 ++++ boost_1_69_0/libs/filesystem/src/operations.cpp 2018-12-13 14:55:41.000000000 +0100 @@ -232,6 +232,21 @@ - + # if defined(BOOST_POSIX_API) - + +# if defined(__ANDROID__) +# define truncate libboost_truncate_wrapper +// truncate() is present in Android libc only starting from ABI 21, so here's a simple wrapper @@ -64,5 +64,23 @@ diff -u -r boost_1_68_0.orig/libs/filesystem/src/operations.cpp boost_1_68_0/lib +# endif + typedef int err_t; - + // POSIX uses a 0 return to indicate success +diff -u -r boost_1_69_0.orig/tools/build/src/tools/common.jam boost_1_69_0/tools/build/src/tools/common.jam +--- boost_1_69_0.orig/tools/build/src/tools/common.jam 2019-01-25 23:18:34.544755629 +0200 ++++ boost_1_69_0/tools/build/src/tools/common.jam 2019-01-25 23:20:42.309047754 +0200 +@@ -976,10 +976,10 @@ + } + + # Ditto, from Clang 4 +- if $(tag) in clang clangw && [ numbers.less 3 $(version[1]) ] +- { +- version = $(version[1]) ; +- } ++ #if $(tag) in clang clangw && [ numbers.less 3 $(version[1]) ] ++ #{ ++ # version = $(version[1]) ; ++ #} + + # On intel, version is not added, because it does not matter and it is the + # version of vc used as backend that matters. 
Ideally, we should encode the diff --git a/p4a/pythonforandroid/recipes/boost/user-config.jam b/p4a/pythonforandroid/recipes/boost/user-config.jam index e50b50a..fa1eef1 100644 --- a/p4a/pythonforandroid/recipes/boost/user-config.jam +++ b/p4a/pythonforandroid/recipes/boost/user-config.jam @@ -1,6 +1,7 @@ import os ; local ARCH = [ os.environ ARCH ] ; +local TARGET_TRIPLET = [ os.environ TARGET_TRIPLET ] ; local CROSSHOME = [ os.environ CROSSHOME ] ; local PYTHON_HOST = [ os.environ PYTHON_HOST ] ; local PYTHON_ROOT = [ os.environ PYTHON_ROOT ] ; @@ -8,42 +9,22 @@ local PYTHON_INCLUDE = [ os.environ PYTHON_INCLUDE ] ; local PYTHON_LINK_VERSION = [ os.environ PYTHON_LINK_VERSION ] ; local PYTHON_MAJOR_MINOR = [ os.environ PYTHON_MAJOR_MINOR ] ; -using clang : $(ARCH) : $(CROSSHOME)/bin/arm-linux-androideabi-clang++ : -$(CROSSHOME)/bin/arm-linux-androideabi-ar -$(CROSSHOME)/sysroot -$(ARCH) --fexceptions --frtti --fpic +using clang : $(ARCH) : $(CROSSHOME)/bin/$(TARGET_TRIPLET)-clang++ : +$(CROSSHOME)/bin/llvm-ar +-fPIC -ffunction-sections --funwind-tables --march=armv7-a --msoft-float --mfpu=neon --mthumb --march=armv7-a --Wl,--fix-cortex-a8 --Os --fomit-frame-pointer --fno-strict-aliasing --DANDROID --D__ANDROID__ --DANDROID_TOOLCHAIN=clang --DANDROID_ABI=armv7-a --DANDROID_STL=c++_shared --DBOOST_ALL_NO_LIB -#-DNDEBUG --O2 --g --fvisibility=hidden --fvisibility-inlines-hidden -fdata-sections --D__arm__ --D_REENTRANT --D_GLIBCXX__PTHREADS --Wno-long-long --Wno-missing-field-initializers --Wno-unused-variable +-funwind-tables +-fstack-protector-strong +-no-canonical-prefixes +-Wformat +-Werror=format-security +-frtti +-fexceptions +-DNDEBUG +-g +-Oz +-mthumb -Wl,-z,relro -Wl,-z,now -lc++_shared diff --git a/p4a/pythonforandroid/recipes/cdecimal/__init__.py b/p4a/pythonforandroid/recipes/cdecimal/__init__.py index 94929c7..a444eb1 100644 --- a/p4a/pythonforandroid/recipes/cdecimal/__init__.py +++ b/p4a/pythonforandroid/recipes/cdecimal/__init__.py @@ -13,7 +13,7 @@ class CdecimalRecipe(CompiledComponentsPythonRecipe): 'cross-compile.patch'] def prebuild_arch(self, arch): - super(CdecimalRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) if not is_darwin(): if '64' in arch.arch: machine = 'ansi64' diff --git a/p4a/pythonforandroid/recipes/cffi/__init__.py b/p4a/pythonforandroid/recipes/cffi/__init__.py index 50458e5..a198a3d 100644 --- a/p4a/pythonforandroid/recipes/cffi/__init__.py +++ b/p4a/pythonforandroid/recipes/cffi/__init__.py @@ -7,7 +7,7 @@ class CffiRecipe(CompiledComponentsPythonRecipe): Extra system dependencies: autoconf, automake and libtool. """ name = 'cffi' - version = '1.11.5' + version = '1.13.2' url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz' depends = ['setuptools', 'pycparser', 'libffi'] @@ -19,14 +19,14 @@ class CffiRecipe(CompiledComponentsPythonRecipe): def get_hostrecipe_env(self, arch=None): # fixes missing ffi.h on some host systems (e.g. 
gentoo) - env = super(CffiRecipe, self).get_hostrecipe_env(arch) + env = super().get_hostrecipe_env(arch) libffi = self.get_recipe('libffi', self.ctx) includes = libffi.get_include_dirs(arch) env['FFI_INC'] = ",".join(includes) return env def get_recipe_env(self, arch=None): - env = super(CffiRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) libffi = self.get_recipe('libffi', self.ctx) includes = libffi.get_include_dirs(arch) env['CFLAGS'] = ' -I'.join([env.get('CFLAGS', '')] + includes) @@ -35,18 +35,13 @@ class CffiRecipe(CompiledComponentsPythonRecipe): self.ctx.get_libs_dir(arch.arch)) env['LDFLAGS'] += ' -L{}'.format(os.path.join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)) # required for libc and libdl - ndk_dir = self.ctx.ndk_platform - ndk_lib_dir = os.path.join(ndk_dir, 'usr', 'lib') - env['LDFLAGS'] += ' -L{}'.format(ndk_lib_dir) - env['LDFLAGS'] += " --sysroot={}".format(self.ctx.ndk_platform) + env['LDFLAGS'] += ' -L{}'.format(arch.ndk_lib_dir_versioned) env['PYTHONPATH'] = ':'.join([ - self.ctx.get_site_packages_dir(), + self.ctx.get_site_packages_dir(arch), env['BUILDLIB_PATH'], ]) env['LDFLAGS'] += ' -L{}'.format(self.ctx.python_recipe.link_root(arch.arch)) - env['LDFLAGS'] += ' -lpython{}'.format(self.ctx.python_recipe.major_minor_version_string) - if 'python3' in self.ctx.python_recipe.name: - env['LDFLAGS'] += 'm' + env['LDFLAGS'] += ' -lpython{}'.format(self.ctx.python_recipe.link_version) return env diff --git a/p4a/pythonforandroid/recipes/coverage/__init__.py b/p4a/pythonforandroid/recipes/coverage/__init__.py index 95f08f1..2ee2d05 100644 --- a/p4a/pythonforandroid/recipes/coverage/__init__.py +++ b/p4a/pythonforandroid/recipes/coverage/__init__.py @@ -7,7 +7,7 @@ class CoverageRecipe(PythonRecipe): url = 'https://pypi.python.org/packages/2d/10/6136c8e10644c16906edf4d9f7c782c0f2e7ed47ff2f41f067384e432088/coverage-{version}.tar.gz' - depends = [('hostpython2', 'hostpython3'), 'setuptools'] + depends = ['hostpython3', 'setuptools'] patches = ['fallback-utf8.patch'] diff --git a/p4a/pythonforandroid/recipes/cppy/__init__.py b/p4a/pythonforandroid/recipes/cppy/__init__.py new file mode 100644 index 0000000..f61e2c2 --- /dev/null +++ b/p4a/pythonforandroid/recipes/cppy/__init__.py @@ -0,0 +1,14 @@ +from pythonforandroid.recipe import PythonRecipe + + +class CppyRecipe(PythonRecipe): + site_packages_name = 'cppy' + version = '1.1.0' + url = 'https://github.com/nucleic/cppy/archive/{version}.zip' + call_hostpython_via_targetpython = False + # to be detected by the matplotlib install script + install_in_hostpython = True + depends = ['setuptools'] + + +recipe = CppyRecipe() diff --git a/p4a/pythonforandroid/recipes/cryptography/__init__.py b/p4a/pythonforandroid/recipes/cryptography/__init__.py index 1b7baba..182c745 100644 --- a/p4a/pythonforandroid/recipes/cryptography/__init__.py +++ b/p4a/pythonforandroid/recipes/cryptography/__init__.py @@ -3,14 +3,13 @@ from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe class CryptographyRecipe(CompiledComponentsPythonRecipe): name = 'cryptography' - version = '2.6.1' + version = '2.8' url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' - depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', - 'enum34', 'ipaddress', 'cffi'] + depends = ['openssl', 'six', 'setuptools', 'cffi'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): - env = super(CryptographyRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) 
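+ # The flags below point the build at the openssl recipe compiled for + # this arch: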
openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) diff --git a/p4a/pythonforandroid/recipes/cymunk/__init__.py b/p4a/pythonforandroid/recipes/cymunk/__init__.py index 96d4169..272c18f 100644 --- a/p4a/pythonforandroid/recipes/cymunk/__init__.py +++ b/p4a/pythonforandroid/recipes/cymunk/__init__.py @@ -6,7 +6,5 @@ class CymunkRecipe(CythonRecipe): url = 'https://github.com/tito/cymunk/archive/{version}.zip' name = 'cymunk' - depends = [('python2', 'python3crystax', 'python3')] - recipe = CymunkRecipe() diff --git a/p4a/pythonforandroid/recipes/cython/__init__.py b/p4a/pythonforandroid/recipes/cython/__init__.py new file mode 100644 index 0000000..9135e18 --- /dev/null +++ b/p4a/pythonforandroid/recipes/cython/__init__.py @@ -0,0 +1,14 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class CythonRecipe(CompiledComponentsPythonRecipe): + + version = '0.29.28' + url = 'https://github.com/cython/cython/archive/{version}.tar.gz' + site_packages_name = 'cython' + depends = ['setuptools'] + call_hostpython_via_targetpython = False + install_in_hostpython = True + + +recipe = CythonRecipe() diff --git a/p4a/pythonforandroid/recipes/evdev/__init__.py b/p4a/pythonforandroid/recipes/evdev/__init__.py index afd542e..b69169d 100644 --- a/p4a/pythonforandroid/recipes/evdev/__init__.py +++ b/p4a/pythonforandroid/recipes/evdev/__init__.py @@ -5,6 +5,7 @@ class EvdevRecipe(CompiledComponentsPythonRecipe): name = 'evdev' version = 'v0.4.7' url = 'https://github.com/gvalkov/python-evdev/archive/{version}.zip' + call_hostpython_via_targetpython = False depends = [] @@ -17,8 +18,8 @@ class EvdevRecipe(CompiledComponentsPythonRecipe): 'evdev-permissions.patch'] def get_recipe_env(self, arch=None): - env = super(EvdevRecipe, self).get_recipe_env(arch) - env['NDKPLATFORM'] = self.ctx.ndk_platform + env = super().get_recipe_env(arch) + env['SYSROOT'] = self.ctx.ndk.sysroot return env diff --git a/p4a/pythonforandroid/recipes/evdev/include-dir.patch b/p4a/pythonforandroid/recipes/evdev/include-dir.patch index d6a7c81..a1c41e7 100644 --- a/p4a/pythonforandroid/recipes/evdev/include-dir.patch +++ b/p4a/pythonforandroid/recipes/evdev/include-dir.patch @@ -6,7 +6,7 @@ diff -Naur orig/setup.py v0.4.7/setup.py #----------------------------------------------------------------------------- def create_ecodes(): - header = '/usr/include/linux/input.h' -+ header = os.environ['NDKPLATFORM'] + '/usr/include/linux/input.h' ++ header = os.environ['SYSROOT'] + '/usr/include/linux/input.h' if not os.path.isfile(header): msg = '''\ diff --git a/p4a/pythonforandroid/recipes/ffmpeg/__init__.py b/p4a/pythonforandroid/recipes/ffmpeg/__init__.py index f8e3ec1..9414552 100644 --- a/p4a/pythonforandroid/recipes/ffmpeg/__init__.py +++ b/p4a/pythonforandroid/recipes/ffmpeg/__init__.py @@ -4,8 +4,9 @@ import sh class FFMpegRecipe(Recipe): - version = '3.4.5' - url = 'http://ffmpeg.org/releases/ffmpeg-{version}.tar.bz2' + version = 'n4.3.1' + # Moved to github.com instead of ffmpeg.org to improve download speed + url = 'https://github.com/FFmpeg/FFmpeg/archive/{version}.zip' depends = ['sdl2'] # Need this to build correct recipe order opts_depends = ['openssl', 'ffpyplayer_codecs'] patches = ['patches/configure.patch'] @@ -18,7 +19,7 @@ class FFMpegRecipe(Recipe): self.apply_patches(arch) def get_recipe_env(self, arch): - env = super(FFMpegRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) env['NDK'] = self.ctx.ndk_dir return env @@ -36,14 
+37,20 @@ class FFMpegRecipe(Recipe): '--enable-nonfree', '--enable-protocol=https,tls_openssl', ] - build_dir = Recipe.get_recipe('openssl', self.ctx).get_build_dir(arch.arch) - cflags += ['-I' + build_dir + '/include/'] + build_dir = Recipe.get_recipe( + 'openssl', self.ctx).get_build_dir(arch.arch) + cflags += ['-I' + build_dir + '/include/', + '-DOPENSSL_API_COMPAT=0x10002000L'] ldflags += ['-L' + build_dir] if 'ffpyplayer_codecs' in self.ctx.recipe_build_order: + # Enable GPL + flags += ['--enable-gpl'] + # libx264 flags += ['--enable-libx264'] - build_dir = Recipe.get_recipe('libx264', self.ctx).get_build_dir(arch.arch) + build_dir = Recipe.get_recipe( + 'libx264', self.ctx).get_build_dir(arch.arch) cflags += ['-I' + build_dir + '/include/'] ldflags += ['-lx264', '-L' + build_dir + '/lib/'] @@ -52,6 +59,14 @@ class FFMpegRecipe(Recipe): build_dir = Recipe.get_recipe('libshine', self.ctx).get_build_dir(arch.arch) cflags += ['-I' + build_dir + '/include/'] ldflags += ['-lshine', '-L' + build_dir + '/lib/'] + ldflags += ['-lm'] + + # libvpx + flags += ['--enable-libvpx'] + build_dir = Recipe.get_recipe( + 'libvpx', self.ctx).get_build_dir(arch.arch) + cflags += ['-I' + build_dir + '/include/'] + ldflags += ['-lvpx', '-L' + build_dir + '/lib/'] # Enable all codecs: flags += [ @@ -67,7 +82,7 @@ class FFMpegRecipe(Recipe): '--enable-parser=aac,ac3,h261,h264,mpegaudio,mpeg4video,mpegvideo,vc1', '--enable-decoder=aac,h264,mpeg4,mpegvideo', '--enable-muxer=h264,mov,mp4,mpeg2video', - '--enable-demuxer=aac,h264,m4v,mov,mpegvideo,vc1', + '--enable-demuxer=aac,h264,m4v,mov,mpegvideo,vc1,rtsp', ] # needed to prevent _ffmpeg.so: version node not found for symbol av_init_packet@LIBAVFORMAT_52 @@ -78,39 +93,48 @@ class FFMpegRecipe(Recipe): # disable binaries / doc flags += [ - '--disable-ffmpeg', - '--disable-ffplay', - '--disable-ffprobe', - '--disable-ffserver', + '--disable-programs', '--disable-doc', ] # other flags: flags += [ '--enable-filter=aresample,resample,crop,adelay,volume,scale', - '--enable-protocol=file,http', + '--enable-protocol=file,http,hls,udp,tcp', '--enable-small', '--enable-hwaccels', - '--enable-gpl', '--enable-pic', '--disable-static', + '--disable-debug', '--enable-shared', ] + if 'arm64' in arch.arch: + arch_flag = 'aarch64' + elif 'x86' in arch.arch: + arch_flag = 'x86' + flags += ['--disable-asm'] + else: + arch_flag = 'arm' + # android: flags += [ '--target-os=android', - '--cross-prefix=arm-linux-androideabi-', - '--arch=arm', - '--sysroot=' + self.ctx.ndk_platform, + '--enable-cross-compile', + '--cross-prefix={}-'.format(arch.target), + '--arch={}'.format(arch_flag), + '--strip={}'.format(self.ctx.ndk.llvm_strip), + '--sysroot={}'.format(self.ctx.ndk.sysroot), '--enable-neon', '--prefix={}'.format(realpath('.')), ] - cflags += [ - '-mfpu=vfpv3-d16', - '-mfloat-abi=softfp', - '-fPIC', - ] + + if arch_flag == 'arm': + cflags += [ + '-mfpu=vfpv3-d16', + '-mfloat-abi=softfp', + '-fPIC', + ] env['CFLAGS'] += ' ' + ' '.join(cflags) env['LDFLAGS'] += ' ' + ' '.join(ldflags) @@ -120,7 +144,8 @@ class FFMpegRecipe(Recipe): shprint(sh.make, '-j4', _env=env) shprint(sh.make, 'install', _env=env) # copy libs: - sh.cp('-a', sh.glob('./lib/lib*.so'), self.ctx.get_libs_dir(arch.arch)) + sh.cp('-a', sh.glob('./lib/lib*.so'), + self.ctx.get_libs_dir(arch.arch)) recipe = FFMpegRecipe() diff --git a/p4a/pythonforandroid/recipes/ffmpeg/patches/configure.patch b/p4a/pythonforandroid/recipes/ffmpeg/patches/configure.patch index b898c7f..cacf029 100644 --- 
a/p4a/pythonforandroid/recipes/ffmpeg/patches/configure.patch +++ b/p4a/pythonforandroid/recipes/ffmpeg/patches/configure.patch @@ -1,40 +1,11 @@ ---- ./configure.orig 2017-12-11 00:35:18.000000000 +0300 -+++ ./configure 2017-12-19 09:47:54.104914600 +0300 -@@ -4841,9 +4841,6 @@ - add_cflags -std=c11 || - check_cflags -std=c99 - --check_cppflags -D_FILE_OFFSET_BITS=64 --check_cppflags -D_LARGEFILE_SOURCE -- - add_host_cppflags -D_ISOC99_SOURCE - check_host_cflags -std=c99 - check_host_cflags -Wall -@@ -5979,7 +5976,7 @@ +--- ./configure 2020-10-11 19:12:16.759760904 +0200 ++++ ./configure.patch 2020-10-11 19:15:49.059533563 +0200 +@@ -6361,7 +6361,7 @@ enabled librsvg && require_pkg_config librsvg librsvg-2.0 librsvg-2.0/librsvg/rsvg.h rsvg_handle_render_cairo enabled librtmp && require_pkg_config librtmp librtmp librtmp/rtmp.h RTMP_Socket - enabled librubberband && require_pkg_config librubberband "rubberband >= 1.8.1" rubberband/rubberband-c.h rubberband_new + enabled librubberband && require_pkg_config librubberband "rubberband >= 1.8.1" rubberband/rubberband-c.h rubberband_new -lstdc++ && append librubberband_extralibs "-lstdc++" -enabled libshine && require_pkg_config libshine shine shine/layer3.h shine_encode_buffer -+enabled libshine && require "shine" shine/layer3.h shine_encode_buffer -lshine - enabled libsmbclient && { use_pkg_config libsmbclient smbclient libsmbclient.h smbc_init || - require smbclient libsmbclient.h smbc_init -lsmbclient; } - enabled libsnappy && require libsnappy snappy-c.h snappy_compress -lsnappy - -diff -Naur ffmpeg/configure ffmpeg-1/configure ---- ffmpeg/configure 2019-01-11 09:30:02.824961600 +0100 -+++ ffmpeg-1/configure 2019-01-11 09:29:54.976149600 +0100 -@@ -6068,11 +6068,11 @@ - { ! enabled cross_compile && add_cflags -isystem/opt/vc/include/IL && check_header OMX_Core.h ; } || - die "ERROR: OpenMAX IL headers not found"; } - enabled omx && require_header OMX_Core.h --enabled openssl && { use_pkg_config openssl openssl openssl/ssl.h OPENSSL_init_ssl || -+enabled openssl && { use_pkg_config openssl openssl openssl/ssl.h OPENSSL_init_ssl || - use_pkg_config openssl openssl openssl/ssl.h SSL_library_init || -- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl -lcrypto || -- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl32 -leay32 || -- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl -lcrypto -lws2_32 -lgdi32 || -+ check_lib openssl openssl/ssl.h SSL_library_init -lssl -lcrypto || -+ check_lib openssl openssl/ssl.h SSL_library_init -lssl32 -leay32 || -+ check_lib openssl openssl/ssl.h SSL_library_init -lssl -lcrypto -lws2_32 -lgdi32 || - die "ERROR: openssl not found"; } - enabled rkmpp && { { require_pkg_config rockchip_mpp rockchip_mpp rockchip/rk_mpi.h mpp_create || ++enabled libshine && require "shine" shine/layer3.h shine_encode_buffer -lshine -lm + enabled libsmbclient && { check_pkg_config libsmbclient smbclient libsmbclient.h smbc_init || + require libsmbclient libsmbclient.h smbc_init -lsmbclient; } + enabled libsnappy && require libsnappy snappy-c.h snappy_compress -lsnappy -lstdc++ \ No newline at end of file diff --git a/p4a/pythonforandroid/recipes/ffpyplayer/__init__.py b/p4a/pythonforandroid/recipes/ffpyplayer/__init__.py index 9ff29b7..6260037 100644 --- a/p4a/pythonforandroid/recipes/ffpyplayer/__init__.py +++ b/p4a/pythonforandroid/recipes/ffpyplayer/__init__.py @@ -4,13 +4,13 @@ from os.path import join class FFPyPlayerRecipe(CythonRecipe): - version = '6f7568b498715c2da88f061ebad082a042514923' + version = 'v4.3.2' 
url = 'https://github.com/matham/ffpyplayer/archive/{version}.zip' - depends = [('python2', 'python3'), 'sdl2', 'ffmpeg'] + depends = ['python3', 'sdl2', 'ffmpeg'] opt_depends = ['openssl', 'ffpyplayer_codecs'] def get_recipe_env(self, arch, with_flags_in_cc=True): - env = super(FFPyPlayerRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) build_dir = Recipe.get_recipe('ffmpeg', self.ctx).get_build_dir(arch.arch) env["FFMPEG_INCLUDE_DIR"] = join(build_dir, "include") @@ -20,7 +20,21 @@ class FFPyPlayerRecipe(CythonRecipe): env["SDL_LIB_DIR"] = join(self.ctx.bootstrap.build_dir, 'libs', arch.arch) env["USE_SDL2_MIXER"] = '1' - env["SDL2_MIXER_INCLUDE_DIR"] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_mixer') + + # ffpyplayer does not allow passing more than one include dir for sdl2_mixer (and ATM more is + # not needed), so we only pass the first one. + sdl2_mixer_recipe = self.get_recipe('sdl2_mixer', self.ctx) + env["SDL2_MIXER_INCLUDE_DIR"] = sdl2_mixer_recipe.get_include_dirs(arch)[0] + + # NDKPLATFORM and LIBLINK are our switches for detecting Android platform, so can't be empty + # FIXME: We may want to introduce a cleaner approach to this? + env['NDKPLATFORM'] = "NOTNONE" + env['LIBLINK'] = 'NOTNONE' + + # The ffmpeg recipe enables GPL components only if the ffpyplayer_codecs recipe is used. + # Therefore we need to disable libpostproc if it is skipped. + if 'ffpyplayer_codecs' not in self.ctx.recipe_build_order: + env["CONFIG_POSTPROC"] = '0' return env diff --git a/p4a/pythonforandroid/recipes/ffpyplayer_codecs/__init__.py b/p4a/pythonforandroid/recipes/ffpyplayer_codecs/__init__.py index b324194..eedb126 100644 --- a/p4a/pythonforandroid/recipes/ffpyplayer_codecs/__init__.py +++ b/p4a/pythonforandroid/recipes/ffpyplayer_codecs/__init__.py @@ -2,7 +2,7 @@ from pythonforandroid.toolchain import Recipe class FFPyPlayerCodecsRecipe(Recipe): - depends = ['libshine', 'libx264'] + depends = ['libx264', 'libshine', 'libvpx'] def build_arch(self, arch): pass diff --git a/p4a/pythonforandroid/recipes/flask/__init__.py b/p4a/pythonforandroid/recipes/flask/__init__.py index 1a9b685..b272942 100644 --- a/p4a/pythonforandroid/recipes/flask/__init__.py +++ b/p4a/pythonforandroid/recipes/flask/__init__.py @@ -3,13 +3,10 @@ from pythonforandroid.recipe import PythonRecipe class FlaskRecipe(PythonRecipe): - # The webserver of 'master' seems to fail - # after a little while on Android, so use - # 0.10.1 at least for now - version = '0.10.1' + version = '2.0.3' url = 'https://github.com/pallets/flask/archive/{version}.zip' - depends = [('python2', 'python3', 'python3crystax'), 'setuptools'] + depends = ['setuptools'] python_depends = ['jinja2', 'werkzeug', 'markupsafe', 'itsdangerous', 'click'] diff --git a/p4a/pythonforandroid/recipes/fontconfig/__init__.py b/p4a/pythonforandroid/recipes/fontconfig/__init__.py index 8ac01e4..ad959f6 100644 --- a/p4a/pythonforandroid/recipes/fontconfig/__init__.py +++ b/p4a/pythonforandroid/recipes/fontconfig/__init__.py @@ -1,3 +1,5 @@ +from os.path import join + from pythonforandroid.recipe import BootstrapNDKRecipe from pythonforandroid.toolchain import current_directory, shprint import sh @@ -13,7 +15,13 @@ class FontconfigRecipe(BootstrapNDKRecipe): env = self.get_recipe_env(arch) with current_directory(self.get_jni_dir()): - shprint(sh.ndk_build, "V=1", 'fontconfig', _env=env) + shprint( + sh.Command(join(self.ctx.ndk_dir, "ndk-build")), + "V=1", + "APP_ALLOW_MISSING_DEPS=true", + "fontconfig", + _env=env, + ) recipe = FontconfigRecipe() diff --git
a/p4a/pythonforandroid/recipes/freetype-py/__init__.py b/p4a/pythonforandroid/recipes/freetype-py/__init__.py new file mode 100644 index 0000000..7be2f2e --- /dev/null +++ b/p4a/pythonforandroid/recipes/freetype-py/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import PythonRecipe + + +class FreetypePyRecipe(PythonRecipe): + version = '2.2.0' + url = 'https://github.com/rougier/freetype-py/archive/refs/tags/v{version}.tar.gz' + depends = ['freetype'] + patches = ['fall-back-to-distutils.patch'] + site_packages_name = 'freetype' + + +recipe = FreetypePyRecipe() diff --git a/p4a/pythonforandroid/recipes/freetype-py/fall-back-to-distutils.patch b/p4a/pythonforandroid/recipes/freetype-py/fall-back-to-distutils.patch new file mode 100644 index 0000000..0f06f18 --- /dev/null +++ b/p4a/pythonforandroid/recipes/freetype-py/fall-back-to-distutils.patch @@ -0,0 +1,15 @@ +diff -ruN freetype-py.orig/setup.py freetype-py/setup.py +--- freetype-py.orig/setup.py 2020-07-09 20:58:51.000000000 +0700 ++++ freetype-py/setup.py 2022-03-02 19:28:17.948831134 +0700 +@@ -12,7 +12,10 @@ + from io import open + from os import path + +-from setuptools import setup ++try: ++ from setuptools import setup ++except ImportError: ++ from distutils.core import setup + + if os.environ.get("FREETYPEPY_BUNDLE_FT"): + print("# Will build and bundle FreeType.") diff --git a/p4a/pythonforandroid/recipes/freetype/__init__.py b/p4a/pythonforandroid/recipes/freetype/__init__.py index 36171ff..0b04c95 100644 --- a/p4a/pythonforandroid/recipes/freetype/__init__.py +++ b/p4a/pythonforandroid/recipes/freetype/__init__.py @@ -1,44 +1,132 @@ -from pythonforandroid.toolchain import Recipe +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import shprint, info from pythonforandroid.util import current_directory -from pythonforandroid.logger import shprint -from os.path import exists, join, realpath +from os.path import join, exists +from multiprocessing import cpu_count import sh class FreetypeRecipe(Recipe): + """The freetype library is special because it has a cyclic dependency with the + harfbuzz library: freetype can be built with harfbuzz support, and + harfbuzz can be built with freetype support. This complicates the build of + both recipes because, in order to get the full feature set, we need to compile + those recipes several times: + - build freetype without harfbuzz + - build harfbuzz with freetype + - build freetype with harfbuzz support - version = '2.5.5' + .. note:: + To build freetype with harfbuzz support you must add `harfbuzz` to your + requirements, otherwise freetype will be built without harfbuzz + + .. 
seealso:: + https://sourceforge.net/projects/freetype/files/freetype2/2.5.3/ + """ + + version = '2.10.1' url = 'http://download.savannah.gnu.org/releases/freetype/freetype-{version}.tar.gz' # noqa + built_libraries = {'libfreetype.so': 'objs/.libs'} - depends = ['harfbuzz'] + def get_recipe_env(self, arch=None, with_harfbuzz=False): + env = super().get_recipe_env(arch) + if with_harfbuzz: + harfbuzz_build = self.get_recipe( + 'harfbuzz', self.ctx + ).get_build_dir(arch.arch) + freetype_install = join(self.get_build_dir(arch.arch), 'install') - def should_build(self, arch): - if exists(join(self.get_build_dir(arch.arch), - 'objs', '.libs', 'libfreetype.a')): - return False - return True + env['HARFBUZZ_CFLAGS'] = '-I{harfbuzz} -I{harfbuzz}/src'.format( + harfbuzz=harfbuzz_build + ) + env['HARFBUZZ_LIBS'] = ( + '-L{freetype}/lib -lfreetype ' + '-L{harfbuzz}/src/.libs -lharfbuzz'.format( + freetype=freetype_install, harfbuzz=harfbuzz_build + ) + ) - def build_arch(self, arch): - env = self.get_recipe_env(arch) + # android's zlib support + zlib_lib_path = arch.ndk_lib_dir_versioned + zlib_includes = self.ctx.ndk.sysroot_include_dir - harfbuzz_recipe = Recipe.get_recipe('harfbuzz', self.ctx) - env['LDFLAGS'] = ' '.join( - [env['LDFLAGS'], - '-L{}'.format(join(harfbuzz_recipe.get_build_dir(arch.arch), - 'src', '.libs'))]) + def add_flag_if_not_added(flag, env_key): + if flag not in env[env_key]: + env[env_key] += flag + add_flag_if_not_added(' -I' + zlib_includes, 'CFLAGS') + add_flag_if_not_added(' -L' + zlib_lib_path, 'LDFLAGS') + add_flag_if_not_added(' -lz', 'LDLIBS') + + return env + + def build_arch(self, arch, with_harfbuzz=False): + env = self.get_recipe_env(arch, with_harfbuzz=with_harfbuzz) + + harfbuzz_in_recipes = 'harfbuzz' in self.ctx.recipe_build_order + prefix_path = self.get_build_dir(arch.arch) + if harfbuzz_in_recipes and not with_harfbuzz: + # This is the first time we build freetype and we modify `prefix`, + # because we will install the compiled library so later we can + # build harfbuzz (with freetype support) using this freetype + # installation + prefix_path = join(prefix_path, 'install') + + # Configure freetype library + config_args = { + '--host={}'.format(arch.command_prefix), + '--prefix={}'.format(prefix_path), + '--without-bzip2', + '--with-png=no', + } + if not harfbuzz_in_recipes: + info('Build freetype (without harfbuzz)') + config_args = config_args.union( + {'--disable-static', + '--enable-shared', + '--with-harfbuzz=no', + '--with-zlib=yes', + } + ) + elif not with_harfbuzz: + info('Build freetype for First time (without harfbuzz)') + # This time we will build our freetype library as static because we + # want that the harfbuzz library to have the necessary freetype + # symbols/functions, so we avoid to have two freetype shared + # libraries which will be confusing and harder to link with them + config_args = config_args.union( + {'--disable-shared', '--with-harfbuzz=no', '--with-zlib=no'} + ) + else: + info('Build freetype for Second time (with harfbuzz)') + config_args = config_args.union( + {'--disable-static', + '--enable-shared', + '--with-harfbuzz=yes', + '--with-zlib=yes', + } + ) + info('Configure args are:\n\t-{}'.format('\n\t-'.join(config_args))) + + # Build freetype library with current_directory(self.get_build_dir(arch.arch)): configure = sh.Command('./configure') - shprint(configure, - '--host=arm-linux-androideabi', - '--prefix={}'.format(realpath('.')), - '--without-zlib', - '--with-png=no', - '--disable-shared', - _env=env) - 
shprint(sh.make, '-j5', _env=env) + shprint(configure, *config_args, _env=env) + shprint(sh.make, '-j', str(cpu_count()), _env=env) - shprint(sh.cp, 'objs/.libs/libfreetype.a', self.ctx.libs_dir) + if not with_harfbuzz and harfbuzz_in_recipes: + info('Installing freetype (first time build without harfbuzz)') + # First build, install the compiled lib, and clean build env + shprint(sh.make, 'install', _env=env) + shprint(sh.make, 'distclean', _env=env) + + def install_libraries(self, arch): + # This library is special because the first build may not + # generate the expected library, since it can depend on harfbuzz, so + # we make sure to only install it when the library exists + if not exists(list(self.get_libraries(arch))[0]): + return + self.install_libs(arch, *self.get_libraries(arch)) recipe = FreetypeRecipe() diff --git a/p4a/pythonforandroid/recipes/genericndkbuild/__init__.py b/p4a/pythonforandroid/recipes/genericndkbuild/__init__.py index 2d1cdb0..901f208 100644 --- a/p4a/pythonforandroid/recipes/genericndkbuild/__init__.py +++ b/p4a/pythonforandroid/recipes/genericndkbuild/__init__.py @@ -1,3 +1,5 @@ +from os.path import join + from pythonforandroid.recipe import BootstrapNDKRecipe from pythonforandroid.toolchain import current_directory, shprint import sh @@ -7,15 +9,17 @@ class GenericNDKBuildRecipe(BootstrapNDKRecipe): version = None url = None - depends = [('python2', 'python3', 'python3crystax')] - conflicts = ['sdl2', 'pygame', 'sdl'] + depends = ['python3'] + conflicts = ['sdl2'] def should_build(self, arch): return True def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=True): - env = super(GenericNDKBuildRecipe, self).get_recipe_env( - arch=arch, with_flags_in_cc=with_flags_in_cc, with_python=with_python) + env = super().get_recipe_env( + arch=arch, with_flags_in_cc=with_flags_in_cc, + with_python=with_python, + ) env['APP_ALLOW_MISSING_DEPS'] = 'true' return env @@ -23,7 +27,7 @@ class GenericNDKBuildRecipe(BootstrapNDKRecipe): env = self.get_recipe_env(arch) with current_directory(self.get_jni_dir()): - shprint(sh.ndk_build, "V=1", _env=env) + shprint(sh.Command(join(self.ctx.ndk_dir, "ndk-build")), "V=1", _env=env) recipe = GenericNDKBuildRecipe() diff --git a/p4a/pythonforandroid/recipes/gevent/__init__.py b/p4a/pythonforandroid/recipes/gevent/__init__.py index 5933fb3..7958a54 100644 --- a/p4a/pythonforandroid/recipes/gevent/__init__.py +++ b/p4a/pythonforandroid/recipes/gevent/__init__.py @@ -6,16 +6,17 @@ class GeventRecipe(CythonRecipe): version = '1.4.0' url = 'https://pypi.python.org/packages/source/g/gevent/gevent-{version}.tar.gz' - depends = ['librt', 'greenlet'] + depends = ['librt', 'setuptools'] patches = ["cross_compiling.patch"] def get_recipe_env(self, arch=None, with_flags_in_cc=True): """ - Moves all -I -D from CFLAGS to CPPFLAGS environment. - Moves all -l from LDFLAGS to LIBS environment. + - Copies all -l from LDLIBS to LIBS environment. 
- Fixes linker name (use cross compiler) and flags (appends LIBS) """ - env = super(GeventRecipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) # CFLAGS may only be used to specify C compiler flags, for macro definitions use CPPFLAGS regex = re.compile(r'(?:\s|^)-[DI][\S]+') env['CPPFLAGS'] = ''.join(re.findall(regex, env['CFLAGS'])).strip() @@ -24,6 +25,7 @@ class GeventRecipe(CythonRecipe): # LDFLAGS may only be used to specify linker flags, for libraries use LIBS regex = re.compile(r'(?:\s|^)-l[\w\.]+') env['LIBS'] = ''.join(re.findall(regex, env['LDFLAGS'])).strip() + env['LIBS'] += ' {}'.format(''.join(re.findall(regex, env['LDLIBS'])).strip()) env['LDFLAGS'] = re.sub(regex, '', env['LDFLAGS']) info('Moved "{}" from LDFLAGS to LIBS.'.format(env['LIBS'])) return env diff --git a/p4a/pythonforandroid/recipes/groestlcoin_hash/__init__.py b/p4a/pythonforandroid/recipes/groestlcoin_hash/__init__.py index 62344f0..873ca61 100644 --- a/p4a/pythonforandroid/recipes/groestlcoin_hash/__init__.py +++ b/p4a/pythonforandroid/recipes/groestlcoin_hash/__init__.py @@ -2,9 +2,9 @@ from pythonforandroid.recipe import CythonRecipe class GroestlcoinHashRecipe(CythonRecipe): - version = '1.0.1' + version = '1.0.3' url = 'https://github.com/Groestlcoin/groestlcoin-hash-python/archive/{version}.tar.gz' - depends = [] + depends = ['setuptools'] cythonize = False diff --git a/p4a/pythonforandroid/recipes/harfbuzz/__init__.py b/p4a/pythonforandroid/recipes/harfbuzz/__init__.py index 32f4e51..fd1dbe9 100644 --- a/p4a/pythonforandroid/recipes/harfbuzz/__init__.py +++ b/p4a/pythonforandroid/recipes/harfbuzz/__init__.py @@ -1,39 +1,75 @@ -from pythonforandroid.toolchain import Recipe +from pythonforandroid.recipe import Recipe from pythonforandroid.util import current_directory from pythonforandroid.logger import shprint -from os.path import exists, join +from multiprocessing import cpu_count +from os.path import join import sh class HarfbuzzRecipe(Recipe): - version = '0.9.40' - url = 'http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-{version}.tar.bz2' # noqa + """The harfbuzz library is special because it has a cyclic dependency with the + freetype library: freetype can be built with harfbuzz support, and + harfbuzz can be built with freetype support. This complicates the build of + both recipes because, in order to get the full feature set, we need to compile + those recipes several times: + - build freetype without harfbuzz + - build harfbuzz with freetype + - build freetype with harfbuzz support - def should_build(self, arch): - if exists(join(self.get_build_dir(arch.arch), - 'src', '.libs', 'libharfbuzz.a')): - return False - return True + .. 
seealso:: + https://sourceforge.net/projects/freetype/files/freetype2/2.5.3/ + """ + + version = '2.6.4' + url = 'http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-{version}.tar.xz' # noqa + opt_depends = ['freetype'] + built_libraries = {'libharfbuzz.so': 'src/.libs'} + + def get_recipe_env(self, arch=None): + env = super().get_recipe_env(arch) + if 'freetype' in self.ctx.recipe_build_order: + freetype = self.get_recipe('freetype', self.ctx) + freetype_install = join( + freetype.get_build_dir(arch.arch), 'install' + ) + # Explicitly tell harfbuzz's configure script that we want to + # use our freetype library or it won't be correctly detected + env['FREETYPE_CFLAGS'] = '-I{}/include/freetype2'.format( + freetype_install + ) + env['FREETYPE_LIBS'] = ' '.join( + ['-L{}/lib'.format(freetype_install), '-lfreetype'] + ) + return env def build_arch(self, arch): env = self.get_recipe_env(arch) - env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format( - self.ctx.get_libs_dir(arch.arch) + - '-L{}'.format(self.ctx.libs_dir)) + with current_directory(self.get_build_dir(arch.arch)): configure = sh.Command('./configure') - shprint(configure, '--without-icu', '--host=arm-linux=androideabi', - '--prefix={}'.format( - join(self.ctx.build_dir, 'python-install')), - '--without-freetype', - '--without-glib', - '--disable-shared', - _env=env) - shprint(sh.make, '-j5', _env=env) + shprint( + configure, + '--host={}'.format(arch.command_prefix), + '--prefix={}'.format(self.get_build_dir(arch.arch)), + '--with-freetype={}'.format( + 'yes' + if 'freetype' in self.ctx.recipe_build_order + else 'no' + ), + '--with-icu=no', + '--with-cairo=no', + '--with-fontconfig=no', + '--with-glib=no', + _env=env, + ) + shprint(sh.make, '-j', str(cpu_count()), _env=env) - shprint(sh.cp, '-L', join('src', '.libs', 'libharfbuzz.a'), - self.ctx.libs_dir) + if 'freetype' in self.ctx.recipe_build_order: + # Rebuild/install freetype with harfbuzz support + freetype = self.get_recipe('freetype', self.ctx) + freetype.build_arch(arch, with_harfbuzz=True) + freetype.install_libraries(arch) recipe = HarfbuzzRecipe() diff --git a/p4a/pythonforandroid/recipes/hostpython3/__init__.py b/p4a/pythonforandroid/recipes/hostpython3/__init__.py index 8b268bd..ef2324a 100644 --- a/p4a/pythonforandroid/recipes/hostpython3/__init__.py +++ b/p4a/pythonforandroid/recipes/hostpython3/__init__.py @@ -1,17 +1,144 @@ -from pythonforandroid.python import HostPythonRecipe +import sh +import os + +from multiprocessing import cpu_count +from pathlib import Path +from os.path import join + +from pythonforandroid.logger import shprint +from pythonforandroid.recipe import Recipe +from pythonforandroid.util import ( + BuildInterruptingException, + current_directory, + ensure_dir, +) +from pythonforandroid.prerequisites import OpenSSLPrerequisite + +HOSTPYTHON_VERSION_UNSET_MESSAGE = ( + 'The hostpython recipe must have set version' +) + +SETUP_DIST_NOT_FIND_MESSAGE = ( + 'Could not find Setup.dist or Setup in Python build' +) -class Hostpython3Recipe(HostPythonRecipe): +class HostPython3Recipe(Recipe): ''' The hostpython3's recipe. + .. versionchanged:: 2019.10.06.post0 + Refactored from deleted class ``python.HostPythonRecipe`` into here. + .. 
versionchanged:: 0.6.0 Refactored into the new class :class:`~pythonforandroid.python.HostPythonRecipe` ''' - version = '3.7.1' + + version = '3.9.9' name = 'hostpython3' - conflicts = ['hostpython2', 'hostpython3crystax'] + + build_subdir = 'native-build' + '''Specify the sub build directory for the hostpython3 recipe. Defaults + to ``native-build``.''' + + url = 'https://www.python.org/ftp/python/{version}/Python-{version}.tgz' + '''The default url to download our host python recipe. This url will + change depending on the python version set in attribute :attr:`version`.''' + + patches = ['patches/pyconfig_detection.patch'] + + @property + def _exe_name(self): + ''' + Returns the name of the python executable depending on the version. + ''' + if not self.version: + raise BuildInterruptingException(HOSTPYTHON_VERSION_UNSET_MESSAGE) + return f'python{self.version.split(".")[0]}' + + @property + def python_exe(self): + '''Returns the full path of the hostpython executable.''' + return join(self.get_path_to_python(), self._exe_name) + + def get_recipe_env(self, arch=None): + env = os.environ.copy() + openssl_prereq = OpenSSLPrerequisite() + if env.get("PKG_CONFIG_PATH", ""): + env["PKG_CONFIG_PATH"] = os.pathsep.join( + [openssl_prereq.pkg_config_location, env["PKG_CONFIG_PATH"]] + ) + else: + env["PKG_CONFIG_PATH"] = openssl_prereq.pkg_config_location + return env + + def should_build(self, arch): + if Path(self.python_exe).exists(): + # no need to build, but we must set hostpython for our Context + self.ctx.hostpython = self.python_exe + return False + return True + + def get_build_container_dir(self, arch=None): + choices = self.check_recipe_choices() + dir_name = '-'.join([self.name] + choices) + return join(self.ctx.build_dir, 'other_builds', dir_name, 'desktop') + + def get_build_dir(self, arch=None): + ''' + .. note:: Unlike other recipes, the hostpython build dir doesn't + depend on the target arch + ''' + return join(self.get_build_container_dir(), self.name) + + def get_path_to_python(self): + return join(self.get_build_dir(), self.build_subdir) + + def build_arch(self, arch): + env = self.get_recipe_env(arch) + + recipe_build_dir = self.get_build_dir(arch.arch) + + # Create a subdirectory to actually perform the build + build_dir = join(recipe_build_dir, self.build_subdir) + ensure_dir(build_dir) + + # Configure the build + with current_directory(build_dir): + if not Path('config.status').exists(): + shprint(sh.Command(join(recipe_build_dir, 'configure')), _env=env) + + with current_directory(recipe_build_dir): + # Create the Setup file. This copying from Setup.dist is + # the normal and expected procedure before Python 3.8, but + # after this the file with default options is already named "Setup" + setup_dist_location = join('Modules', 'Setup.dist') + if Path(setup_dist_location).exists(): + shprint(sh.cp, setup_dist_location, + join(build_dir, 'Modules', 'Setup')) + else: + # Check the expected file does exist + setup_location = join('Modules', 'Setup') + if not Path(setup_location).exists(): + raise BuildInterruptingException( + SETUP_DIST_NOT_FIND_MESSAGE + ) + + shprint(sh.make, '-j', str(cpu_count()), '-C', build_dir, _env=env) + + # make a copy of the python executable giving it the name we want, + # because we got different python's executable names depending on + # the fs being case-insensitive (Mac OS X, Cygwin...) 
or + # case-sensitive (linux)...so this way we will have an unique name + # for our hostpython, regarding the used fs + for exe_name in ['python.exe', 'python']: + exe = join(self.get_path_to_python(), exe_name) + if Path(exe).is_file(): + shprint(sh.cp, exe, self.python_exe) + break + + self.ctx.hostpython = self.python_exe -recipe = Hostpython3Recipe() +recipe = HostPython3Recipe() diff --git a/p4a/pythonforandroid/recipes/hostpython3/patches/pyconfig_detection.patch b/p4a/pythonforandroid/recipes/hostpython3/patches/pyconfig_detection.patch new file mode 100644 index 0000000..7f78b66 --- /dev/null +++ b/p4a/pythonforandroid/recipes/hostpython3/patches/pyconfig_detection.patch @@ -0,0 +1,13 @@ +diff -Nru Python-3.8.2/Lib/site.py Python-3.8.2-new/Lib/site.py +--- Python-3.8.2/Lib/site.py 2020-04-28 12:48:38.000000000 -0700 ++++ Python-3.8.2-new/Lib/site.py 2020-04-28 12:52:46.000000000 -0700 +@@ -487,7 +487,8 @@ + if key == 'include-system-site-packages': + system_site = value.lower() + elif key == 'home': +- sys._home = value ++ # this is breaking pyconfig.h path detection with venv ++ print('Ignoring "sys._home = value" override', file=sys.stderr) + + sys.prefix = sys.exec_prefix = site_prefix + diff --git a/p4a/pythonforandroid/recipes/icu/__init__.py b/p4a/pythonforandroid/recipes/icu/__init__.py index 4bb2de0..232939b 100644 --- a/p4a/pythonforandroid/recipes/icu/__init__.py +++ b/p4a/pythonforandroid/recipes/icu/__init__.py @@ -1,33 +1,57 @@ import sh import os -from os.path import join, isdir -from pythonforandroid.recipe import NDKRecipe +import platform +from os.path import join, isdir, exists +from multiprocessing import cpu_count +from pythonforandroid.recipe import Recipe from pythonforandroid.toolchain import shprint from pythonforandroid.util import current_directory, ensure_dir -class ICURecipe(NDKRecipe): +class ICURecipe(Recipe): name = 'icu4c' version = '57.1' - url = 'http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz' + major_version = version.split('.')[0] + url = ( + "https://github.com/unicode-org/icu/releases/download/" + "release-{version_hyphen}/icu4c-{version_underscore}-src.tgz" + ) - depends = [('hostpython2', 'hostpython3')] # installs in python - generated_libraries = [ - 'libicui18n.so', 'libicuuc.so', 'libicudata.so', 'libicule.so'] + depends = ['hostpython3'] # installs in python + patches = ['disable-libs-version.patch'] - def get_lib_dir(self, arch): - lib_dir = join(self.ctx.get_python_install_dir(), "lib") - ensure_dir(lib_dir) - return lib_dir + built_libraries = { + 'libicui18n{}.so'.format(major_version): 'build_icu_android/lib', + 'libicuuc{}.so'.format(major_version): 'build_icu_android/lib', + 'libicudata{}.so'.format(major_version): 'build_icu_android/lib', + 'libicule{}.so'.format(major_version): 'build_icu_android/lib', + 'libicuio{}.so'.format(major_version): 'build_icu_android/lib', + 'libicutu{}.so'.format(major_version): 'build_icu_android/lib', + 'libiculx{}.so'.format(major_version): 'build_icu_android/lib', + } - def prepare_build_dir(self, arch): - if self.ctx.android_api > 19: - # greater versions do not have /usr/include/sys/exec_elf.h - raise RuntimeError("icu needs an android api <= 19") + @property + def versioned_url(self): + if self.url is None: + return None + return self.url.format( + version=self.version, + version_underscore=self.version.replace('.', '_'), + version_hyphen=self.version.replace('.', '-')) - super(ICURecipe, self).prepare_build_dir(arch) + def get_recipe_dir(self): + """ + .. 
note:: We need to overwrite `Recipe.get_recipe_dir` due to the + mismatch name between the recipe's folder (icu) and the value + of `ICURecipe.name` (icu4c). + """ + if self.ctx.local_recipes is not None: + local_recipe_dir = join(self.ctx.local_recipes, 'icu') + if exists(local_recipe_dir): + return local_recipe_dir + return join(self.ctx.root_dir, 'recipes', 'icu') - def build_arch(self, arch, *extra_args): + def build_arch(self, arch): env = self.get_recipe_env(arch).copy() build_root = self.get_build_dir(arch.arch) @@ -40,7 +64,7 @@ class ICURecipe(NDKRecipe): return build_dest, True icu_build = join(build_root, "icu_build") - build_linux, exists = make_build_dest("build_icu_linux") + build_host, exists = make_build_dest("build_icu_host") host_env = os.environ.copy() # reduce the function set @@ -51,102 +75,53 @@ class ICURecipe(NDKRecipe): "-DUCONFIG_NO_TRANSLITERATION=0 ") if not exists: + icu4c_host_platform = platform.system() + if icu4c_host_platform == "Darwin": + icu4c_host_platform = "MacOSX" configure = sh.Command( join(build_root, "source", "runConfigureICU")) - with current_directory(build_linux): + with current_directory(build_host): shprint( configure, - "Linux", + icu4c_host_platform, "--prefix="+icu_build, "--enable-extras=no", "--enable-strict=no", - "--enable-static", + "--enable-static=no", "--enable-tests=no", "--enable-samples=no", _env=host_env) - shprint(sh.make, "-j5", _env=host_env) + shprint(sh.make, "-j", str(cpu_count()), _env=host_env) shprint(sh.make, "install", _env=host_env) - build_android, exists = make_build_dest("build_icu_android") if not exists: - configure = sh.Command(join(build_root, "source", "configure")) - include = ( - " -I{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/include/" - " -I{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/" - "{arch}/include") - include = include.format(ndk=self.ctx.ndk_dir, - version=env["TOOLCHAIN_VERSION"], - arch=arch.arch) - env["CPPFLAGS"] = env["CXXFLAGS"] + " " - env["CPPFLAGS"] += host_env["CPPFLAGS"] - env["CPPFLAGS"] += include - - lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}" - lib = lib.format(ndk=self.ctx.ndk_dir, - version=env["TOOLCHAIN_VERSION"], - arch=arch.arch) - env["LDFLAGS"] += " -lgnustl_shared -L"+lib - - env.pop("CFLAGS", None) - env.pop("CXXFLAGS", None) - with current_directory(build_android): shprint( configure, - "--with-cross-build="+build_linux, + "--with-cross-build="+build_host, "--enable-extras=no", "--enable-strict=no", - "--enable-static", + "--enable-static=no", "--enable-tests=no", "--enable-samples=no", - "--host="+env["TOOLCHAIN_PREFIX"], + "--host="+arch.command_prefix, "--prefix="+icu_build, _env=env) - shprint(sh.make, "-j5", _env=env) + shprint(sh.make, "-j", str(cpu_count()), _env=env) shprint(sh.make, "install", _env=env) - self.copy_files(arch) - - def copy_files(self, arch): - env = self.get_recipe_env(arch) - - lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}" - lib = lib.format(ndk=self.ctx.ndk_dir, - version=env["TOOLCHAIN_VERSION"], - arch=arch.arch) - stl_lib = join(lib, "libgnustl_shared.so") - dst_dir = join(self.ctx.get_site_packages_dir(), "..", "lib-dynload") - shprint(sh.cp, stl_lib, dst_dir) - - src_lib = join(self.get_build_dir(arch.arch), "icu_build", "lib") - dst_lib = self.get_lib_dir(arch) - - src_suffix = "." + self.version - dst_suffix = "." 
+ self.version.split(".")[0] # main version - for lib in self.generated_libraries: - shprint(sh.cp, join(src_lib, lib+src_suffix), - join(dst_lib, lib+dst_suffix)) + def install_libraries(self, arch): + super().install_libraries(arch) src_include = join( self.get_build_dir(arch.arch), "icu_build", "include") dst_include = join( - self.ctx.get_python_install_dir(), "include", "icu") + self.ctx.get_python_install_dir(arch.arch), "include", "icu") ensure_dir(dst_include) shprint(sh.cp, "-r", join(src_include, "layout"), dst_include) shprint(sh.cp, "-r", join(src_include, "unicode"), dst_include) - # copy stl library - lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}" - lib = lib.format(ndk=self.ctx.ndk_dir, - version=env["TOOLCHAIN_VERSION"], - arch=arch.arch) - stl_lib = join(lib, "libgnustl_shared.so") - - dst_dir = join(self.ctx.get_python_install_dir(), "lib") - ensure_dir(dst_dir) - shprint(sh.cp, stl_lib, dst_dir) - recipe = ICURecipe() diff --git a/p4a/pythonforandroid/recipes/icu/disable-libs-version.patch b/p4a/pythonforandroid/recipes/icu/disable-libs-version.patch new file mode 100644 index 0000000..872abe0 --- /dev/null +++ b/p4a/pythonforandroid/recipes/icu/disable-libs-version.patch @@ -0,0 +1,66 @@ +diff -aur icu4c-org/source/config/Makefile.inc.in icu4c/source/config/Makefile.inc.in +--- icu/source/config/Makefile.inc.in.orig 2016-03-23 21:50:50.000000000 +0100 ++++ icu/source/config/Makefile.inc.in 2019-02-15 17:59:28.331749766 +0100 +@@ -142,8 +142,8 @@ + LDLIBRARYPATH_ENVVAR = LD_LIBRARY_PATH + + # Versioned target for a shared library +-FINAL_SO_TARGET = $(SO_TARGET).$(SO_TARGET_VERSION) +-MIDDLE_SO_TARGET = $(SO_TARGET).$(SO_TARGET_VERSION_MAJOR) ++FINAL_SO_TARGET = $(SO_TARGET).$(SO_TARGET_VERSION) ++MIDDLE_SO_TARGET = $(SO_TARGET) + + # Access to important ICU tools. + # Use as follows: $(INVOKE) $(GENRB) arguments .. +diff -aur icu4c-org/source/config/mh-linux icu4c/source/config/mh-linux +--- icu4c-org/source/config/mh-linux 2017-07-05 13:23:06.000000000 +0200 ++++ icu4c/source/config/mh-linux 2017-07-06 14:02:52.275016858 +0200 +@@ -24,9 +24,17 @@ + + ## Compiler switch to embed a library name + # The initial tab in the next line is to prevent icu-config from reading it. +- LD_SONAME = -Wl,-soname -Wl,$(notdir $(MIDDLE_SO_TARGET)) ++ LD_SONAME = -Wl,-soname -Wl,$(notdir $(SO_TARGET)) ++ DATA_STUBNAME = data$(SO_TARGET_VERSION_MAJOR) ++ COMMON_STUBNAME = uc$(SO_TARGET_VERSION_MAJOR) ++ I18N_STUBNAME = i18n$(SO_TARGET_VERSION_MAJOR) ++ LAYOUT_STUBNAME = le$(SO_TARGET_VERSION_MAJOR) ++ LAYOUTEX_STUBNAME = lx$(SO_TARGET_VERSION_MAJOR) ++ IO_STUBNAME = io$(SO_TARGET_VERSION_MAJOR) ++ TOOLUTIL_STUBNAME = tu$(SO_TARGET_VERSION_MAJOR) ++ CTESTFW_STUBNAME = test$(SO_TARGET_VERSION_MAJOR) + #SH# # We can't depend on MIDDLE_SO_TARGET being set. 
+-#SH# LD_SONAME= ++#SH# LD_SONAME=$(SO_TARGET) + + ## Shared library options + LD_SOOPTIONS= -Wl,-Bsymbolic +@@ -64,10 +64,10 @@ + + ## Versioned libraries rules + +-%.$(SO).$(SO_TARGET_VERSION_MAJOR): %.$(SO).$(SO_TARGET_VERSION) +- $(RM) $@ && ln -s ${ use libifaddrs instead ++if not hasattr(libc, 'getifaddrs'): ++ libc = ctypes.CDLL(ctypes.util.find_library('ifaddrs'), use_errno=True) ++ + def get_adapters(): + + addr0 = addr = ctypes.POINTER(ifaddrs)() diff --git a/p4a/pythonforandroid/recipes/ifaddrs/__init__.py b/p4a/pythonforandroid/recipes/ifaddrs/__init__.py index 47c0008..7d44f9c 100644 --- a/p4a/pythonforandroid/recipes/ifaddrs/__init__.py +++ b/p4a/pythonforandroid/recipes/ifaddrs/__init__.py @@ -10,7 +10,7 @@ from pythonforandroid.toolchain import current_directory class IFAddrRecipe(CompiledComponentsPythonRecipe): version = '8f9a87c' url = 'https://github.com/morristech/android-ifaddrs/archive/{version}.zip' - depends = [('hostpython2', 'hostpython3')] + depends = ['hostpython3'] call_hostpython_via_targetpython = False site_packages_name = 'ifaddrs' diff --git a/p4a/pythonforandroid/recipes/jedi/__init__.py b/p4a/pythonforandroid/recipes/jedi/__init__.py index 6338a52..17168e8 100644 --- a/p4a/pythonforandroid/recipes/jedi/__init__.py +++ b/p4a/pythonforandroid/recipes/jedi/__init__.py @@ -5,8 +5,6 @@ class JediRecipe(PythonRecipe): version = 'v0.9.0' url = 'https://github.com/davidhalter/jedi/archive/{version}.tar.gz' - depends = [('python2', 'python3crystax', 'python3')] - patches = ['fix_MergedNamesDict_get.patch'] # This apparently should be fixed in jedi 0.10 (not released to # pypi yet), but it still occurs on Android, I could not reproduce diff --git a/p4a/pythonforandroid/recipes/jpeg/__init__.py b/p4a/pythonforandroid/recipes/jpeg/__init__.py index 1969d2c..a81b825 100644 --- a/p4a/pythonforandroid/recipes/jpeg/__init__.py +++ b/p4a/pythonforandroid/recipes/jpeg/__init__.py @@ -1,9 +1,7 @@ from pythonforandroid.recipe import Recipe from pythonforandroid.logger import shprint from pythonforandroid.util import current_directory -from os.path import join, exists -from os import environ, uname -from glob import glob +from os.path import join import sh @@ -16,15 +14,11 @@ class JpegRecipe(Recipe): name = 'jpeg' version = '2.0.1' url = 'https://github.com/libjpeg-turbo/libjpeg-turbo/archive/{version}.tar.gz' # noqa + built_libraries = {'libjpeg.a': '.', 'libturbojpeg.a': '.'} # we will require this below patch to build the shared library # patches = ['remove-version.patch'] - def should_build(self, arch): - return not exists(join(self.get_build_dir(arch.arch), - 'libturbojpeg.a')) - def build_arch(self, arch): - super(JpegRecipe, self).build_arch(arch) build_dir = self.get_build_dir(arch.arch) # TODO: Fix simd/neon @@ -36,14 +30,12 @@ class JpegRecipe(Recipe): shprint(sh.rm, '-f', 'CMakeCache.txt', 'CMakeFiles/') shprint(sh.cmake, '-G', 'Unix Makefiles', '-DCMAKE_SYSTEM_NAME=Android', - '-DCMAKE_SYSTEM_PROCESSOR={cpu}'.format(cpu='arm'), '-DCMAKE_POSITION_INDEPENDENT_CODE=1', '-DCMAKE_ANDROID_ARCH_ABI={arch}'.format(arch=arch.arch), '-DCMAKE_ANDROID_NDK=' + self.ctx.ndk_dir, - '-DCMAKE_C_COMPILER={toolchain}/bin/clang'.format( - toolchain=env['TOOLCHAIN']), - '-DCMAKE_CXX_COMPILER={toolchain}/bin/clang++'.format( - toolchain=env['TOOLCHAIN']), + '-DCMAKE_C_COMPILER={cc}'.format(cc=arch.get_clang_exe()), + '-DCMAKE_CXX_COMPILER={cc_plus}'.format( + cc_plus=arch.get_clang_exe(plus_plus=True)), '-DCMAKE_BUILD_TYPE=Release', '-DCMAKE_INSTALL_PREFIX=./install', 
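            # (descriptive note, added for clarity: the CMAKE_C_COMPILER /
            #  CMAKE_CXX_COMPILER values above come from arch.get_clang_exe(),
            #  which resolves to the clang binaries shipped inside the NDK's
            #  bundled LLVM toolchain, replacing the TOOLCHAIN env var that the
            #  removed get_recipe_env() below used to compute by hand)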
'-DCMAKE_TOOLCHAIN_FILE=' + toolchain_file, @@ -59,20 +51,5 @@ class JpegRecipe(Recipe): _env=env) shprint(sh.make, _env=env) - # copy static libs to libs collection - for lib in glob(join(build_dir, '*.a')): - shprint(sh.cp, '-L', lib, self.ctx.libs_dir) - - def get_recipe_env(self, arch=None, with_flags_in_cc=False, clang=True): - env = environ.copy() - - build_platform = '{system}-{machine}'.format( - system=uname()[0], machine=uname()[-1]).lower() - env['TOOLCHAIN'] = join(self.ctx.ndk_dir, 'toolchains/llvm/' - 'prebuilt/{build_platform}'.format( - build_platform=build_platform)) - - return env - recipe = JpegRecipe() diff --git a/p4a/pythonforandroid/recipes/kivy/__init__.py b/p4a/pythonforandroid/recipes/kivy/__init__.py index d21107f..bc9041a 100644 --- a/p4a/pythonforandroid/recipes/kivy/__init__.py +++ b/p4a/pythonforandroid/recipes/kivy/__init__.py @@ -1,20 +1,41 @@ +import glob +from os.path import basename, exists, join +import sys +import packaging.version + +import sh from pythonforandroid.recipe import CythonRecipe from pythonforandroid.toolchain import current_directory, shprint -from os.path import exists, join, basename -import sh -import glob + + +def is_kivy_affected_by_deadlock_issue(recipe=None, arch=None): + with current_directory(join(recipe.get_build_dir(arch.arch), "kivy")): + kivy_version = shprint( + sh.Command(sys.executable), + "-c", + "import _version; print(_version.__version__)", + ) + + return packaging.version.parse( + str(kivy_version) + ) < packaging.version.Version("2.2.0.dev0") class KivyRecipe(CythonRecipe): - # post kivy==1.10.1, `fixes SDL2 image loading (jpg)` - version = 'c4d6894' + version = '2.1.0' url = 'https://github.com/kivy/kivy/archive/{version}.zip' name = 'kivy' - depends = [('sdl2', 'pygame'), 'pyjnius'] + depends = ['sdl2', 'pyjnius', 'setuptools'] + python_depends = ['certifi'] + + # sdl-gl-swapwindow-nogil.patch is needed to avoid a deadlock. + # See: https://github.com/kivy/kivy/pull/8025 + # WARNING: Remove this patch when a new Kivy version is released. 
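A rough sketch (illustrative, not p4a's verbatim implementation) of how the (patch, condition) tuple declared on the next line is handled: the condition callable is evaluated per recipe and arch, and the patch is skipped when it returns False.

    for patch in self.patches:
        if isinstance(patch, tuple):
            patch_file, should_apply = patch
            if not should_apply(recipe=self, arch=arch):
                continue
        else:
            patch_file = patch
        self.apply_patch(patch_file, arch.arch)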
+ patches = [("sdl-gl-swapwindow-nogil.patch", is_kivy_affected_by_deadlock_issue)] def cythonize_build(self, env, build_dir='.'): - super(KivyRecipe, self).cythonize_build(env, build_dir=build_dir) + super().cythonize_build(env, build_dir=build_dir) if not exists(join(build_dir, 'kivy', 'include')): return @@ -35,19 +56,22 @@ class KivyRecipe(CythonRecipe): do_not_cythonize = ['window_x11.pyx', ] if basename(filename) in do_not_cythonize: return - super(KivyRecipe, self).cythonize_file(env, build_dir, filename) + super().cythonize_file(env, build_dir, filename) def get_recipe_env(self, arch): - env = super(KivyRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) + # NDKPLATFORM is our switch for detecting Android platform, so can't be None + env['NDKPLATFORM'] = "NOTNONE" if 'sdl2' in self.ctx.recipe_build_order: env['USE_SDL2'] = '1' env['KIVY_SPLIT_EXAMPLES'] = '1' + sdl2_mixer_recipe = self.get_recipe('sdl2_mixer', self.ctx) env['KIVY_SDL2_PATH'] = ':'.join([ join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include'), join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_image'), - join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_mixer'), + *sdl2_mixer_recipe.get_include_dirs(arch), join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_ttf'), - ]) + ]) return env diff --git a/p4a/pythonforandroid/recipes/kivy/sdl-gl-swapwindow-nogil.patch b/p4a/pythonforandroid/recipes/kivy/sdl-gl-swapwindow-nogil.patch new file mode 100644 index 0000000..8a7c33a --- /dev/null +++ b/p4a/pythonforandroid/recipes/kivy/sdl-gl-swapwindow-nogil.patch @@ -0,0 +1,32 @@ +diff --git a/kivy/core/window/_window_sdl2.pyx b/kivy/core/window/_window_sdl2.pyx +index 46e15ec63..5002cd0f9 100644 +--- a/kivy/core/window/_window_sdl2.pyx ++++ b/kivy/core/window/_window_sdl2.pyx +@@ -746,7 +746,13 @@ cdef class _WindowSDL2Storage: + pass + + def flip(self): +- SDL_GL_SwapWindow(self.win) ++ # On Android (and potentially other platforms), SDL_GL_SwapWindow may ++ # lock the thread waiting for a mutex from another thread to be ++ # released. Calling SDL_GL_SwapWindow with the GIL released allow the ++ # other thread to run (e.g. to process the event filter callback) and ++ # release the mutex SDL_GL_SwapWindow is waiting for. 
++ with nogil: ++ SDL_GL_SwapWindow(self.win) + + def save_bytes_in_png(self, filename, data, int width, int height): + cdef SDL_Surface *surface = SDL_CreateRGBSurfaceFrom( +diff --git a/kivy/lib/sdl2.pxi b/kivy/lib/sdl2.pxi +index 6a539de6d..3a5a69d23 100644 +--- a/kivy/lib/sdl2.pxi ++++ b/kivy/lib/sdl2.pxi +@@ -627,7 +627,7 @@ cdef extern from "SDL.h": + cdef SDL_GLContext SDL_GL_GetCurrentContext() + cdef int SDL_GL_SetSwapInterval(int interval) + cdef int SDL_GL_GetSwapInterval() +- cdef void SDL_GL_SwapWindow(SDL_Window * window) ++ cdef void SDL_GL_SwapWindow(SDL_Window * window) nogil + cdef void SDL_GL_DeleteContext(SDL_GLContext context) + + cdef int SDL_NumJoysticks() diff --git a/p4a/pythonforandroid/recipes/kivy3/__init__.py b/p4a/pythonforandroid/recipes/kivy3/__init__.py new file mode 100644 index 0000000..6f27f62 --- /dev/null +++ b/p4a/pythonforandroid/recipes/kivy3/__init__.py @@ -0,0 +1,21 @@ +from pythonforandroid.recipe import PythonRecipe +import shutil + + +class Kivy3Recipe(PythonRecipe): + version = 'master' + url = 'https://github.com/kivy/kivy3/archive/{version}.zip' + + depends = ['kivy'] + site_packages_name = 'kivy3' + + '''Due to setuptools.''' + call_hostpython_via_targetpython = False + + def build_arch(self, arch): + super().build_arch(arch) + suffix = '/kivy3/default.glsl' + shutil.copyfile(self.get_build_dir(arch.arch) + suffix, self.ctx.get_python_install_dir(arch.arch) + suffix) + + +recipe = Kivy3Recipe() diff --git a/p4a/pythonforandroid/recipes/kiwisolver/__init__.py b/p4a/pythonforandroid/recipes/kiwisolver/__init__.py index ae6fa17..587c2b9 100644 --- a/p4a/pythonforandroid/recipes/kiwisolver/__init__.py +++ b/p4a/pythonforandroid/recipes/kiwisolver/__init__.py @@ -3,9 +3,9 @@ from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe class KiwiSolverRecipe(CppCompiledComponentsPythonRecipe): site_packages_name = 'kiwisolver' - version = '0.1.3' - url = 'https://github.com/nucleic/kiwi/archive/master.zip' - depends = ['setuptools'] + version = '1.3.2' + url = 'https://github.com/nucleic/kiwi/archive/{version}.zip' + depends = ['cppy'] recipe = KiwiSolverRecipe() diff --git a/p4a/pythonforandroid/recipes/lapack/__init__.py b/p4a/pythonforandroid/recipes/lapack/__init__.py new file mode 100644 index 0000000..ae20e69 --- /dev/null +++ b/p4a/pythonforandroid/recipes/lapack/__init__.py @@ -0,0 +1,79 @@ +''' +known to build with cmake version 3.23.2 and NDK r21e. 
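For example (an illustrative sketch; the path is a placeholder), the legacy toolchain is advertised to p4a through an environment variable before the build starts:

    import os
    os.environ['LEGACY_NDK'] = '/opt/android-ndk-r21e'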
+See https://gitlab.kitware.com/cmake/cmake/-/issues/18739 +''' + +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import shprint +from pythonforandroid.util import current_directory, ensure_dir, BuildInterruptingException +from multiprocessing import cpu_count +from os.path import join +import sh +import shutil +from os import environ +from pythonforandroid.util import build_platform + +arch_to_sysroot = {'armeabi': 'arm', 'armeabi-v7a': 'arm', 'arm64-v8a': 'arm64'} + + +def arch_to_toolchain(arch): + if 'arm' in arch.arch: + return arch.command_prefix + return arch.arch + + +class LapackRecipe(Recipe): + + name = 'lapack' + version = 'v3.10.1' + url = 'https://github.com/Reference-LAPACK/lapack/archive/{version}.tar.gz' + libdir = 'build/install/lib' + built_libraries = {'libblas.so': libdir, 'liblapack.so': libdir, 'libcblas.so': libdir} + + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + + ndk_dir = environ.get("LEGACY_NDK") + if ndk_dir is None: + raise BuildInterruptingException("Please set the environment variable 'LEGACY_NDK' to point to a NDK location with gcc/gfortran support (supported NDK version: 'r21e')") + + GCC_VER = '4.9' + HOST = build_platform + + sysroot_suffix = arch_to_sysroot.get(arch.arch, arch.arch) + sysroot = f"{ndk_dir}/platforms/{env['NDK_API']}/arch-{sysroot_suffix}" + FC = f"{ndk_dir}/toolchains/{arch_to_toolchain(arch)}-{GCC_VER}/prebuilt/{HOST}/bin/{arch.command_prefix}-gfortran" + env['FC'] = f'{FC} --sysroot={sysroot}' + if shutil.which(FC) is None: + raise BuildInterruptingException(f"{FC} not found. See https://github.com/mzakharo/android-gfortran") + return env + + def build_arch(self, arch): + source_dir = self.get_build_dir(arch.arch) + build_target = join(source_dir, 'build') + install_target = join(build_target, 'install') + + ensure_dir(build_target) + with current_directory(build_target): + env = self.get_recipe_env(arch) + ndk_dir = environ["LEGACY_NDK"] + shprint(sh.rm, '-rf', 'CMakeFiles/', 'CMakeCache.txt', _env=env) + opts = [ + '-DCMAKE_SYSTEM_NAME=Android', + '-DCMAKE_POSITION_INDEPENDENT_CODE=1', + '-DCMAKE_ANDROID_ARCH_ABI={arch}'.format(arch=arch.arch), + '-DCMAKE_ANDROID_NDK=' + ndk_dir, + '-DCMAKE_ANDROID_API={api}'.format(api=self.ctx.ndk_api), + '-DCMAKE_BUILD_TYPE=Release', + '-DCMAKE_INSTALL_PREFIX={}'.format(install_target), + '-DCBLAS=ON', + '-DBUILD_SHARED_LIBS=ON', + ] + if arch.arch == 'armeabi-v7a': + opts.append('-DCMAKE_ANDROID_ARM_NEON=ON') + shprint(sh.cmake, source_dir, *opts, _env=env) + shprint(sh.make, '-j' + str(cpu_count()), _env=env) + shprint(sh.make, 'install', _env=env) + + +recipe = LapackRecipe() diff --git a/p4a/pythonforandroid/recipes/leveldb/__init__.py b/p4a/pythonforandroid/recipes/leveldb/__init__.py index e7ebe71..7f65a55 100644 --- a/p4a/pythonforandroid/recipes/leveldb/__init__.py +++ b/p4a/pythonforandroid/recipes/leveldb/__init__.py @@ -1,47 +1,46 @@ -from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory +from pythonforandroid.logger import shprint +from pythonforandroid.util import current_directory +from pythonforandroid.recipe import Recipe +from multiprocessing import cpu_count from os.path import join import sh class LevelDBRecipe(Recipe): - version = '1.18' - url = 'https://github.com/google/leveldb/archive/v{version}.tar.gz' - opt_depends = ['snappy'] - patches = ['disable-so-version.patch', 'find-snappy.patch'] - - def should_build(self, arch): - return not self.has_libs(arch, 'libleveldb.so', 
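A condensed sketch (illustrative, not p4a's verbatim implementation) of what the `built_libraries` declaration just below automates in the base Recipe class, replacing the hand-written should_build() and copy steps removed above:

    def should_build(self, arch):
        return not all(exists(lib) for lib in self.get_libraries(arch))

    def install_libraries(self, arch):
        self.install_libs(arch, *self.get_libraries(arch))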
'libgnustl_shared.so') + version = '1.22' + url = 'https://github.com/google/leveldb/archive/{version}.tar.gz' + depends = ['snappy'] + built_libraries = {'libleveldb.so': '.'} + need_stl_shared = True def build_arch(self, arch): - super(LevelDBRecipe, self).build_arch(arch) env = self.get_recipe_env(arch) - with current_directory(self.get_build_dir(arch.arch)): - if 'snappy' in recipe.ctx.recipe_build_order: - # Copy source from snappy recipe - sh.cp('-rf', self.get_recipe('snappy', self.ctx).get_build_dir(arch.arch), 'snappy') - # Build - shprint(sh.make, _env=env) - # Copy the shared library - shutil.copyfile('libleveldb.so', join(self.ctx.get_libs_dir(arch.arch), 'libleveldb.so')) - # Copy stl - shutil.copyfile(self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/libs/' + arch.arch + '/libgnustl_shared.so', - join(self.ctx.get_libs_dir(arch.arch), 'libgnustl_shared.so')) + source_dir = self.get_build_dir(arch.arch) + with current_directory(source_dir): + snappy_recipe = self.get_recipe('snappy', self.ctx) + snappy_build = snappy_recipe.get_build_dir(arch.arch) - def get_recipe_env(self, arch): - env = super(LevelDBRecipe, self).get_recipe_env(arch) - env['TARGET_OS'] = 'OS_ANDROID_CROSSCOMPILE' - if 'snappy' in recipe.ctx.recipe_build_order: - env['CFLAGS'] += ' -DSNAPPY' + \ - ' -I./snappy' - env['CFLAGS'] += ' -I' + self.ctx.ndk_dir + '/platforms/android-' + str(self.ctx.android_api) + '/arch-' + arch.arch.replace('eabi', '') + '/usr/include' + \ - ' -I' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/include' + \ - ' -I' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/libs/' + arch.arch + '/include' - env['CXXFLAGS'] = env['CFLAGS'] - env['CXXFLAGS'] += ' -frtti' - env['CXXFLAGS'] += ' -fexceptions' - env['LDFLAGS'] += ' -L' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/libs/' + arch.arch + \ - ' -lgnustl_shared' - return env + shprint(sh.cmake, source_dir, + '-DANDROID_ABI={}'.format(arch.arch), + '-DANDROID_NATIVE_API_LEVEL={}'.format(self.ctx.ndk_api), + '-DANDROID_STL=' + self.stl_lib_name, + + '-DCMAKE_TOOLCHAIN_FILE={}'.format( + join(self.ctx.ndk_dir, 'build', 'cmake', + 'android.toolchain.cmake')), + '-DCMAKE_BUILD_TYPE=Release', + + '-DBUILD_SHARED_LIBS=1', + + '-DHAVE_SNAPPY=1', + '-DCMAKE_CXX_FLAGS=-I{path}'.format(path=snappy_build), + '-DCMAKE_SHARED_LINKER_FLAGS=-L{path} -lsnappy'.format( + path=snappy_build), + '-DCMAKE_EXE_LINKER_FLAGS=-L{path} -lsnappy'.format( + path=snappy_build), + + _env=env) + shprint(sh.make, '-j' + str(cpu_count()), _env=env) recipe = LevelDBRecipe() diff --git a/p4a/pythonforandroid/recipes/libbz2/__init__.py b/p4a/pythonforandroid/recipes/libbz2/__init__.py new file mode 100644 index 0000000..01d5146 --- /dev/null +++ b/p4a/pythonforandroid/recipes/libbz2/__init__.py @@ -0,0 +1,57 @@ +import sh + +from multiprocessing import cpu_count + +from pythonforandroid.archs import Arch +from pythonforandroid.logger import shprint +from pythonforandroid.recipe import Recipe +from pythonforandroid.util import current_directory + + +class LibBz2Recipe(Recipe): + + version = "1.0.8" + url = "https://sourceware.org/pub/bzip2/bzip2-{version}.tar.gz" + built_libraries = {"libbz2.so": ""} + patches = ["lib_android.patch"] + + def build_arch(self, arch: Arch) -> None: + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + shprint( + sh.make, + "-j", + str(cpu_count()), 
+ f'CC={env["CC"]}', + "-f", + "Makefile-libbz2_so", + _env=env, + ) + + def get_library_includes(self, arch: Arch) -> str: + """ + Returns a string with the appropriate `-I` to link + with the bz2 lib. This string is usually added to the environment + variable `CPPFLAGS`. + """ + return " -I" + self.get_build_dir(arch.arch) + + def get_library_ldflags(self, arch: Arch) -> str: + """ + Returns a string with the appropriate `-L` to link + with the bz2 lib. This string is usually added to the environment + variable `LDFLAGS`. + """ + return " -L" + self.get_build_dir(arch.arch) + + @staticmethod + def get_library_libs_flag() -> str: + """ + Returns a string with the appropriate `-l` flags to link with + the bz2 lib. This string is usually added to the environment + variable `LIBS`. + """ + return " -lbz2" + + +recipe = LibBz2Recipe() diff --git a/p4a/pythonforandroid/recipes/libbz2/lib_android.patch b/p4a/pythonforandroid/recipes/libbz2/lib_android.patch new file mode 100644 index 0000000..b208896 --- /dev/null +++ b/p4a/pythonforandroid/recipes/libbz2/lib_android.patch @@ -0,0 +1,29 @@ +Set default compiler to `clang` and disable versioned shared library +--- bzip2-1.0.8/Makefile-libbz2_so.orig 2019-07-13 19:50:05.000000000 +0200 ++++ bzip2-1.0.8/Makefile-libbz2_so 2020-03-13 20:10:32.336990786 +0100 +@@ -22,7 +22,7 @@ + + + SHELL=/bin/sh +-CC=gcc ++CC=clang + BIGFILES=-D_FILE_OFFSET_BITS=64 + CFLAGS=-fpic -fPIC -Wall -Winline -O2 -g $(BIGFILES) + +@@ -35,13 +35,11 @@ OBJS= blocksort.o \ + bzlib.o + + all: $(OBJS) +- $(CC) -shared -Wl,-soname -Wl,libbz2.so.1.0 -o libbz2.so.1.0.8 $(OBJS) +- $(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.1.0.8 +- rm -f libbz2.so.1.0 +- ln -s libbz2.so.1.0.8 libbz2.so.1.0 ++ $(CC) -shared -Wl,-soname=libbz2.so -o libbz2.so $(OBJS) ++ $(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so + + clean: +- rm -f $(OBJS) bzip2.o libbz2.so.1.0.8 libbz2.so.1.0 bzip2-shared ++ rm -f $(OBJS) bzip2.o libbz2.so bzip2-shared + + blocksort.o: blocksort.c + $(CC) $(CFLAGS) -c blocksort.c diff --git a/p4a/pythonforandroid/recipes/libcurl/__init__.py b/p4a/pythonforandroid/recipes/libcurl/__init__.py index e8cc860..2971532 100644 --- a/p4a/pythonforandroid/recipes/libcurl/__init__.py +++ b/p4a/pythonforandroid/recipes/libcurl/__init__.py @@ -1,40 +1,37 @@ import sh -from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory -from os.path import exists, join +from pythonforandroid.recipe import Recipe +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint +from os.path import join from multiprocessing import cpu_count class LibcurlRecipe(Recipe): version = '7.55.1' url = 'https://curl.haxx.se/download/curl-7.55.1.tar.gz' + built_libraries = {'libcurl.so': 'dist/lib'} depends = ['openssl'] - def should_build(self, arch): - super(LibcurlRecipe, self).should_build(arch) - return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libcurl.so')) - def build_arch(self, arch): - super(LibcurlRecipe, self).build_arch(arch) env = self.get_recipe_env(arch) - r = self.get_recipe('openssl', self.ctx) - openssl_dir = r.get_build_dir(arch.arch) + openssl_recipe = self.get_recipe('openssl', self.ctx) + openssl_dir = openssl_recipe.get_build_dir(arch.arch) + + env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) + env['LIBS'] = env.get('LIBS', '') + openssl_recipe.link_libs_flags() with current_directory(self.get_build_dir(arch.arch)): dst_dir = join(self.get_build_dir(arch.arch), 'dist') shprint( sh.Command('./configure'), - 
'--host=arm-linux-androideabi', + '--host={}'.format(arch.command_prefix), '--enable-shared', '--with-ssl={}'.format(openssl_dir), '--prefix={}'.format(dst_dir), _env=env) shprint(sh.make, '-j', str(cpu_count()), _env=env) shprint(sh.make, 'install', _env=env) - shutil.copyfile('{}/lib/libcurl.so'.format(dst_dir), - join( - self.ctx.get_libs_dir(arch.arch), - 'libcurl.so')) recipe = LibcurlRecipe() diff --git a/p4a/pythonforandroid/recipes/libexpat/__init__.py b/p4a/pythonforandroid/recipes/libexpat/__init__.py index ecf5265..614b0df 100644 --- a/p4a/pythonforandroid/recipes/libexpat/__init__.py +++ b/p4a/pythonforandroid/recipes/libexpat/__init__.py @@ -1,38 +1,32 @@ import sh -from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory -from os.path import exists, join +from pythonforandroid.recipe import Recipe +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint +from os.path import join from multiprocessing import cpu_count class LibexpatRecipe(Recipe): version = 'master' url = 'https://github.com/libexpat/libexpat/archive/{version}.zip' + built_libraries = {'libexpat.so': 'dist/lib'} depends = [] - def should_build(self, arch): - super(LibexpatRecipe, self).should_build(arch) - return not exists( - join(self.ctx.get_libs_dir(arch.arch), 'libexpat.so')) - def build_arch(self, arch): - super(LibexpatRecipe, self).build_arch(arch) env = self.get_recipe_env(arch) with current_directory(join(self.get_build_dir(arch.arch), 'expat')): dst_dir = join(self.get_build_dir(arch.arch), 'dist') shprint(sh.Command('./buildconf.sh'), _env=env) shprint( sh.Command('./configure'), - '--host=arm-linux-androideabi', + '--host={}'.format(arch.command_prefix), '--enable-shared', '--without-xmlwf', '--prefix={}'.format(dst_dir), _env=env) shprint(sh.make, '-j', str(cpu_count()), _env=env) shprint(sh.make, 'install', _env=env) - shutil.copyfile( - '{}/lib/libexpat.so'.format(dst_dir), - join(self.ctx.get_libs_dir(arch.arch), 'libexpat.so')) recipe = LibexpatRecipe() diff --git a/p4a/pythonforandroid/recipes/libffi/__init__.py b/p4a/pythonforandroid/recipes/libffi/__init__.py index 31ed9c6..767881b 100644 --- a/p4a/pythonforandroid/recipes/libffi/__init__.py +++ b/p4a/pythonforandroid/recipes/libffi/__init__.py @@ -2,7 +2,7 @@ from os.path import exists, join from multiprocessing import cpu_count from pythonforandroid.recipe import Recipe from pythonforandroid.logger import shprint -from pythonforandroid.util import current_directory, ensure_dir +from pythonforandroid.util import current_directory import sh @@ -14,17 +14,12 @@ class LibffiRecipe(Recipe): - `libltdl-dev` which defines the `LT_SYS_SYMBOL_USCORE` macro """ name = 'libffi' - version = '3.2.1' - url = 'https://github.com/libffi/libffi/archive/v{version}.tar.gz' + version = 'v3.4.2' + url = 'https://github.com/libffi/libffi/archive/{version}.tar.gz' - patches = ['remove-version-info.patch', - # This patch below is already included into libffi's master - # branch and included in the pre-release 3.3rc0...so we should - # remove this when we update the version number for libffi - 'fix-includedir.patch'] + patches = ['remove-version-info.patch'] - def should_build(self, arch): - return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libffi.so')) + built_libraries = {'libffi.so': '.libs'} def build_arch(self, arch): env = self.get_recipe_env(arch) @@ -37,15 +32,8 @@ class LibffiRecipe(Recipe): '--prefix=' + self.get_build_dir(arch.arch), '--disable-builddir', '--enable-shared', 
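                    # (descriptive note, added for clarity: with
                    #  `built_libraries` declaring '.libs/libffi.so' above,
                    #  the explicit make/cp steps that used to copy the
                    #  library into ctx.libs_dir are no longer needed and
                    #  are removed below)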
_env=env) - shprint(sh.make, '-j', str(cpu_count()), 'libffi.la', _env=env) - host_build = self.get_build_dir(arch.arch) - ensure_dir(self.ctx.get_libs_dir(arch.arch)) - shprint(sh.cp, - join(host_build, '.libs', 'libffi.so'), - self.ctx.get_libs_dir(arch.arch)) - def get_include_dirs(self, arch): return [join(self.get_build_dir(arch.arch), 'include')] diff --git a/p4a/pythonforandroid/recipes/libffi/remove-version-info.patch b/p4a/pythonforandroid/recipes/libffi/remove-version-info.patch index 7bdc11a..0a32b7e 100644 --- a/p4a/pythonforandroid/recipes/libffi/remove-version-info.patch +++ b/p4a/pythonforandroid/recipes/libffi/remove-version-info.patch @@ -1,12 +1,11 @@ -diff -Naur libffi/Makefile.am b/Makefile.am ---- libffi/Makefile.am 2014-11-12 06:00:59.000000000 -0600 -+++ b/Makefile.am 2015-12-23 15:57:10.363148806 -0600 -@@ -249,7 +249,7 @@ - AM_CFLAGS += -DFFI_DEBUG - endif - --libffi_la_LDFLAGS = -no-undefined -version-info `grep -v '^\#' $(srcdir)/libtool-version` $(LTLDFLAGS) $(AM_LTLDFLAGS) +--- libffi/Makefile.am.orig 2018-12-21 16:11:26.159181262 +0100 ++++ libffi/Makefile.am 2018-12-21 16:14:44.075179374 +0100 +@@ -156,7 +156,7 @@ + libffi.map: $(top_srcdir)/libffi.map.in + $(COMPILE) -D$(TARGET) -E -x assembler-with-cpp -o $@ $< + +-libffi_la_LDFLAGS = -no-undefined $(libffi_version_info) $(libffi_version_script) $(LTLDFLAGS) $(AM_LTLDFLAGS) +libffi_la_LDFLAGS = -no-undefined -avoid-version $(LTLDFLAGS) $(AM_LTLDFLAGS) - + libffi_la_DEPENDENCIES = $(libffi_la_LIBADD) $(libffi_version_dep) + AM_CPPFLAGS = -I. -I$(top_srcdir)/include -Iinclude -I$(top_srcdir)/src - AM_CCASFLAGS = $(AM_CPPFLAGS) diff --git a/p4a/pythonforandroid/recipes/libgeos/__init__.py b/p4a/pythonforandroid/recipes/libgeos/__init__.py index 30786f8..cff9fe0 100644 --- a/p4a/pythonforandroid/recipes/libgeos/__init__.py +++ b/p4a/pythonforandroid/recipes/libgeos/__init__.py @@ -1,44 +1,52 @@ -from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory -from os.path import exists, join -import sh +from pythonforandroid.util import current_directory, ensure_dir +from pythonforandroid.toolchain import shprint +from pythonforandroid.recipe import Recipe from multiprocessing import cpu_count +from os.path import join +import sh class LibgeosRecipe(Recipe): - version = '3.5' - # url = 'http://download.osgeo.org/geos/geos-{version}.tar.bz2' - url = 'https://github.com/libgeos/libgeos/archive/svn-{version}.zip' + version = '3.7.1' + url = 'https://github.com/libgeos/libgeos/archive/{version}.zip' depends = [] - - def should_build(self, arch): - super(LibgeosRecipe, self).should_build(arch) - return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libgeos_c.so')) + built_libraries = { + 'libgeos.so': 'install_target/lib', + 'libgeos_c.so': 'install_target/lib' + } + need_stl_shared = True def build_arch(self, arch): - super(LibgeosRecipe, self).build_arch(arch) - env = self.get_recipe_env(arch) + source_dir = self.get_build_dir(arch.arch) + build_target = join(source_dir, 'build_target') + install_target = join(source_dir, 'install_target') - with current_directory(self.get_build_dir(arch.arch)): - dst_dir = join(self.get_build_dir(arch.arch), 'dist') - bash = sh.Command('bash') - print("If this fails make sure you have autoconf and libtool installed") - shprint(bash, 'autogen.sh') # Requires autoconf and libtool - shprint(bash, 'configure', '--host=arm-linux-androideabi', '--enable-shared', '--prefix={}'.format(dst_dir), _env=env) - shprint(sh.make, '-j', str(cpu_count()), _env=env) + 
ensure_dir(build_target) + with current_directory(build_target): + env = self.get_recipe_env(arch) + shprint(sh.cmake, source_dir, + '-DANDROID_ABI={}'.format(arch.arch), + '-DANDROID_NATIVE_API_LEVEL={}'.format(self.ctx.ndk_api), + '-DANDROID_STL=' + self.stl_lib_name, + + '-DCMAKE_TOOLCHAIN_FILE={}'.format( + join(self.ctx.ndk_dir, 'build', 'cmake', + 'android.toolchain.cmake')), + '-DCMAKE_INSTALL_PREFIX={}'.format(install_target), + '-DCMAKE_BUILD_TYPE=Release', + + '-DGEOS_ENABLE_TESTS=OFF', + + '-DBUILD_SHARED_LIBS=1', + + _env=env) + shprint(sh.make, '-j' + str(cpu_count()), _env=env) + + # We run `make install` so that all the includes end up in one + # place (mostly we are interested in `geos_c.h`, + # which is not in the include folder); this makes it easier to + # link with this library, e.g. in shapely's recipe shprint(sh.make, 'install', _env=env) - shutil.copyfile('{}/lib/libgeos_c.so'.format(dst_dir), join(self.ctx.get_libs_dir(arch.arch), 'libgeos_c.so')) - - def get_recipe_env(self, arch): - env = super(LibgeosRecipe, self).get_recipe_env(arch) - env['CXXFLAGS'] += ' -I{}/sources/cxx-stl/gnu-libstdc++/4.8/include'.format(self.ctx.ndk_dir) - env['CXXFLAGS'] += ' -I{}/sources/cxx-stl/gnu-libstdc++/4.8/libs/{}/include'.format( - self.ctx.ndk_dir, arch) - env['CXXFLAGS'] += ' -L{}/sources/cxx-stl/gnu-libstdc++/4.8/libs/{}'.format( - self.ctx.ndk_dir, arch) - env['CXXFLAGS'] += ' -lgnustl_shared' - env['LDFLAGS'] += ' -L{}/sources/cxx-stl/gnu-libstdc++/4.8/libs/{}'.format( - self.ctx.ndk_dir, arch) - return env recipe = LibgeosRecipe() diff --git a/p4a/pythonforandroid/recipes/libglob/__init__.py b/p4a/pythonforandroid/recipes/libglob/__init__.py index e0fccfe..f63db42 100644 --- a/p4a/pythonforandroid/recipes/libglob/__init__.py +++ b/p4a/pythonforandroid/recipes/libglob/__init__.py @@ -3,13 +3,13 @@ available via '-lglob' LDFLAG """ from os.path import exists, join -from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.recipe import Recipe from pythonforandroid.toolchain import current_directory from pythonforandroid.logger import info, shprint import sh -class LibGlobRecipe(CompiledComponentsPythonRecipe): +class LibGlobRecipe(Recipe): """Make a glob.h and glob.so for the python_install_dir()""" version = '0.0.1' url = None @@ -20,8 +20,9 @@ class LibGlobRecipe(CompiledComponentsPythonRecipe): # https://raw.githubusercontent.com/white-gecko/TokyoCabinet/master/glob.c # and pushed in via patch name = 'libglob' + built_libraries = {'libglob.so': '.'} - depends = [('hostpython2', 'hostpython3')] + depends = ['hostpython3'] patches = ['glob.patch'] def should_build(self, arch): @@ -60,7 +61,6 @@ class LibGlobRecipe(CompiledComponentsPythonRecipe): cflags.extend(['-shared', '-I.', 'glob.o', '-o', 'libglob.so']) cflags.extend(env['LDFLAGS'].split()) shprint(cc, *cflags, _env=env) - shprint(sh.cp, 'libglob.so', join(self.ctx.libs_dir, arch.arch)) recipe = LibGlobRecipe() diff --git a/p4a/pythonforandroid/recipes/libglob/glob.patch b/p4a/pythonforandroid/recipes/libglob/glob.patch index c7fe817..ee71719 100644 --- a/p4a/pythonforandroid/recipes/libglob/glob.patch +++ b/p4a/pythonforandroid/recipes/libglob/glob.patch @@ -911,7 +911,7 @@ diff -Nur /tmp/x/glob.c libglob/glob.c diff -Nur /tmp/x/glob.h libglob/glob.h --- /tmp/x/glob.h 1969-12-31 19:00:00.000000000 -0500 +++ libglob/glob.h 2017-08-19 15:22:18.367109399 -0400 -@@ -0,0 +1,102 @@ +@@ -0,0 +1,104 @@ +/* + * Copyright (c) 1989, 1993 + * The Regents of the University of 
California. All rights reserved.
@@ -952,10 +952,12 @@ diff -Nur /tmp/x/glob.h libglob/glob.h
+
+#include <sys/cdefs.h>
+#include <sys/types.h>
++#ifndef ARG_MAX
+#define ARG_MAX 6553
++#endif
+
+#ifndef _SIZE_T_DECLARED
-+typedef __size_t size_t;
++#include <stddef.h>
+#define _SIZE_T_DECLARED
+#endif
+
diff --git a/p4a/pythonforandroid/recipes/libiconv/__init__.py b/p4a/pythonforandroid/recipes/libiconv/__init__.py
index 4a64669..1cdcb91 100644
--- a/p4a/pythonforandroid/recipes/libiconv/__init__.py
+++ b/p4a/pythonforandroid/recipes/libiconv/__init__.py
@@ -1,5 +1,5 @@
-import os
-from pythonforandroid.toolchain import shprint, current_directory
+from pythonforandroid.logger import shprint
+from pythonforandroid.util import current_directory
from pythonforandroid.recipe import Recipe
from multiprocessing import cpu_count
import sh
@@ -7,28 +7,21 @@ import sh
class LibIconvRecipe(Recipe):
- version = '1.15'
+ version = '1.16'
url = 'https://ftp.gnu.org/pub/gnu/libiconv/libiconv-{version}.tar.gz'
- patches = ['libiconv-1.15-no-gets.patch']
-
- def should_build(self, arch):
- return not os.path.exists(
- os.path.join(self.ctx.get_libs_dir(arch.arch), 'libiconv.so'))
+ built_libraries = {'libiconv.so': 'lib/.libs'}
def build_arch(self, arch):
- super(LibIconvRecipe, self).build_arch(arch)
env = self.get_recipe_env(arch)
with current_directory(self.get_build_dir(arch.arch)):
shprint(
sh.Command('./configure'),
- '--host=' + arch.toolchain_prefix,
- '--prefix=' + self.ctx.get_python_install_dir(),
+ '--host=' + arch.command_prefix,
+ '--prefix=' + self.ctx.get_python_install_dir(arch.arch),
_env=env)
shprint(sh.make, '-j' + str(cpu_count()), _env=env)
- libs = ['lib/.libs/libiconv.so']
- self.install_libs(arch, *libs)
recipe = LibIconvRecipe()
diff --git a/p4a/pythonforandroid/recipes/liblzma/__init__.py b/p4a/pythonforandroid/recipes/liblzma/__init__.py
new file mode 100644
index 0000000..0b880bc
--- /dev/null
+++ b/p4a/pythonforandroid/recipes/liblzma/__init__.py
@@ -0,0 +1,77 @@
+import sh
+
+from multiprocessing import cpu_count
+from os.path import exists, join
+
+from pythonforandroid.archs import Arch
+from pythonforandroid.logger import shprint
+from pythonforandroid.recipe import Recipe
+from pythonforandroid.util import current_directory
+
+
+class LibLzmaRecipe(Recipe):
+
+ version = '5.2.4'
+ url = 'https://tukaani.org/xz/xz-{version}.tar.gz'
+ built_libraries = {'liblzma.so': 'p4a_install/lib'}
+
+ def build_arch(self, arch: Arch) -> None:
+ env = self.get_recipe_env(arch)
+ install_dir = join(self.get_build_dir(arch.arch), 'p4a_install')
+ with current_directory(self.get_build_dir(arch.arch)):
+ if not exists('configure'):
+ shprint(sh.Command('./autogen.sh'), _env=env)
+ shprint(sh.Command('autoreconf'), '-vif', _env=env)
+ shprint(sh.Command('./configure'),
+ '--host=' + arch.command_prefix,
+ '--prefix=' + install_dir,
+ '--disable-builddir',
+ '--disable-static',
+ '--enable-shared',
+
+ '--disable-xz',
+ '--disable-xzdec',
+ '--disable-lzmadec',
+ '--disable-lzmainfo',
+ '--disable-scripts',
+ '--disable-doc',
+
+ _env=env)
+ shprint(
+ sh.make, '-j', str(cpu_count()),
+ _env=env
+ )
+
+ shprint(sh.make, 'install', _env=env)
+
+ def get_library_includes(self, arch: Arch) -> str:
+ """
+ Returns a string with the appropriate `-I` flag for compiling
+ against the lzma lib. This string is usually added to the
+ environment variable `CPPFLAGS`.
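+
+ For example (a sketch; ``lzma_recipe`` names the instance fetched
+ via ``Recipe.get_recipe('liblzma', self.ctx)``):
+ ``env['CPPFLAGS'] += lzma_recipe.get_library_includes(arch)``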
+ """ + return " -I" + join( + self.get_build_dir(arch.arch), 'p4a_install', 'include', + ) + + def get_library_ldflags(self, arch: Arch) -> str: + """ + Returns a string with the appropriate `-L` to link + with the lzma lib. This string is usually added to the environment + variable `LDFLAGS`. + """ + return " -L" + join( + self.get_build_dir(arch.arch), self.built_libraries['liblzma.so'], + ) + + @staticmethod + def get_library_libs_flag() -> str: + """ + Returns a string with the appropriate `-l` flags to link with + the lzma lib. This string is usually added to the environment + variable `LIBS`. + """ + return " -llzma" + + +recipe = LibLzmaRecipe() diff --git a/p4a/pythonforandroid/recipes/libmysqlclient/__init__.py b/p4a/pythonforandroid/recipes/libmysqlclient/__init__.py index 9235ad4..31ebd3c 100644 --- a/p4a/pythonforandroid/recipes/libmysqlclient/__init__.py +++ b/p4a/pythonforandroid/recipes/libmysqlclient/__init__.py @@ -38,7 +38,7 @@ class LibmysqlclientRecipe(Recipe): self.install_libs(arch, join('libmysql', 'libmysql.so')) # def get_recipe_env(self, arch=None): - # env = super(LibmysqlclientRecipe, self).get_recipe_env(arch) + # env = super().get_recipe_env(arch) # env['WITHOUT_SERVER'] = 'ON' # ncurses = self.get_recipe('ncurses', self) # # env['CFLAGS'] += ' -I' + join(ncurses.get_build_dir(arch.arch), diff --git a/p4a/pythonforandroid/recipes/libogg/__init__.py b/p4a/pythonforandroid/recipes/libogg/__init__.py index 064189e..875dd7f 100644 --- a/p4a/pythonforandroid/recipes/libogg/__init__.py +++ b/p4a/pythonforandroid/recipes/libogg/__init__.py @@ -1,26 +1,22 @@ -from pythonforandroid.recipe import NDKRecipe +from pythonforandroid.recipe import Recipe from pythonforandroid.toolchain import current_directory, shprint -from os.path import join import sh -class OggRecipe(NDKRecipe): +class OggRecipe(Recipe): version = '1.3.3' url = 'http://downloads.xiph.org/releases/ogg/libogg-{version}.tar.gz' - - generated_libraries = ['libogg.so'] + built_libraries = {'libogg.so': 'src/.libs'} def build_arch(self, arch): with current_directory(self.get_build_dir(arch.arch)): env = self.get_recipe_env(arch) flags = [ - '--with-sysroot=' + self.ctx.ndk_platform, - '--host=' + arch.toolchain_prefix, + '--host=' + arch.command_prefix, ] configure = sh.Command('./configure') shprint(configure, *flags, _env=env) shprint(sh.make, _env=env) - self.install_libs(arch, join('src', '.libs', 'libogg.so')) recipe = OggRecipe() diff --git a/p4a/pythonforandroid/recipes/libpcre/__init__.py b/p4a/pythonforandroid/recipes/libpcre/__init__.py new file mode 100644 index 0000000..ddf005e --- /dev/null +++ b/p4a/pythonforandroid/recipes/libpcre/__init__.py @@ -0,0 +1,31 @@ +from pythonforandroid.recipe import Recipe +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint +import sh +from multiprocessing import cpu_count +from os.path import join + + +class LibpcreRecipe(Recipe): + version = '8.44' + url = 'https://ftp.pcre.org/pub/pcre/pcre-{version}.tar.bz2' + + built_libraries = {'libpcre.so': '.libs'} + + def build_arch(self, arch): + env = self.get_recipe_env(arch) + + with current_directory(self.get_build_dir(arch.arch)): + shprint( + sh.Command('./configure'), + *'''--host=arm-linux-androideabi + --disable-cpp --enable-jit --enable-utf8 + --enable-unicode-properties'''.split(), + _env=env) + shprint(sh.make, '-j', str(cpu_count()), _env=env) + + def get_lib_dir(self, arch): + return join(self.get_build_dir(arch), '.libs') + + +recipe = LibpcreRecipe() diff 
--git a/p4a/pythonforandroid/recipes/libpq/__init__.py b/p4a/pythonforandroid/recipes/libpq/__init__.py index 45c296a..1faed7c 100644 --- a/p4a/pythonforandroid/recipes/libpq/__init__.py +++ b/p4a/pythonforandroid/recipes/libpq/__init__.py @@ -4,10 +4,16 @@ import os.path class LibpqRecipe(Recipe): - version = '9.5.3' + version = '10.12' url = 'http://ftp.postgresql.org/pub/source/v{version}/postgresql-{version}.tar.bz2' depends = [] + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + env['USE_DEV_URANDOM'] = '1' + + return env + def should_build(self, arch): return not os.path.isfile('{}/libpq.a'.format(self.ctx.get_libs_dir(arch.arch))) diff --git a/p4a/pythonforandroid/recipes/librt/__init__.py b/p4a/pythonforandroid/recipes/librt/__init__.py index 9eb56b3..6c42490 100644 --- a/p4a/pythonforandroid/recipes/librt/__init__.py +++ b/p4a/pythonforandroid/recipes/librt/__init__.py @@ -18,11 +18,8 @@ class LibRt(Recipe): libc, so we create a symbolic link which we will remove when our build finishes''' - @property - def libc_path(self): - return join(self.ctx.ndk_platform, 'usr', 'lib', 'libc') - def build_arch(self, arch): + libc_path = join(arch.ndk_lib_dir_versioned, 'libc') # Create a temporary folder to add to link path with a fake librt.so: fake_librt_temp_folder = join( self.get_build_dir(arch.arch), @@ -35,13 +32,13 @@ class LibRt(Recipe): if exists(join(fake_librt_temp_folder, "librt.so")): remove(join(fake_librt_temp_folder, "librt.so")) shprint(sh.ln, '-sf', - self.libc_path + '.so', + libc_path + '.so', join(fake_librt_temp_folder, "librt.so"), ) if exists(join(fake_librt_temp_folder, "librt.a")): remove(join(fake_librt_temp_folder, "librt.a")) shprint(sh.ln, '-sf', - self.libc_path + '.a', + libc_path + '.a', join(fake_librt_temp_folder, "librt.a"), ) diff --git a/p4a/pythonforandroid/recipes/libsecp256k1/__init__.py b/p4a/pythonforandroid/recipes/libsecp256k1/__init__.py index a855257..f3a2772 100644 --- a/p4a/pythonforandroid/recipes/libsecp256k1/__init__.py +++ b/p4a/pythonforandroid/recipes/libsecp256k1/__init__.py @@ -1,4 +1,5 @@ -from pythonforandroid.toolchain import shprint, current_directory +from pythonforandroid.logger import shprint +from pythonforandroid.util import current_directory from pythonforandroid.recipe import Recipe from multiprocessing import cpu_count from os.path import exists @@ -7,26 +8,25 @@ import sh class LibSecp256k1Recipe(Recipe): + built_libraries = {'libsecp256k1.so': '.libs'} + url = 'https://github.com/bitcoin-core/secp256k1/archive/master.zip' def build_arch(self, arch): - super(LibSecp256k1Recipe, self).build_arch(arch) env = self.get_recipe_env(arch) with current_directory(self.get_build_dir(arch.arch)): if not exists('configure'): shprint(sh.Command('./autogen.sh'), _env=env) shprint( sh.Command('./configure'), - '--host=' + arch.toolchain_prefix, - '--prefix=' + self.ctx.get_python_install_dir(), + '--host=' + arch.command_prefix, + '--prefix=' + self.ctx.get_python_install_dir(arch.arch), '--enable-shared', '--enable-module-recovery', '--enable-experimental', '--enable-module-ecdh', _env=env) shprint(sh.make, '-j' + str(cpu_count()), _env=env) - libs = ['.libs/libsecp256k1.so'] - self.install_libs(arch, *libs) recipe = LibSecp256k1Recipe() diff --git a/p4a/pythonforandroid/recipes/libshine/__init__.py b/p4a/pythonforandroid/recipes/libshine/__init__.py index fe9b5b5..32fa9e1 100644 --- a/p4a/pythonforandroid/recipes/libshine/__init__.py +++ b/p4a/pythonforandroid/recipes/libshine/__init__.py @@ -1,5 +1,8 @@ -from 
pythonforandroid.toolchain import Recipe, current_directory, shprint
-from os.path import exists, join, realpath
+from pythonforandroid.recipe import Recipe
+from pythonforandroid.util import current_directory
+from pythonforandroid.logger import shprint
+from multiprocessing import cpu_count
+from os.path import realpath
import sh
@@ -7,9 +10,15 @@ class LibShineRecipe(Recipe):
version = 'c72aba9031bde18a0995e7c01c9b53f2e08a0e46'
url = 'https://github.com/toots/shine/archive/{version}.zip'
- def should_build(self, arch):
- build_dir = self.get_build_dir(arch.arch)
- return not exists(join(build_dir, 'lib', 'libshine.a'))
+ built_libraries = {'libshine.so': 'lib'}
+
+ def get_recipe_env(self, arch=None, with_flags_in_cc=True):
+ env = super().get_recipe_env(arch, with_flags_in_cc)
+ # technically, libraries should go into `LDLIBS`, but it seems
+ # that libshine doesn't like that and fails at the linking stage
+ env['LDLIBS'] = env['LDLIBS'].replace(' -lm', '')
+ env['LDFLAGS'] += ' -lm'
+ return env
def build_arch(self, arch):
with current_directory(self.get_build_dir(arch.arch)):
@@ -17,13 +26,13 @@ class LibShineRecipe(Recipe):
shprint(sh.Command('./bootstrap'))
configure = sh.Command('./configure')
shprint(configure,
- '--host=arm-linux',
+ f'--host={arch.command_prefix}',
'--enable-pic',
- '--disable-shared',
- '--enable-static',
- '--prefix={}'.format(realpath('.')),
+ '--disable-static',
+ '--enable-shared',
+ f'--prefix={realpath(".")}',
_env=env)
- shprint(sh.make, '-j4', _env=env)
+ shprint(sh.make, '-j', str(cpu_count()), _env=env)
shprint(sh.make, 'install', _env=env)
diff --git a/p4a/pythonforandroid/recipes/libsodium/__init__.py b/p4a/pythonforandroid/recipes/libsodium/__init__.py
index 9911e36..f66fc18 100644
--- a/p4a/pythonforandroid/recipes/libsodium/__init__.py
+++ b/p4a/pythonforandroid/recipes/libsodium/__init__.py
@@ -1,5 +1,7 @@
-from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
-from os.path import exists, join
+from pythonforandroid.recipe import Recipe
+from pythonforandroid.util import current_directory
+from pythonforandroid.logger import shprint
+from multiprocessing import cpu_count
import sh
@@ -8,22 +10,24 @@ class LibsodiumRecipe(Recipe):
url = 'https://github.com/jedisct1/libsodium/releases/download/{version}/libsodium-{version}.tar.gz'
depends = []
patches = ['size_max_fix.patch']
-
- def should_build(self, arch):
- super(LibsodiumRecipe, self).should_build(arch)
- return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libsodium.so'))
+ built_libraries = {'libsodium.so': 'src/libsodium/.libs'}
def build_arch(self, arch):
- super(LibsodiumRecipe, self).build_arch(arch)
env = self.get_recipe_env(arch)
with current_directory(self.get_build_dir(arch.arch)):
bash = sh.Command('bash')
- shprint(bash, 'configure', '--disable-soname-versions', '--host=arm-linux-androideabi', '--enable-shared', _env=env)
- shprint(sh.make, _env=env)
- shutil.copyfile('src/libsodium/.libs/libsodium.so', join(self.ctx.get_libs_dir(arch.arch), 'libsodium.so'))
+ shprint(
+ bash,
+ 'configure',
+ '--disable-soname-versions',
+ '--host={}'.format(arch.command_prefix),
+ '--enable-shared',
+ _env=env,
+ )
+ shprint(sh.make, '-j', str(cpu_count()), _env=env)
def get_recipe_env(self, arch):
- env = super(LibsodiumRecipe, self).get_recipe_env(arch)
+ env = super().get_recipe_env(arch)
env['CFLAGS'] += ' -Os'
return env
diff --git a/p4a/pythonforandroid/recipes/libtorrent/__init__.py b/p4a/pythonforandroid/recipes/libtorrent/__init__.py
index
c73bb02..24f9408 100644 --- a/p4a/pythonforandroid/recipes/libtorrent/__init__.py +++ b/p4a/pythonforandroid/recipes/libtorrent/__init__.py @@ -5,8 +5,8 @@ from os import listdir, walk import sh # This recipe builds libtorrent with Python bindings -# It depends on Boost.Build and the source of several Boost libraries present in BOOST_ROOT, -# which is all provided by the boost recipe +# It depends on Boost.Build and the source of several Boost libraries present +# in BOOST_ROOT, which is all provided by the boost recipe def get_lib_from(search_directory, lib_extension='.so'): @@ -24,7 +24,8 @@ def get_lib_from(search_directory, lib_extension='.so'): class LibtorrentRecipe(Recipe): # Todo: make recipe compatible with all p4a architectures ''' - .. note:: This recipe can be built only against API 21+ and arch armeabi-v7a + .. note:: This recipe can be built only against API 21+ and an android + ndk >= r19 .. versionchanged:: 0.6.0 Rewrote recipe to support clang's build and boost 1.68. The following @@ -33,9 +34,14 @@ class LibtorrentRecipe(Recipe): - Bumped version number to 1.2.0 - added python 3 compatibility - new system to detect/copy generated libraries + + .. versionchanged:: 2019.08.09.1.dev0 + + - Bumped version number to 1.2.1 + - Adapted to work with ndk-r19+ ''' - version = '1_2_0' - url = 'https://github.com/arvidn/libtorrent/archive/libtorrent_{version}.tar.gz' + version = '1_2_1' + url = 'https://github.com/arvidn/libtorrent/archive/libtorrent-{version}.tar.gz' depends = ['boost'] opt_depends = ['openssl'] @@ -55,14 +61,14 @@ class LibtorrentRecipe(Recipe): self.ctx.has_package('libtorrent', arch.arch)) def prebuild_arch(self, arch): - super(LibtorrentRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) if 'openssl' in recipe.ctx.recipe_build_order: # Patch boost user-config.jam to use openssl self.get_recipe('boost', self.ctx).apply_patch( join(self.get_recipe_dir(), 'user-config-openssl.patch'), arch.arch) def build_arch(self, arch): - super(LibtorrentRecipe, self).build_arch(arch) + super().build_arch(arch) env = self.get_recipe_env(arch) env['PYTHON_HOST'] = self.ctx.hostpython @@ -76,7 +82,7 @@ class LibtorrentRecipe(Recipe): '-j' + str(cpu_count()), '--debug-configuration', # so we know if our python is detected # '--deprecated-functions=off', - 'toolset=clang-arm', + 'toolset=clang-{arch}'.format(arch=env['ARCH']), 'abi=aapcs', 'binary-format=elf', 'cxxflags=-std=c++11', @@ -105,8 +111,12 @@ class LibtorrentRecipe(Recipe): # Copy only the boost shared libraries into the libs folder. Because # boost build two boost_python libraries, we force to search the lib # into the corresponding build path. 
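# (For reference, an assumption derived from the toolset set above: with
# 'toolset=clang-arm64', b2 writes its outputs under
# 'build/clang-linux-arm64/release/<encryption>/lt-visibility-hidden/',
# which is the layout that b2_build_dir reconstructs below.)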
- b2_build_dir = 'build/clang-linux-arm/release/{encryption}/' \ - 'lt-visibility-hidden/'.format(encryption=crypto_folder) + b2_build_dir = ( + 'build/clang-linux-{arch}/release/{encryption}/' + 'lt-visibility-hidden/'.format( + arch=env['ARCH'], encryption=crypto_folder + ) + ) boost_libs_dir = join(env['BOOST_BUILD_PATH'], 'bin.v2/libs') for boost_lib in listdir(boost_libs_dir): lib_path = get_lib_from(join(boost_libs_dir, boost_lib, b2_build_dir)) @@ -122,7 +132,7 @@ class LibtorrentRecipe(Recipe): python_libtorrent = get_lib_from(join(build_dir, 'bindings/python/bin')) shutil.copyfile(python_libtorrent, - join(self.ctx.get_site_packages_dir(arch.arch), 'libtorrent.so')) + join(self.ctx.get_site_packages_dir(arch), 'libtorrent.so')) def get_recipe_env(self, arch): # Use environment from boost recipe, cause we use b2 tool from boost diff --git a/p4a/pythonforandroid/recipes/libtorrent/setup-lib-name.patch b/p4a/pythonforandroid/recipes/libtorrent/setup-lib-name.patch index 183705c..4b688be 100644 --- a/p4a/pythonforandroid/recipes/libtorrent/setup-lib-name.patch +++ b/p4a/pythonforandroid/recipes/libtorrent/setup-lib-name.patch @@ -15,6 +15,6 @@ setup( - name='python-libtorrent', + name='libtorrent', - version='1.2.0', + version='1.2.1', author='Arvid Norberg', author_email='arvid@libtorrent.org', diff --git a/p4a/pythonforandroid/recipes/libvorbis/__init__.py b/p4a/pythonforandroid/recipes/libvorbis/__init__.py index 87c7a44..bbbca6f 100644 --- a/p4a/pythonforandroid/recipes/libvorbis/__init__.py +++ b/p4a/pythonforandroid/recipes/libvorbis/__init__.py @@ -12,7 +12,7 @@ class VorbisRecipe(NDKRecipe): generated_libraries = ['libvorbis.so', 'libvorbisfile.so', 'libvorbisenc.so'] def get_recipe_env(self, arch=None): - env = super(VorbisRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) ogg = self.get_recipe('libogg', self.ctx) env['CFLAGS'] += ' -I{}'.format(join(ogg.get_build_dir(arch.arch), 'include')) return env @@ -21,8 +21,7 @@ class VorbisRecipe(NDKRecipe): with current_directory(self.get_build_dir(arch.arch)): env = self.get_recipe_env(arch) flags = [ - '--with-sysroot=' + self.ctx.ndk_platform, - '--host=' + arch.toolchain_prefix, + '--host=' + arch.command_prefix, ] configure = sh.Command('./configure') shprint(configure, *flags, _env=env) diff --git a/p4a/pythonforandroid/recipes/libvpx/__init__.py b/p4a/pythonforandroid/recipes/libvpx/__init__.py new file mode 100644 index 0000000..0173e36 --- /dev/null +++ b/p4a/pythonforandroid/recipes/libvpx/__init__.py @@ -0,0 +1,59 @@ +from pythonforandroid.recipe import Recipe +from pythonforandroid.toolchain import current_directory, shprint +from os.path import join, realpath +from multiprocessing import cpu_count +import sh + + +TARGETS = { + 'armeabi-v7a': 'armv7-android-gcc', + 'arm64-v8a': 'arm64-android-gcc', + 'x86': 'x86-android-gcc', + 'x86_64': 'x86_64-android-gcc', +} + + +class VPXRecipe(Recipe): + version = '1.11.0' + url = 'https://github.com/webmproject/libvpx/archive/v{version}.tar.gz' + + patches = [ + # See https://git.io/Jq50q + join('patches', '0001-android-force-neon-runtime.patch'), + ] + + def get_recipe_env(self, arch=None): + env = super().get_recipe_env(arch) + env['CXXFLAGS'] += f' -I{self.ctx.ndk.libcxx_include_dir}' + return env + + def build_arch(self, arch): + with current_directory(self.get_build_dir(arch.arch)): + env = self.get_recipe_env(arch) + flags = [ + '--target=' + TARGETS[arch.arch], + '--enable-pic', + '--enable-vp8', + '--enable-vp9', + '--enable-static', + 
'--enable-small',
+ '--disable-shared',
+ '--disable-examples',
+ '--disable-unit-tests',
+ '--disable-tools',
+ '--disable-docs',
+ '--disable-install-docs',
+ '--disable-realtime-only',
+ f'--prefix={realpath(".")}',
+ ]
+
+ if arch.arch == 'armeabi-v7a':
+ flags.append('--disable-neon-asm')
+
+ configure = sh.Command('./configure')
+ shprint(configure, *flags, _env=env)
+ shprint(sh.make, '-j', str(cpu_count()), _env=env)
+ shprint(sh.make, 'install', _env=env)
+
+
+recipe = VPXRecipe()
diff --git a/p4a/pythonforandroid/recipes/libvpx/patches/0001-android-force-neon-runtime.patch b/p4a/pythonforandroid/recipes/libvpx/patches/0001-android-force-neon-runtime.patch
new file mode 100644
index 0000000..220800d
--- /dev/null
+++ b/p4a/pythonforandroid/recipes/libvpx/patches/0001-android-force-neon-runtime.patch
@@ -0,0 +1,25 @@
+diff -u -r ../libvpx-1.6.1/vpx_ports/arm_cpudetect.c ./vpx_ports/arm_cpudetect.c
+--- ../libvpx-1.6.1/vpx_ports/arm_cpudetect.c 2017-01-12 21:27:27.000000000 +0100
++++ ./vpx_ports/arm_cpudetect.c 2017-01-29 23:55:05.399283897 +0100
+@@ -92,20 +92,17 @@
+ }
+
+ #elif defined(__ANDROID__) /* end _MSC_VER */
+-#include <cpu-features.h>
+
+ int arm_cpu_caps(void) {
+ int flags;
+ int mask;
+- uint64_t features;
+ if (!arm_cpu_env_flags(&flags)) {
+ return flags;
+ }
+ mask = arm_cpu_env_mask();
+- features = android_getCpuFeatures();
+
+ #if HAVE_NEON || HAVE_NEON_ASM
+- if (features & ANDROID_CPU_ARM_FEATURE_NEON) flags |= HAS_NEON;
++ flags |= HAS_NEON;
+ #endif /* HAVE_NEON || HAVE_NEON_ASM */
+ return flags & mask;
+ }
diff --git a/p4a/pythonforandroid/recipes/libwebp/__init__.py b/p4a/pythonforandroid/recipes/libwebp/__init__.py
new file mode 100644
index 0000000..aacd485
--- /dev/null
+++ b/p4a/pythonforandroid/recipes/libwebp/__init__.py
@@ -0,0 +1,50 @@
+from multiprocessing import cpu_count
+from os.path import join
+
+import sh
+
+from pythonforandroid.util import current_directory, ensure_dir
+from pythonforandroid.toolchain import shprint
+from pythonforandroid.recipe import Recipe
+
+
+class LibwebpRecipe(Recipe):
+ version = '1.1.0'
+ url = 'https://storage.googleapis.com/downloads.webmproject.org/releases/webp/libwebp-{version}.tar.gz' # noqa
+ depends = []
+ built_libraries = {
+ 'libwebp.so': 'installation/lib',
+ 'libwebpdecoder.so': 'installation/lib',
+ 'libwebpdemux.so': 'installation/lib',
+ 'libwebpmux.so': 'installation/lib',
+ }
+
+ def build_arch(self, arch):
+ source_dir = self.get_build_dir(arch.arch)
+ build_dir = join(source_dir, 'build')
+ install_dir = join(source_dir, 'installation')
+ toolchain_file = join(
+ self.ctx.ndk_dir, 'build', 'cmake', 'android.toolchain.cmake',
+ )
+
+ ensure_dir(build_dir)
+ with current_directory(build_dir):
+ env = self.get_recipe_env(arch)
+ shprint(sh.cmake, source_dir,
+ f'-DANDROID_ABI={arch.arch}',
+ f'-DANDROID_NATIVE_API_LEVEL={self.ctx.ndk_api}',
+
+ f'-DCMAKE_TOOLCHAIN_FILE={toolchain_file}',
+ f'-DCMAKE_INSTALL_PREFIX={install_dir}',
+ '-DCMAKE_BUILD_TYPE=Release',
+
+ '-DBUILD_SHARED_LIBS=1',
+
+ _env=env)
+ shprint(sh.make, '-j' + str(cpu_count()), _env=env)
+ # We run `make install` so that all the includes and
+ # libraries end up in one place
+ shprint(sh.make, 'install', _env=env)
+
+
+recipe = LibwebpRecipe()
diff --git a/p4a/pythonforandroid/recipes/libx264/__init__.py b/p4a/pythonforandroid/recipes/libx264/__init__.py
index c139b4c..6341309 100644
--- a/p4a/pythonforandroid/recipes/libx264/__init__.py
+++ b/p4a/pythonforandroid/recipes/libx264/__init__.py
@@ -1,31 +1,29 @@
-from
pythonforandroid.toolchain import Recipe, current_directory, shprint -from os.path import exists, join, realpath +from pythonforandroid.recipe import Recipe +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint +from multiprocessing import cpu_count +from os.path import realpath import sh class LibX264Recipe(Recipe): - version = 'x264-snapshot-20171218-2245-stable' # using mirror url since can't use ftp - url = 'http://mirror.yandex.ru/mirrors/ftp.videolan.org/x264/snapshots/{version}.tar.bz2' - - def should_build(self, arch): - build_dir = self.get_build_dir(arch.arch) - return not exists(join(build_dir, 'lib', 'libx264.a')) + version = '5db6aa6cab1b146e07b60cc1736a01f21da01154' # commit of latest known stable version + url = 'https://code.videolan.org/videolan/x264/-/archive/{version}/x264-{version}.zip' + built_libraries = {'libx264.a': 'lib'} def build_arch(self, arch): with current_directory(self.get_build_dir(arch.arch)): env = self.get_recipe_env(arch) configure = sh.Command('./configure') shprint(configure, - '--cross-prefix=arm-linux-androideabi-', - '--host=arm-linux', + f'--host={arch.command_prefix}', '--disable-asm', '--disable-cli', '--enable-pic', - '--disable-shared', '--enable-static', '--prefix={}'.format(realpath('.')), _env=env) - shprint(sh.make, '-j4', _env=env) + shprint(sh.make, '-j', str(cpu_count()), _env=env) shprint(sh.make, 'install', _env=env) diff --git a/p4a/pythonforandroid/recipes/libxml2/__init__.py b/p4a/pythonforandroid/recipes/libxml2/__init__.py index cdeaf88..100c528 100644 --- a/p4a/pythonforandroid/recipes/libxml2/__init__.py +++ b/p4a/pythonforandroid/recipes/libxml2/__init__.py @@ -1,22 +1,18 @@ from pythonforandroid.recipe import Recipe -from pythonforandroid.toolchain import shprint, shutil, current_directory -from os.path import exists, join +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint +from os.path import exists import sh class Libxml2Recipe(Recipe): - version = '2.9.8' + version = '2.9.12' url = 'http://xmlsoft.org/sources/libxml2-{version}.tar.gz' depends = [] patches = ['add-glob.c.patch'] - - def should_build(self, arch): - super(Libxml2Recipe, self).should_build(arch) - return not exists( - join(self.get_build_dir(arch.arch), '.libs', 'libxml2.a')) + built_libraries = {'libxml2.a': '.libs'} def build_arch(self, arch): - super(Libxml2Recipe, self).build_arch(arch) env = self.get_recipe_env(arch) with current_directory(self.get_build_dir(arch.arch)): @@ -46,11 +42,8 @@ class Libxml2Recipe(Recipe): # we'll need the glob dependency which is a big headache shprint(sh.make, "libxml2.la", _env=env) - shutil.copyfile('.libs/libxml2.a', - join(self.ctx.libs_dir, 'libxml2.a')) - def get_recipe_env(self, arch): - env = super(Libxml2Recipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) env['CONFIG_SHELL'] = '/bin/bash' env['SHELL'] = '/bin/bash' env['CC'] += ' -I' + self.get_build_dir(arch.arch) diff --git a/p4a/pythonforandroid/recipes/libxslt/__init__.py b/p4a/pythonforandroid/recipes/libxslt/__init__.py index 076d6cc..d9127cf 100644 --- a/p4a/pythonforandroid/recipes/libxslt/__init__.py +++ b/p4a/pythonforandroid/recipes/libxslt/__init__.py @@ -1,24 +1,23 @@ from pythonforandroid.recipe import Recipe -from pythonforandroid.toolchain import shprint, shutil, current_directory +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint from os.path import exists, join import sh class 
LibxsltRecipe(Recipe): - version = '1.1.32' + version = '1.1.34' url = 'http://xmlsoft.org/sources/libxslt-{version}.tar.gz' depends = ['libxml2'] patches = ['fix-dlopen.patch'] + built_libraries = { + 'libxslt.a': 'libxslt/.libs', + 'libexslt.a': 'libexslt/.libs' + } call_hostpython_via_targetpython = False - def should_build(self, arch): - return not exists( - join(self.get_build_dir(arch.arch), - 'libxslt', '.libs', 'libxslt.a')) - def build_arch(self, arch): - super(LibxsltRecipe, self).build_arch(arch) env = self.get_recipe_env(arch) build_dir = self.get_build_dir(arch.arch) with current_directory(build_dir): @@ -45,13 +44,10 @@ class LibxsltRecipe(Recipe): _env=env) shprint(sh.make, "V=1", _env=env) - shutil.copyfile('libxslt/.libs/libxslt.a', - join(self.ctx.libs_dir, 'libxslt.a')) - shutil.copyfile('libexslt/.libs/libexslt.a', - join(self.ctx.libs_dir, 'libexslt.a')) + shprint(sh.Command('chmod'), '+x', 'xslt-config') def get_recipe_env(self, arch): - env = super(LibxsltRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) env['CONFIG_SHELL'] = '/bin/bash' env['SHELL'] = '/bin/bash' diff --git a/p4a/pythonforandroid/recipes/libzbar/__init__.py b/p4a/pythonforandroid/recipes/libzbar/__init__.py index 43ae34c..4e26ca4 100644 --- a/p4a/pythonforandroid/recipes/libzbar/__init__.py +++ b/p4a/pythonforandroid/recipes/libzbar/__init__.py @@ -1,6 +1,7 @@ import os -from pythonforandroid.toolchain import shprint, current_directory from pythonforandroid.recipe import Recipe +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint from multiprocessing import cpu_count import sh @@ -15,12 +16,10 @@ class LibZBarRecipe(Recipe): patches = ["werror.patch"] - def should_build(self, arch): - return not os.path.exists( - os.path.join(self.ctx.get_libs_dir(arch.arch), 'libzbar.so')) + built_libraries = {'libzbar.so': 'zbar/.libs'} def get_recipe_env(self, arch=None, with_flags_in_cc=True): - env = super(LibZBarRecipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) libiconv = self.get_recipe('libiconv', self.ctx) libiconv_dir = libiconv.get_build_dir(arch.arch) env['CFLAGS'] += ' -I' + os.path.join(libiconv_dir, 'include') @@ -28,15 +27,14 @@ class LibZBarRecipe(Recipe): return env def build_arch(self, arch): - super(LibZBarRecipe, self).build_arch(arch) env = self.get_recipe_env(arch) with current_directory(self.get_build_dir(arch.arch)): shprint(sh.Command('autoreconf'), '-vif', _env=env) shprint( sh.Command('./configure'), - '--host=' + arch.toolchain_prefix, - '--target=' + arch.toolchain_prefix, - '--prefix=' + self.ctx.get_python_install_dir(), + '--host=' + arch.command_prefix, + '--target=' + arch.command_prefix, + '--prefix=' + self.ctx.get_python_install_dir(arch.arch), # Python bindings are compiled in a separated recipe '--with-python=no', '--with-gtk=no', @@ -50,8 +48,6 @@ class LibZBarRecipe(Recipe): '--enable-static=no', _env=env) shprint(sh.make, '-j' + str(cpu_count()), _env=env) - libs = ['zbar/.libs/libzbar.so'] - self.install_libs(arch, *libs) recipe = LibZBarRecipe() diff --git a/p4a/pythonforandroid/recipes/libzmq/__init__.py b/p4a/pythonforandroid/recipes/libzmq/__init__.py index b33f3ac..243517b 100644 --- a/p4a/pythonforandroid/recipes/libzmq/__init__.py +++ b/p4a/pythonforandroid/recipes/libzmq/__init__.py @@ -1,21 +1,18 @@ -from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory -from pythonforandroid.util import ensure_dir -from os.path 
import exists, join +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import shprint +from pythonforandroid.util import current_directory +from os.path import join import sh class LibZMQRecipe(Recipe): - version = '4.1.4' - url = 'http://download.zeromq.org/zeromq-{version}.tar.gz' + version = '4.3.2' + url = 'https://github.com/zeromq/libzmq/releases/download/v{version}/zeromq-{version}.zip' depends = [] - - def should_build(self, arch): - super(LibZMQRecipe, self).should_build(arch) - return True - return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libzmq.so')) + built_libraries = {'libzmq.so': 'src/.libs'} + need_stl_shared = True def build_arch(self, arch): - super(LibZMQRecipe, self).build_arch(arch) env = self.get_recipe_env(arch) # # libsodium_recipe = Recipe.get_recipe('libsodium', self.ctx) @@ -27,49 +24,19 @@ class LibZMQRecipe(Recipe): curdir = self.get_build_dir(arch.arch) prefix = join(curdir, "install") + with current_directory(curdir): bash = sh.Command('sh') shprint( bash, './configure', - '--host=arm-linux-androideabi', + '--host={}'.format(arch.command_prefix), '--without-documentation', '--prefix={}'.format(prefix), '--with-libsodium=no', + '--disable-libunwind', _env=env) shprint(sh.make, _env=env) shprint(sh.make, 'install', _env=env) - shutil.copyfile('.libs/libzmq.so', join( - self.ctx.get_libs_dir(arch.arch), 'libzmq.so')) - - bootstrap_obj_dir = join(self.ctx.bootstrap.build_dir, 'obj', 'local', arch.arch) - ensure_dir(bootstrap_obj_dir) - shutil.copyfile( - '{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}/libgnustl_shared.so'.format( - self.ctx.ndk_dir, self.ctx.toolchain_version, arch), - join(bootstrap_obj_dir, 'libgnustl_shared.so')) - - # Copy libgnustl_shared.so - with current_directory(self.get_build_dir(arch.arch)): - sh.cp( - "{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/libgnustl_shared.so".format(ctx=self.ctx, arch=arch), - self.ctx.get_libs_dir(arch.arch) - ) - - def get_recipe_env(self, arch): - # XXX should stl be configuration for the toolchain itself? 
- env = super(LibZMQRecipe, self).get_recipe_env(arch) - env['CFLAGS'] += ' -Os' - env['CXXFLAGS'] += ' -Os -fPIC -fvisibility=default' - env['CXXFLAGS'] += ' -I{}/sources/cxx-stl/gnu-libstdc++/{}/include'.format( - self.ctx.ndk_dir, self.ctx.toolchain_version) - env['CXXFLAGS'] += ' -I{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}/include'.format( - self.ctx.ndk_dir, self.ctx.toolchain_version, arch) - env['CXXFLAGS'] += ' -L{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}'.format( - self.ctx.ndk_dir, self.ctx.toolchain_version, arch) - env['CXXFLAGS'] += ' -lgnustl_shared' - env['LDFLAGS'] += ' -L{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}'.format( - self.ctx.ndk_dir, self.ctx.toolchain_version, arch) - return env recipe = LibZMQRecipe() diff --git a/p4a/pythonforandroid/recipes/lxml/__init__.py b/p4a/pythonforandroid/recipes/lxml/__init__.py index 6d4b91c..4910caf 100644 --- a/p4a/pythonforandroid/recipes/lxml/__init__.py +++ b/p4a/pythonforandroid/recipes/lxml/__init__.py @@ -4,7 +4,7 @@ from os import uname class LXMLRecipe(CompiledComponentsPythonRecipe): - version = '4.2.5' + version = '4.8.0' url = 'https://pypi.python.org/packages/source/l/lxml/lxml-{version}.tar.gz' # noqa depends = ['librt', 'libxml2', 'libxslt', 'setuptools'] name = 'lxml' @@ -12,53 +12,52 @@ class LXMLRecipe(CompiledComponentsPythonRecipe): call_hostpython_via_targetpython = False # Due to setuptools def should_build(self, arch): - super(LXMLRecipe, self).should_build(arch) + super().should_build(arch) py_ver = self.ctx.python_recipe.major_minor_version_string - build_platform = '{system}-{machine}'.format( - system=uname()[0], machine=uname()[-1]).lower() - build_dir = join(self.get_build_dir(arch.arch), 'build', - 'lib.' + build_platform + '-' + py_ver, 'lxml') - py_libs = ['_elementpath.so', 'builder.so', 'etree.so', 'objectify.so'] + build_platform = "{system}-{machine}".format( + system=uname()[0], machine=uname()[-1] + ).lower() + build_dir = join( + self.get_build_dir(arch.arch), + "build", + "lib." 
+ build_platform + "-" + py_ver, + "lxml", + ) + py_libs = ["_elementpath.so", "builder.so", "etree.so", "objectify.so"] return not all([exists(join(build_dir, lib)) for lib in py_libs]) def get_recipe_env(self, arch): - env = super(LXMLRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) # libxslt flags libxslt_recipe = Recipe.get_recipe('libxslt', self.ctx) libxslt_build_dir = libxslt_recipe.get_build_dir(arch.arch) - cflags = ' -I' + libxslt_build_dir - cflags += ' -I' + join(libxslt_build_dir, 'libxslt') - cflags += ' -I' + join(libxslt_build_dir, 'libexslt') - - env['LDFLAGS'] += ' -L' + join(libxslt_build_dir, 'libxslt', '.libs') - env['LDFLAGS'] += ' -L' + join(libxslt_build_dir, 'libexslt', '.libs') - env['LIBS'] = '-lxslt -lexslt' - # libxml2 flags libxml2_recipe = Recipe.get_recipe('libxml2', self.ctx) libxml2_build_dir = libxml2_recipe.get_build_dir(arch.arch) - libxml2_libs_dir = join(libxml2_build_dir, '.libs') - cflags += ' -I' + libxml2_build_dir - cflags += ' -I' + join(libxml2_build_dir, 'include') - cflags += ' -I' + join(libxml2_build_dir, 'include', 'libxml') - cflags += ' -I' + self.get_build_dir(arch.arch) - env['LDFLAGS'] += ' -L' + libxml2_libs_dir - env['LIBS'] += ' -lxml2' + env["STATIC"] = "true" - # android's ndk flags - ndk_lib_dir = join(self.ctx.ndk_platform, 'usr', 'lib') - ndk_include_dir = join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include') - cflags += ' -I' + ndk_include_dir - env['LDFLAGS'] += ' -L' + ndk_lib_dir - env['LIBS'] += ' -lz -lm -lc' + env["LXML_STATIC_INCLUDE_DIRS"] = "{}:{}".format( + join(libxml2_build_dir, "include"), join(libxslt_build_dir) + ) + env["LXML_STATIC_LIBRARY_DIRS"] = "{}:{}:{}".format( + join(libxml2_build_dir, ".libs"), + join(libxslt_build_dir, "libxslt", ".libs"), + join(libxslt_build_dir, "libexslt", ".libs"), + ) - if cflags not in env['CFLAGS']: - env['CFLAGS'] += cflags + env["WITH_XML2_CONFIG"] = join(libxml2_build_dir, "xml2-config") + env["WITH_XSLT_CONFIG"] = join(libxslt_build_dir, "xslt-config") + + env["LXML_STATIC_BINARIES"] = "{}:{}:{}".format( + join(libxml2_build_dir, ".libs", "libxml2.a"), + join(libxslt_build_dir, "libxslt", ".libs", "libxslt.a"), + join(libxslt_build_dir, "libexslt", ".libs", "libexslt.a"), + ) return env diff --git a/p4a/pythonforandroid/recipes/m2crypto/__init__.py b/p4a/pythonforandroid/recipes/m2crypto/__init__.py index 653eeca..5786049 100644 --- a/p4a/pythonforandroid/recipes/m2crypto/__init__.py +++ b/p4a/pythonforandroid/recipes/m2crypto/__init__.py @@ -32,7 +32,7 @@ class M2CryptoRecipe(CompiledComponentsPythonRecipe): env['STRIP'], '{}', ';', _env=env) def get_recipe_env(self, arch): - env = super(M2CryptoRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) env['OPENSSL_BUILD_PATH'] = self.get_recipe('openssl', self.ctx).get_build_dir(arch.arch) return env diff --git a/p4a/pythonforandroid/recipes/matplotlib/__init__.py b/p4a/pythonforandroid/recipes/matplotlib/__init__.py new file mode 100644 index 0000000..f79cde3 --- /dev/null +++ b/p4a/pythonforandroid/recipes/matplotlib/__init__.py @@ -0,0 +1,94 @@ +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe +from pythonforandroid.util import ensure_dir + +from os.path import join +import shutil + + +class MatplotlibRecipe(CppCompiledComponentsPythonRecipe): + + version = '3.5.2' + url = 'https://github.com/matplotlib/matplotlib/archive/v{version}.zip' + + depends = ['kiwisolver', 'numpy', 'pillow', 'setuptools', 'freetype'] + + python_depends = ['cycler', 'fonttools', 
'packaging', 'pyparsing', 'python-dateutil']
+
+ def generate_libraries_pc_files(self, arch):
+ """
+ Create *.pc files for libraries that `matplotlib` depends on.
+
+ Because, on unix platforms, the mpl install script uses `pkg-config`
+ to detect libraries installed in non-standard locations (our case...
+ well... we don't even install the libraries... so we must trick the
+ mpl install a little).
+ """
+ pkg_config_path = self.get_recipe_env(arch)['PKG_CONFIG_PATH']
+ ensure_dir(pkg_config_path)
+
+ lib_to_pc_file = {
+ # `pkg-config` searches for freetype2.pc, named after our current
+ # freetype version, but our recipe is named without the
+ # version... so we add it in here for our pc file
+ 'freetype': 'freetype2.pc',
+ }
+
+ for lib_name in {'freetype'}:
+ pc_template_file = join(
+ self.get_recipe_dir(),
+ f'lib{lib_name}.pc.template'
+ )
+ # read template file into buffer
+ with open(pc_template_file) as template_file:
+ text_buffer = template_file.read()
+ # set the library absolute path and library version
+ lib_recipe = self.get_recipe(lib_name, self.ctx)
+ text_buffer = text_buffer.replace(
+ 'path_to_built', lib_recipe.get_build_dir(arch.arch),
+ )
+ text_buffer = text_buffer.replace(
+ 'library_version', lib_recipe.version,
+ )
+
+ # write the library pc file into our defined dir `PKG_CONFIG_PATH`
+ pc_dest_file = join(pkg_config_path, lib_to_pc_file[lib_name])
+ with open(pc_dest_file, 'w') as pc_file:
+ pc_file.write(text_buffer)
+
+ def prebuild_arch(self, arch):
+ shutil.copyfile(
+ join(self.get_recipe_dir(), "setup.cfg.template"),
+ join(self.get_build_dir(arch), "mplsetup.cfg"),
+ )
+ self.generate_libraries_pc_files(arch)
+
+ def get_recipe_env(self, arch=None, with_flags_in_cc=True):
+ env = super().get_recipe_env(arch, with_flags_in_cc)
+
+ # we use the same directory as `XDG_CACHE_HOME` for our custom
+ # library pc files, so all the files we generate during the
+ # install live in the same place
+ env['XDG_CACHE_HOME'] = join(self.get_build_dir(arch), 'p4a_files')
+ env['PKG_CONFIG_PATH'] = env['XDG_CACHE_HOME']
+
+ # creating proper *.pc files for our libraries does not seem to be
+ # enough for our build to succeed (without depending on system
+ # development libraries), but if we tell the compiler where to find
+ # our libraries and includes, then the install succeeds :)
+ freetype = self.get_recipe('freetype', self.ctx)
+ free_lib_dir = join(freetype.get_build_dir(arch.arch), 'objs', '.libs')
+ free_inc_dir = join(freetype.get_build_dir(arch.arch), 'include')
+ env['CFLAGS'] += f' -I{free_inc_dir}'
+ env['LDFLAGS'] += f' -L{free_lib_dir}'
+
+ # `freetype` could be built with `harfbuzz` support,
+ # so we also include the necessary flags... just to be sure
+ if 'harfbuzz' in self.ctx.recipe_build_order:
+ harfbuzz = self.get_recipe('harfbuzz', self.ctx)
+ harf_build = harfbuzz.get_build_dir(arch.arch)
+ env['CFLAGS'] += f' -I{harf_build} -I{join(harf_build, "src")}'
+ env['LDFLAGS'] += f' -L{join(harf_build, "src", ".libs")}'
+ return env
+
+
+recipe = MatplotlibRecipe()
diff --git a/p4a/pythonforandroid/recipes/matplotlib/libfreetype.pc.template b/p4a/pythonforandroid/recipes/matplotlib/libfreetype.pc.template
new file mode 100644
index 0000000..df5ef28
--- /dev/null
+++ b/p4a/pythonforandroid/recipes/matplotlib/libfreetype.pc.template
@@ -0,0 +1,10 @@
+prefix=path_to_built
+exec_prefix=${prefix}
+includedir=${prefix}/include
+libdir=${exec_prefix}/objs/.libs
+
+Name: freetype2
+Description: The freetype2 library
+Version: library_version
+Cflags:
-I${includedir} +Libs: -L${libdir} -lfreetype diff --git a/p4a/pythonforandroid/recipes/matplotlib/setup.cfg.template b/p4a/pythonforandroid/recipes/matplotlib/setup.cfg.template new file mode 100644 index 0000000..96ef80d --- /dev/null +++ b/p4a/pythonforandroid/recipes/matplotlib/setup.cfg.template @@ -0,0 +1,38 @@ +# Rename this file to mplsetup.cfg to modify Matplotlib's build options. + +[libs] +# By default, Matplotlib builds with LTO, which may be slow if you re-compile +# often, and don't need the space saving/speedup. +enable_lto = False +# By default, Matplotlib downloads and builds its own copies of FreeType and of +# Qhull. You may set the following to True to instead link against a system +# FreeType/Qhull. As an exception, Matplotlib defaults to the system version +# of FreeType on AIX. +system_freetype = True +#system_qhull = False + +[packages] +# There are a number of data subpackages from Matplotlib that are +# considered optional. All except 'tests' data (meaning the baseline +# image files) are installed by default, but that can be changed here. +#tests = False + +[gui_support] +# Matplotlib supports multiple GUI toolkits, known as backends. +# The MacOSX backend requires the Cocoa headers included with XCode. +# You can select whether to build it by uncommenting the following line. +# It is never built on Linux or Windows, regardless of the config value. +# +macosx = False + +[rc_options] +# User-configurable options +# +# Default backend, one of: Agg, Cairo, GTK3Agg, GTK3Cairo, GTK4Agg, GTK4Cairo, +# MacOSX, Pdf, Ps, QtAgg, QtCairo, SVG, TkAgg, WX, WXAgg. +# +# The Agg, Ps, Pdf and SVG backends do not require external dependencies. Do +# not choose MacOSX if you have disabled the relevant extension modules. The +# default is determined by fallback. 
+# +#backend = Agg \ No newline at end of file diff --git a/p4a/pythonforandroid/recipes/mysqldb/__init__.py b/p4a/pythonforandroid/recipes/mysqldb/__init__.py index f084585..768cb72 100644 --- a/p4a/pythonforandroid/recipes/mysqldb/__init__.py +++ b/p4a/pythonforandroid/recipes/mysqldb/__init__.py @@ -23,15 +23,15 @@ class MysqldbRecipe(CompiledComponentsPythonRecipe): f.write(data.replace(b'\r\n', b'\n').replace(b'\r', b'\n')) def prebuild_arch(self, arch): - super(MysqldbRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) setupbase = join(self.get_build_dir(arch.arch), 'setup') self.convert_newlines(setupbase + '.py') self.convert_newlines(setupbase + '_posix.py') def get_recipe_env(self, arch=None): - env = super(MysqldbRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) - hostpython = self.get_recipe('hostpython2', self.ctx) + hostpython = self.get_recipe('hostpython3', self.ctx) # TODO: fix hardcoded path env['PYTHONPATH'] = (join(hostpython.get_build_dir(arch.arch), 'build', 'lib.linux-x86_64-2.7') + diff --git a/p4a/pythonforandroid/recipes/ndghttpsclient b/p4a/pythonforandroid/recipes/ndghttpsclient index 35e996f..bfe581a 100644 --- a/p4a/pythonforandroid/recipes/ndghttpsclient +++ b/p4a/pythonforandroid/recipes/ndghttpsclient @@ -1,9 +1,9 @@ from pythonforandroid.recipe import PythonRecipe class NdgHttpsClientRecipe(PythonRecipe): - version = '0.4.0' + version = '0.5.1' url = 'https://pypi.python.org/packages/source/n/ndg-httpsclient/ndg_httpsclient-{version}.tar.gz' - depends = ['python2', 'pyopenssl', 'cryptography'] + depends = ['python3', 'pyopenssl', 'cryptography'] call_hostpython_via_targetpython = False recipe = NdgHttpsClientRecipe() diff --git a/p4a/pythonforandroid/recipes/numpy/__init__.py b/p4a/pythonforandroid/recipes/numpy/__init__.py index 6b6e6b3..55a0279 100644 --- a/p4a/pythonforandroid/recipes/numpy/__init__.py +++ b/p4a/pythonforandroid/recipes/numpy/__init__.py @@ -1,57 +1,74 @@ from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.logger import shprint, info +from pythonforandroid.util import current_directory from multiprocessing import cpu_count from os.path import join +import glob +import sh +import shutil class NumpyRecipe(CompiledComponentsPythonRecipe): - version = '1.15.1' + version = '1.22.3' url = 'https://pypi.python.org/packages/source/n/numpy/numpy-{version}.zip' site_packages_name = 'numpy' - depends = [('python2', 'python3', 'python3crystax')] + depends = ['setuptools', 'cython'] + install_in_hostpython = True + call_hostpython_via_targetpython = False patches = [ - join('patches', 'fix-numpy.patch'), - join('patches', 'prevent_libs_check.patch'), - join('patches', 'ar.patch'), - join('patches', 'lib.patch'), - join('patches', 'python-fixes.patch') + join("patches", "remove-default-paths.patch"), + join("patches", "add_libm_explicitly_to_build.patch"), + join("patches", "ranlib.patch"), ] + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super().get_recipe_env(arch, with_flags_in_cc) + + # _PYTHON_HOST_PLATFORM declares that we're cross-compiling + # and avoids issues when building on macOS for Android targets. 
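+ # (e.g. arch.command_prefix is 'aarch64-linux-android' for the
+ # arm64-v8a arch; an illustrative value, it is not set here)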
+ env["_PYTHON_HOST_PLATFORM"] = arch.command_prefix + + # NPY_DISABLE_SVML=1 allows numpy to build for non-AVX512 CPUs + # See: https://github.com/numpy/numpy/issues/21196 + env["NPY_DISABLE_SVML"] = "1" + + return env + + def _build_compiled_components(self, arch): + info('Building compiled components in {}'.format(self.name)) + + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + hostpython = sh.Command(self.hostpython_location) + shprint(hostpython, 'setup.py', self.build_cmd, '-v', + _env=env, *self.setup_extra_args) + build_dir = glob.glob('build/lib.*')[0] + shprint(sh.find, build_dir, '-name', '"*.o"', '-exec', + env['STRIP'], '{}', ';', _env=env) + + def _rebuild_compiled_components(self, arch, env): + info('Rebuilding compiled components in {}'.format(self.name)) + + hostpython = sh.Command(self.real_hostpython_location) + shprint(hostpython, 'setup.py', 'clean', '--all', '--force', _env=env) + shprint(hostpython, 'setup.py', self.build_cmd, '-v', _env=env, + *self.setup_extra_args) + def build_compiled_components(self, arch): self.setup_extra_args = ['-j', str(cpu_count())] - super(NumpyRecipe, self).build_compiled_components(arch) + self._build_compiled_components(arch) self.setup_extra_args = [] def rebuild_compiled_components(self, arch, env): self.setup_extra_args = ['-j', str(cpu_count())] - super(NumpyRecipe, self).rebuild_compiled_components(arch, env) + self._rebuild_compiled_components(arch, env) self.setup_extra_args = [] - def get_recipe_env(self, arch): - env = super(NumpyRecipe, self).get_recipe_env(arch) - - flags = " -L{} --sysroot={}".format( - join(self.ctx.ndk_platform, 'usr', 'lib'), - self.ctx.ndk_platform - ) - - py_ver = self.ctx.python_recipe.major_minor_version_string - py_inc_dir = self.ctx.python_recipe.include_root(arch.arch) - py_lib_dir = self.ctx.python_recipe.link_root(arch.arch) - if self.ctx.ndk == 'crystax': - src_dir = join(self.ctx.ndk_dir, 'sources') - flags += " -I{}".format(join(src_dir, 'crystax', 'include')) - flags += " -L{}".format(join(src_dir, 'crystax', 'libs', arch.arch)) - flags += ' -I{}'.format(py_inc_dir) - flags += ' -L{} -lpython{}'.format(py_lib_dir, py_ver) - if 'python3' in self.ctx.python_recipe.name: - flags += 'm' - - if flags not in env['CC']: - env['CC'] += flags - if flags not in env['LD']: - env['LD'] += flags + ' -shared' + def get_hostrecipe_env(self, arch): + env = super().get_hostrecipe_env(arch) + env['RANLIB'] = shutil.which('ranlib') return env diff --git a/p4a/pythonforandroid/recipes/numpy/patches/add_libm_explicitly_to_build.patch b/p4a/pythonforandroid/recipes/numpy/patches/add_libm_explicitly_to_build.patch new file mode 100644 index 0000000..f9ba9e9 --- /dev/null +++ b/p4a/pythonforandroid/recipes/numpy/patches/add_libm_explicitly_to_build.patch @@ -0,0 +1,20 @@ +diff --git a/numpy/linalg/setup.py b/numpy/linalg/setup.py +index 66c07c9..d34bd93 100644 +--- a/numpy/linalg/setup.py ++++ b/numpy/linalg/setup.py +@@ -46,6 +46,7 @@ def configuration(parent_package='', top_path=None): + sources=['lapack_litemodule.c', get_lapack_lite_sources], + depends=['lapack_lite/f2c.h'], + extra_info=lapack_info, ++ libraries=['m'], + ) + + # umath_linalg module +@@ -54,7 +54,7 @@ def configuration(parent_package='', top_path=None): + sources=['umath_linalg.c.src', get_lapack_lite_sources], + depends=['lapack_lite/f2c.h'], + extra_info=lapack_info, +- libraries=['npymath'], ++ libraries=['npymath', 'm'], + ) + return config diff --git 
a/p4a/pythonforandroid/recipes/numpy/patches/ranlib.patch b/p4a/pythonforandroid/recipes/numpy/patches/ranlib.patch new file mode 100644 index 0000000..c0b5dad --- /dev/null +++ b/p4a/pythonforandroid/recipes/numpy/patches/ranlib.patch @@ -0,0 +1,11 @@ +diff -Naur numpy.orig/numpy/distutils/unixccompiler.py numpy/numpy/distutils/unixccompiler.py +--- numpy.orig/numpy/distutils/unixccompiler.py 2022-05-28 10:22:10.000000000 +0200 ++++ numpy/numpy/distutils/unixccompiler.py 2022-05-28 10:22:24.000000000 +0200 +@@ -124,6 +124,7 @@ + # platform intelligence here to skip ranlib if it's not + # needed -- or maybe Python's configure script took care of + # it for us, hence the check for leading colon. ++ self.ranlib = [os.environ.get('RANLIB')] + if self.ranlib: + display = '%s:@ %s' % (os.path.basename(self.ranlib[0]), + output_filename) diff --git a/p4a/pythonforandroid/recipes/numpy/patches/remove-default-paths.patch b/p4a/pythonforandroid/recipes/numpy/patches/remove-default-paths.patch new file mode 100644 index 0000000..3581f0f --- /dev/null +++ b/p4a/pythonforandroid/recipes/numpy/patches/remove-default-paths.patch @@ -0,0 +1,28 @@ +diff --git a/numpy/distutils/system_info.py b/numpy/distutils/system_info.py +index fc7018a..7b514bc 100644 +--- a/numpy/distutils/system_info.py ++++ b/numpy/distutils/system_info.py +@@ -340,10 +340,10 @@ if os.path.join(sys.prefix, 'lib') not in default_lib_dirs: + default_include_dirs.append(os.path.join(sys.prefix, 'include')) + default_src_dirs.append(os.path.join(sys.prefix, 'src')) + +-default_lib_dirs = [_m for _m in default_lib_dirs if os.path.isdir(_m)] +-default_runtime_dirs = [_m for _m in default_runtime_dirs if os.path.isdir(_m)] +-default_include_dirs = [_m for _m in default_include_dirs if os.path.isdir(_m)] +-default_src_dirs = [_m for _m in default_src_dirs if os.path.isdir(_m)] ++default_lib_dirs = [] #[_m for _m in default_lib_dirs if os.path.isdir(_m)] ++default_runtime_dirs =[] # [_m for _m in default_runtime_dirs if os.path.isdir(_m)] ++default_include_dirs =[] # [_m for _m in default_include_dirs if os.path.isdir(_m)] ++default_src_dirs =[] # [_m for _m in default_src_dirs if os.path.isdir(_m)] + + so_ext = get_shared_lib_extension() + +@@ -814,7 +814,7 @@ class system_info(object): + path = self.get_paths(self.section, key) + if path == ['']: + path = [] +- return path ++ return [] + + def get_include_dirs(self, key='include_dirs'): + return self.get_paths(self.section, key) diff --git a/p4a/pythonforandroid/recipes/omemo-backend-signal/__init__.py b/p4a/pythonforandroid/recipes/omemo-backend-signal/__init__.py index c87034c..8efa815 100644 --- a/p4a/pythonforandroid/recipes/omemo-backend-signal/__init__.py +++ b/p4a/pythonforandroid/recipes/omemo-backend-signal/__init__.py @@ -3,7 +3,7 @@ from pythonforandroid.recipe import PythonRecipe class OmemoBackendSignalRecipe(PythonRecipe): name = 'omemo-backend-signal' - version = '0.2.2' + version = '0.2.5' url = 'https://pypi.python.org/packages/source/o/omemo-backend-signal/omemo-backend-signal-{version}.tar.gz' site_packages_name = 'omemo-backend-signal' depends = [ @@ -15,7 +15,6 @@ class OmemoBackendSignalRecipe(PythonRecipe): 'cryptography', 'omemo', ] - patches = ['wireformat.patch'] call_hostpython_via_targetpython = False diff --git a/p4a/pythonforandroid/recipes/omemo/__init__.py b/p4a/pythonforandroid/recipes/omemo/__init__.py index a940105..7ea3d68 100644 --- a/p4a/pythonforandroid/recipes/omemo/__init__.py +++ b/p4a/pythonforandroid/recipes/omemo/__init__.py @@ -3,7 +3,7 @@ from 
pythonforandroid.recipe import PythonRecipe class OmemoRecipe(PythonRecipe): name = 'omemo' - version = '0.10.3' + version = '0.11.0' url = 'https://pypi.python.org/packages/source/O/OMEMO/OMEMO-{version}.tar.gz' site_packages_name = 'omemo' depends = [ diff --git a/p4a/pythonforandroid/recipes/openal/__init__.py b/p4a/pythonforandroid/recipes/openal/__init__.py index ad93065..f5b7d01 100644 --- a/p4a/pythonforandroid/recipes/openal/__init__.py +++ b/p4a/pythonforandroid/recipes/openal/__init__.py @@ -1,32 +1,24 @@ from pythonforandroid.recipe import NDKRecipe from pythonforandroid.toolchain import current_directory, shprint from os.path import join -import os import sh class OpenALRecipe(NDKRecipe): - version = '1.18.2' - url = 'https://github.com/kcat/openal-soft/archive/openal-soft-{version}.tar.gz' + version = '1.21.1' + url = 'https://github.com/kcat/openal-soft/archive/refs/tags/{version}.tar.gz' generated_libraries = ['libopenal.so'] - def prebuild_arch(self, arch): - # we need to build native tools for host system architecture - with current_directory(join(self.get_build_dir(arch.arch), 'native-tools')): - shprint(sh.cmake, '.', _env=os.environ) - shprint(sh.make, _env=os.environ) - def build_arch(self, arch): with current_directory(self.get_build_dir(arch.arch)): env = self.get_recipe_env(arch) cmake_args = [ - '-DCMAKE_TOOLCHAIN_FILE={}'.format('XCompile-Android.txt'), - '-DHOST={}'.format(self.ctx.toolchain_prefix) + "-DANDROID_STL=" + self.stl_lib_name, + "-DCMAKE_TOOLCHAIN_FILE={}".format( + join(self.ctx.ndk_dir, "build", "cmake", "android.toolchain.cmake") + ), ] - if self.ctx.ndk == 'crystax': - # avoids a segfault in libcrystax when calling lrintf - cmake_args += ['-DHAVE_LRINTF=0'] shprint( sh.cmake, '.', *cmake_args, diff --git a/p4a/pythonforandroid/recipes/opencv/__init__.py b/p4a/pythonforandroid/recipes/opencv/__init__.py index 6932bc2..c760cbd 100644 --- a/p4a/pythonforandroid/recipes/opencv/__init__.py +++ b/p4a/pythonforandroid/recipes/opencv/__init__.py @@ -1,10 +1,8 @@ from os.path import join import sh from pythonforandroid.recipe import NDKRecipe -from pythonforandroid.toolchain import ( - current_directory, - shprint, -) +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint from multiprocessing import cpu_count @@ -15,7 +13,7 @@ class OpenCVRecipe(NDKRecipe): build of most of the libraries of the opencv's package, so we can process images, videos, objects, photos... 
'''
- version = '4.0.1'
+ version = '4.5.1'
url = 'https://github.com/opencv/opencv/archive/{version}.zip'
depends = ['numpy']
patches = ['patches/p4a_build.patch']
@@ -33,14 +31,14 @@ class OpenCVRecipe(NDKRecipe):
'libopencv_video.so',
'libopencv_dnn.so',
'libopencv_imgcodecs.so',
- 'libopencv_photo.so'
+ 'libopencv_photo.so',
]
def get_lib_dir(self, arch):
return join(self.get_build_dir(arch.arch), 'build', 'lib', arch.arch)
def get_recipe_env(self, arch):
- env = super(OpenCVRecipe, self).get_recipe_env(arch)
+ env = super().get_recipe_env(arch)
env['ANDROID_NDK'] = self.ctx.ndk_dir
env['ANDROID_SDK'] = self.ctx.sdk_dir
return env
@@ -48,16 +46,24 @@ class OpenCVRecipe(NDKRecipe):
def build_arch(self, arch):
build_dir = join(self.get_build_dir(arch.arch), 'build')
shprint(sh.mkdir, '-p', build_dir)
+
+ opencv_extras = []
+ if 'opencv_extras' in self.ctx.recipe_build_order:
+ opencv_extras_dir = self.get_recipe(
+ 'opencv_extras', self.ctx).get_build_dir(arch.arch)
+ opencv_extras = [
+ f'-DOPENCV_EXTRA_MODULES_PATH={opencv_extras_dir}/modules',
+ '-DBUILD_opencv_legacy=OFF',
+ ]
+
with current_directory(build_dir):
env = self.get_recipe_env(arch)
python_major = self.ctx.python_recipe.version[0]
python_include_root = self.ctx.python_recipe.include_root(arch.arch)
- python_site_packages = self.ctx.get_site_packages_dir()
+ python_site_packages = self.ctx.get_site_packages_dir(arch)
python_link_root = self.ctx.python_recipe.link_root(arch.arch)
- python_link_version = self.ctx.python_recipe.major_minor_version_string
- if 'python3' in self.ctx.python_recipe.name:
- python_link_version += 'm'
+ python_link_version = self.ctx.python_recipe.link_version
python_library = join(python_link_root, 'libpython{}.so'.format(python_link_version))
python_include_numpy = join(python_site_packages,
@@ -69,6 +75,8 @@ class OpenCVRecipe(NDKRecipe):
'-DANDROID_STANDALONE_TOOLCHAIN={}'.format(self.ctx.ndk_dir),
'-DANDROID_NATIVE_API_LEVEL={}'.format(self.ctx.ndk_api),
'-DANDROID_EXECUTABLE={}/tools/android'.format(env['ANDROID_SDK']),
+ '-DANDROID_SDK_TOOLS_VERSION=6514223',
+ '-DANDROID_PROJECTS_SUPPORT_GRADLE=ON',
'-DCMAKE_TOOLCHAIN_FILE={}'.format(
join(self.ctx.ndk_dir, 'build', 'cmake',
@@ -122,6 +130,8 @@ class OpenCVRecipe(NDKRecipe):
'-DPYTHON{major}_PACKAGES_PATH={site_packages}'.format(
major=python_major, site_packages=python_site_packages),
+ *opencv_extras,
+
self.get_build_dir(arch.arch),
_env=env)
shprint(sh.make, '-j' + str(cpu_count()), 'opencv_python' + python_major)
diff --git a/p4a/pythonforandroid/recipes/opencv_extras/__init__.py b/p4a/pythonforandroid/recipes/opencv_extras/__init__.py
new file mode 100644
index 0000000..693c365
--- /dev/null
+++ b/p4a/pythonforandroid/recipes/opencv_extras/__init__.py
@@ -0,0 +1,23 @@
+from pythonforandroid.recipe import Recipe
+
+
+class OpenCVExtrasRecipe(Recipe):
+ """
+ OpenCV extras recipe allows us to build extra modules from the
+ `opencv_contrib` repository. It depends on the opencv recipe, and all
+ the modules will be built inside the opencv recipe's build directory.
+
+ .. note:: the version of this recipe should be the same as the
+ opencv recipe.
+
+ .. warning:: Be aware that these modules are experimental; some of them
+ may be included in future opencv releases and removed from extras.
+
+ ..
seealso:: https://github.com/opencv/opencv_contrib + + """ + version = '4.5.1' + url = 'https://github.com/opencv/opencv_contrib/archive/{version}.zip' + depends = ['opencv'] + + +recipe = OpenCVExtrasRecipe() diff --git a/p4a/pythonforandroid/recipes/openssl/__init__.py b/p4a/pythonforandroid/recipes/openssl/__init__.py index 3a9505f..520fe6d 100644 --- a/p4a/pythonforandroid/recipes/openssl/__init__.py +++ b/p4a/pythonforandroid/recipes/openssl/__init__.py @@ -1,6 +1,8 @@ from os.path import join -from pythonforandroid.toolchain import Recipe, shprint, current_directory +from pythonforandroid.recipe import Recipe +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint import sh @@ -20,11 +22,6 @@ class OpenSSLRecipe(Recipe): using the methods: :meth:`include_flags`, :meth:`link_dirs_flags` and :meth:`link_libs_flags`. - .. note:: the python2legacy version is too old to support openssl 1.1+, so - we must use version 1.0.x. Also python3crystax is not building - successfully with openssl libs 1.1+ so we use the legacy version as - we do with python2legacy. - .. warning:: This recipe is very sensitive because is used for our core recipes, the python recipes. The used API should match with the one used in our python build, otherwise we will be unable to build the @@ -41,40 +38,24 @@ class OpenSSLRecipe(Recipe): - Add ability to build a legacy version of the openssl libs when using python2legacy or python3crystax. + .. versionchanged:: 2019.06.06.1.dev0 + + - Removed legacy version of openssl libraries + ''' - standard_version = '1.1' + version = '1.1' '''the major minor version used to link our recipes''' - legacy_version = '1.0' - '''the major minor version used to link our recipes when using - python2legacy or python3crystax''' - standard_url_version = '1.1.1' + url_version = '1.1.1m' '''the version used to download our libraries''' - legacy_url_version = '1.0.2q' - '''the version used to download our libraries when using python2legacy or - python3crystax''' url = 'https://www.openssl.org/source/openssl-{url_version}.tar.gz' - @property - def use_legacy(self): - if not self.ctx.recipe_build_order: - return False - return any([i for i in ('python2legacy', 'python3crystax') if - i in self.ctx.recipe_build_order]) - - @property - def version(self): - if self.use_legacy: - return self.legacy_version - return self.standard_version - - @property - def url_version(self): - if self.use_legacy: - return self.legacy_url_version - return self.standard_url_version + built_libraries = { + 'libcrypto{version}.so'.format(version=version): '.', + 'libssl{version}.so'.format(version=version): '.', + } @property def versioned_url(self): @@ -83,7 +64,9 @@ class OpenSSLRecipe(Recipe): return self.url.format(url_version=self.url_version) def get_build_dir(self, arch): - return join(self.get_build_container_dir(arch), self.name + self.version) + return join( + self.get_build_container_dir(arch), self.name + self.version + ) def include_flags(self, arch): '''Returns a string with the include folders''' @@ -109,27 +92,20 @@ class OpenSSLRecipe(Recipe): in the format: `-L -l`''' return self.link_dirs_flags(arch) + self.link_libs_flags() - def should_build(self, arch): - return not self.has_libs(arch, 'libssl' + self.version + '.so', - 'libcrypto' + self.version + '.so') - def get_recipe_env(self, arch=None): - env = super(OpenSSLRecipe, self).get_recipe_env(arch, clang=not self.use_legacy) + env = super().get_recipe_env(arch) env['OPENSSL_VERSION'] = self.version 
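For reference, dependent recipes are expected to consume the three flag helpers described in the openssl docstring above roughly as follows. This is a minimal sketch modeled on the pycrypto recipe later in this diff; `SomeSSLConsumerRecipe` is a hypothetical consumer, not code from the changeset:

    from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe

    class SomeSSLConsumerRecipe(CompiledComponentsPythonRecipe):
        depends = ['openssl']

        def get_recipe_env(self, arch=None):
            env = super().get_recipe_env(arch)
            openssl = Recipe.get_recipe('openssl', self.ctx)
            # -I flags so the openssl headers resolve at compile time
            env['CC'] += openssl.include_flags(arch)
            # -L and -l flags so the linker finds libssl/libcrypto
            env['LDFLAGS'] += openssl.link_dirs_flags(arch)
            env['LIBS'] = env.get('LIBS', '') + openssl.link_libs_flags()
            return env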
env['MAKE'] = 'make' # This removes the '-j5', which isn't safe - if self.use_legacy: - env['CFLAGS'] += ' ' + env['LDFLAGS'] - env['CC'] += ' ' + env['LDFLAGS'] - else: - env['ANDROID_NDK'] = self.ctx.ndk_dir + env['CC'] = 'clang' + env['ANDROID_NDK_HOME'] = self.ctx.ndk_dir return env def select_build_arch(self, arch): aname = arch.arch if 'arm64' in aname: - return 'android-arm64' if not self.use_legacy else 'linux-aarch64' + return 'android-arm64' if 'v7a' in aname: - return 'android-arm' if not self.use_legacy else 'android-armv7' + return 'android-arm' if 'arm' in aname: return 'android' if 'x86_64' in aname: @@ -145,29 +121,17 @@ class OpenSSLRecipe(Recipe): # so instead we manually run perl passing in Configure perl = sh.Command('perl') buildarch = self.select_build_arch(arch) - # XXX if we don't have no-asm, using clang and ndk-15c, i got: - # crypto/aes/bsaes-armv7.S:1372:14: error: immediate operand must be in the range [0,4095] - # add r8, r6, #.LREVM0SR-.LM0 @ borrow r8 - # ^ - # crypto/aes/bsaes-armv7.S:1434:14: error: immediate operand must be in the range [0,4095] - # sub r6, r8, #.LREVM0SR-.LSR @ pass constants - config_args = ['shared', 'no-dso', 'no-asm'] - if self.use_legacy: - config_args.append('no-krb5') - config_args.append(buildarch) - if not self.use_legacy: - config_args.append('-D__ANDROID_API__={}'.format(self.ctx.ndk_api)) + config_args = [ + 'shared', + 'no-dso', + 'no-asm', + buildarch, + '-D__ANDROID_API__={}'.format(self.ctx.ndk_api), + ] shprint(perl, 'Configure', *config_args, _env=env) - self.apply_patch( - 'disable-sover{}.patch'.format( - '-legacy' if self.use_legacy else ''), arch.arch) - if self.use_legacy: - self.apply_patch('rename-shared-lib.patch', arch.arch) + self.apply_patch('disable-sover.patch', arch.arch) shprint(sh.make, 'build_libs', _env=env) - self.install_libs(arch, 'libssl' + self.version + '.so', - 'libcrypto' + self.version + '.so') - recipe = OpenSSLRecipe() diff --git a/p4a/pythonforandroid/recipes/pandas/__init__.py b/p4a/pythonforandroid/recipes/pandas/__init__.py new file mode 100644 index 0000000..a217ab6 --- /dev/null +++ b/p4a/pythonforandroid/recipes/pandas/__init__.py @@ -0,0 +1,35 @@ +from os.path import join + +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe + + +class PandasRecipe(CppCompiledComponentsPythonRecipe): + version = '1.0.3' + url = 'https://github.com/pandas-dev/pandas/releases/download/v{version}/pandas-{version}.tar.gz' # noqa + + depends = ['cython', 'numpy', 'pytz', 'libbz2', 'liblzma'] + + python_depends = ['python-dateutil'] + patches = ['fix_numpy_includes.patch'] + + call_hostpython_via_targetpython = False + need_stl_shared = True + + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + # we need the includes from our installed numpy at site packages + # because we need some includes generated at numpy's compile time + env['NUMPY_INCLUDES'] = join( + self.ctx.get_python_install_dir(arch.arch), "numpy/core/include", + ) + + # this flag below is to fix a runtime error: + # ImportError: dlopen failed: cannot locate symbol + # "_ZTVSt12length_error" referenced by + # "/data/data/org.test.matplotlib_testapp/files/app/_python_bundle + # /site-packages/pandas/_libs/window/aggregations.so"... 
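The NUMPY_INCLUDES variable set above is picked up by this recipe's fix_numpy_includes.patch (shown just below); a condensed sketch of that handoff, using the same names as the patch:

    import os
    import pkg_resources

    def numpy_include_dir():
        # Prefer the include directory exported by the p4a recipe; fall back
        # to the host interpreter's numpy when building outside of p4a.
        if 'NUMPY_INCLUDES' in os.environ:
            return os.environ['NUMPY_INCLUDES']
        return pkg_resources.resource_filename('numpy', 'core/include')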
+ env['LDFLAGS'] += f' -landroid -l{self.stl_lib_name}' + return env + + +recipe = PandasRecipe() diff --git a/p4a/pythonforandroid/recipes/pandas/fix_numpy_includes.patch b/p4a/pythonforandroid/recipes/pandas/fix_numpy_includes.patch new file mode 100644 index 0000000..ef1643b --- /dev/null +++ b/p4a/pythonforandroid/recipes/pandas/fix_numpy_includes.patch @@ -0,0 +1,31 @@ +--- pandas-1.0.1/setup.py.orig 2020-02-05 17:15:24.000000000 +0100 ++++ pandas-1.0.1/setup.py 2020-03-15 13:47:57.612237225 +0100 +@@ -37,11 +37,12 @@ min_cython_ver = "0.29.13" # note: sync + + setuptools_kwargs = { + "install_requires": [ +- "python-dateutil >= 2.6.1", +- "pytz >= 2017.2", +- f"numpy >= {min_numpy_ver}", ++ # dependencies managed via p4a's recipe ++ # "python-dateutil >= 2.6.1", ++ # "pytz >= 2017.2", ++ # f"numpy >= {min_numpy_ver}", + ], +- "setup_requires": [f"numpy >= {min_numpy_ver}"], ++ "setup_requires": [], + "zip_safe": False, + } + +@@ -514,7 +515,10 @@ def maybe_cythonize(extensions, *args, * + ) + raise RuntimeError("Cannot cythonize without Cython installed.") + +- numpy_incl = pkg_resources.resource_filename("numpy", "core/include") ++ if 'NUMPY_INCLUDES' in os.environ: ++ numpy_incl = os.environ['NUMPY_INCLUDES'] ++ else: ++ numpy_incl = pkg_resources.resource_filename("numpy", "core/include") + # TODO: Is this really necessary here? + for ext in extensions: + if hasattr(ext, "include_dirs") and numpy_incl not in ext.include_dirs: diff --git a/p4a/pythonforandroid/recipes/pil/__init__.py b/p4a/pythonforandroid/recipes/pil/__init__.py index f3ad2f4..46bace1 100644 --- a/p4a/pythonforandroid/recipes/pil/__init__.py +++ b/p4a/pythonforandroid/recipes/pil/__init__.py @@ -1,79 +1,23 @@ -from os.path import join, exists -from pythonforandroid.recipe import CompiledComponentsPythonRecipe -from pythonforandroid.toolchain import shprint -import sh +from pythonforandroid.recipes.Pillow import PillowRecipe +from pythonforandroid.logger import warning -class PILRecipe(CompiledComponentsPythonRecipe): - name = 'pil' - version = '1.1.7' - url = 'http://effbot.org/downloads/Imaging-{version}.tar.gz' - depends = ['png', 'jpeg', 'setuptools'] - opt_depends = ['freetype'] - site_packages_name = 'PIL' +class PilRecipe(PillowRecipe): + """A transparent wrapper around the Pillow recipe, it should build + Pillow automatically as if "pillow" were specified in the + requirements. 
+ """ - patches = ['disable-tk.patch', - 'fix-directories.patch'] + name = 'Pillow' # ensures the Pillow recipe directory is used where necessary - def get_recipe_env(self, arch=None, with_flags_in_cc=True): - env = super(PILRecipe, self).get_recipe_env(arch, with_flags_in_cc) + conflicts = ['pillow'] - env['PYTHON_INCLUDE_ROOT'] = self.ctx.python_recipe.include_root(arch.arch) - env['PYTHON_LINK_ROOT'] = self.ctx.python_recipe.link_root(arch.arch) - - ndk_lib_dir = join(self.ctx.ndk_platform, 'usr', 'lib') - ndk_include_dir = join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include') - - png = self.get_recipe('png', self.ctx) - png_lib_dir = png.get_lib_dir(arch) - png_jni_dir = png.get_jni_dir(arch) - - jpeg = self.get_recipe('jpeg', self.ctx) - jpeg_inc_dir = jpeg_lib_dir = jpeg.get_build_dir(arch.arch) - - if 'freetype' in self.ctx.recipe_build_order: - freetype = self.get_recipe('freetype', self.ctx) - free_lib_dir = join(freetype.get_build_dir(arch.arch), 'objs', '.libs') - free_inc_dir = join(freetype.get_build_dir(arch.arch), 'include') - # hack freetype to be found by pil - freetype_link = join(free_inc_dir, 'freetype') - if not exists(freetype_link): - shprint(sh.ln, '-s', join(free_inc_dir), freetype_link) - - harfbuzz = self.get_recipe('harfbuzz', self.ctx) - harf_lib_dir = join(harfbuzz.get_build_dir(arch.arch), 'src', '.libs') - harf_inc_dir = harfbuzz.get_build_dir(arch.arch) - - env['FREETYPE_ROOT'] = '{}|{}'.format(free_lib_dir, free_inc_dir) - - env['JPEG_ROOT'] = '{}|{}'.format(jpeg_lib_dir, jpeg_inc_dir) - env['ZLIB_ROOT'] = '{}|{}'.format(ndk_lib_dir, ndk_include_dir) - - cflags = ' -std=c99' - cflags += ' -I{}'.format(png_jni_dir) - if 'freetype' in self.ctx.recipe_build_order: - cflags += ' -I{} -I{}'.format(harf_inc_dir, join(harf_inc_dir, 'src')) - cflags += ' -I{}'.format(free_inc_dir) - cflags += ' -I{}'.format(jpeg_inc_dir) - cflags += ' -I{}'.format(ndk_include_dir) - - py_v = self.ctx.python_recipe.major_minor_version_string - if py_v[0] == '3': - py_v += 'm' - - env['LIBS'] = ' -lpython{version} -lpng'.format(version=py_v) - if 'freetype' in self.ctx.recipe_build_order: - env['LIBS'] += ' -lfreetype -lharfbuzz' - env['LIBS'] += ' -ljpeg -lturbojpeg' - - env['LDFLAGS'] += ' -L{} -L{}'.format(env['PYTHON_LINK_ROOT'], png_lib_dir) - if 'freetype' in self.ctx.recipe_build_order: - env['LDFLAGS'] += ' -L{} -L{}'.format(harf_lib_dir, free_lib_dir) - env['LDFLAGS'] += ' -L{} -L{}'.format(jpeg_lib_dir, ndk_lib_dir) - - if cflags not in env['CFLAGS']: - env['CFLAGS'] += cflags - return env + def build_arch(self, arch): + warning('PIL is no longer supported, building Pillow instead. ' + 'This should be a drop-in replacement.') + warning('It is recommended to change "pil" to "pillow" in your requirements, ' + 'to ensure future compatibility') + super().build_arch(arch) -recipe = PILRecipe() +recipe = PilRecipe() diff --git a/p4a/pythonforandroid/recipes/png/__init__.py b/p4a/pythonforandroid/recipes/png/__init__.py index 5b69688..6138195 100644 --- a/p4a/pythonforandroid/recipes/png/__init__.py +++ b/p4a/pythonforandroid/recipes/png/__init__.py @@ -1,19 +1,30 @@ -from pythonforandroid.recipe import NDKRecipe +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import shprint +from pythonforandroid.util import current_directory +from multiprocessing import cpu_count +import sh -class PngRecipe(NDKRecipe): +class PngRecipe(Recipe): name = 'png' - # This version is the last `sha commit` published in the repo (it's more - # than one year old...) 
and it's for libpng version `1.6.29`. We set a - commit for a version because the author of the github's repo never - released/tagged it, despite He performed the necessary changes in - master branch. - version = 'b43b4c6' + version = '1.6.37' + url = 'https://github.com/glennrp/libpng/archive/v{version}.zip' + built_libraries = {'libpng16.so': '.libs'} - # TODO: Try to move the repo to mainline - url = 'https://github.com/julienr/libpng-android/archive/{version}.zip' - - generated_libraries = ['libpng.a'] + def build_arch(self, arch): + build_dir = self.get_build_dir(arch.arch) + with current_directory(build_dir): + env = self.get_recipe_env(arch) + shprint( + sh.Command('./configure'), + '--host=' + arch.command_prefix, + '--target=' + arch.command_prefix, + '--disable-static', + '--enable-shared', + '--prefix={}/install'.format(self.get_build_dir(arch.arch)), + _env=env, + ) + shprint(sh.make, '-j', str(cpu_count()), _env=env) recipe = PngRecipe() diff --git a/p4a/pythonforandroid/recipes/png/build_shared_library.patch b/p4a/pythonforandroid/recipes/png/build_shared_library.patch new file mode 100644 index 0000000..01e3080 --- /dev/null +++ b/p4a/pythonforandroid/recipes/png/build_shared_library.patch @@ -0,0 +1,17 @@ +diff --git a/jni/Android.mk b/jni/Android.mk +index df2ff1a..2f70985 100644 +--- a/jni/Android.mk ++++ b/jni/Android.mk +@@ -26,8 +26,9 @@ LOCAL_SRC_FILES :=\ + arm/filter_neon_intrinsics.c + + #LOCAL_SHARED_LIBRARIES := -lz +-LOCAL_EXPORT_LDLIBS := -lz ++# LOCAL_EXPORT_LDLIBS := -lz ++LOCAL_LDLIBS := -lz + LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/. + +-#include $(BUILD_SHARED_LIBRARY) +-include $(BUILD_STATIC_LIBRARY) ++include $(BUILD_SHARED_LIBRARY) ++# include $(BUILD_STATIC_LIBRARY) diff --git a/p4a/pythonforandroid/recipes/protobuf_cpp/__init__.py b/p4a/pythonforandroid/recipes/protobuf_cpp/__init__.py index 30ca030..c1149f2 100644 --- a/p4a/pythonforandroid/recipes/protobuf_cpp/__init__.py +++ b/p4a/pythonforandroid/recipes/protobuf_cpp/__init__.py @@ -1,6 +1,6 @@ -from pythonforandroid.recipe import PythonRecipe +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe from pythonforandroid.logger import shprint, info_notify -from pythonforandroid.util import current_directory, shutil +from pythonforandroid.util import current_directory from os.path import exists, join import sh from multiprocessing import cpu_count @@ -9,17 +9,23 @@ import sys import os -class ProtobufCppRecipe(PythonRecipe): +class ProtobufCppRecipe(CppCompiledComponentsPythonRecipe): + """This is a two-in-one recipe: + - build the library `libprotobuf.so` + - build and install the python bindings for protobuf_cpp + """ name = 'protobuf_cpp' version = '3.6.1' url = 'https://github.com/google/protobuf/releases/download/v{version}/protobuf-python-{version}.tar.gz' call_hostpython_via_targetpython = False depends = ['cffi', 'setuptools'] site_packages_name = 'google/protobuf/pyext' + setup_extra_args = ['--cpp_implementation'] + built_libraries = {'libprotobuf.so': 'src/.libs'} protoc_dir = None def prebuild_arch(self, arch): - super(ProtobufCppRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) patch_mark = join(self.get_build_dir(arch.arch), '.protobuf-patched') if self.ctx.python_recipe.name == 'python3' and not exists(patch_mark): @@ -65,42 +71,39 @@ class ProtobufCppRecipe(PythonRecipe): def build_arch(self, arch): env = self.get_recipe_env(arch) - # Build libproto.a + # Build libproto.so with current_directory(self.get_build_dir(arch.arch)): - env['HOSTARCH'] =
'arm-eabi' - env['BUILDARCH'] = shprint(sh.gcc, '-dumpmachine').stdout.decode('utf-8').split('\n')[0] + build_arch = ( + shprint(sh.gcc, '-dumpmachine') + .stdout.decode('utf-8') + .split('\n')[0] + ) if not exists('configure'): shprint(sh.Command('./autogen.sh'), _env=env) shprint(sh.Command('./configure'), - '--host={}'.format(env['HOSTARCH']), + '--build={}'.format(build_arch), + '--host={}'.format(arch.command_prefix), + '--target={}'.format(arch.command_prefix), + '--disable-static', '--enable-shared', _env=env) with current_directory(join(self.get_build_dir(arch.arch), 'src')): shprint(sh.make, 'libprotobuf.la', '-j'+str(cpu_count()), _env=env) - shprint(sh.cp, '.libs/libprotobuf.a', join(self.ctx.get_libs_dir(arch.arch), 'libprotobuf.a')) - # Copy stl library - shutil.copyfile( - self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/libs/' + arch.arch + '/libgnustl_shared.so', - join(self.ctx.get_libs_dir(arch.arch), 'libgnustl_shared.so')) + self.install_python_package(arch) + def build_compiled_components(self, arch): # Build python bindings and _message.so + env = self.get_recipe_env(arch) with current_directory(join(self.get_build_dir(arch.arch), 'python')): hostpython = sh.Command(self.hostpython_location) shprint(hostpython, 'setup.py', 'build_ext', - '--cpp_implementation', _env=env) - - # Install python bindings - self.install_python_package(arch) - - # Create __init__.py which is missing (cf. https://github.com/protocolbuffers/protobuf/issues/1296 - # and https://stackoverflow.com/questions/13862562/google-protocol-buffers-not-found-when-trying-to-freeze-python-app) - open(join(self.ctx.get_site_packages_dir(), 'google', '__init__.py'), 'a').close() + _env=env, *self.setup_extra_args) def install_python_package(self, arch): env = self.get_recipe_env(arch) @@ -112,34 +115,27 @@ class ProtobufCppRecipe(PythonRecipe): hpenv = env.copy() shprint(hostpython, 'setup.py', 'install', '-O2', - '--root={}'.format(self.ctx.get_python_install_dir()), + '--root={}'.format(self.ctx.get_python_install_dir(arch.arch)), '--install-lib=.', - '--cpp_implementation', _env=hpenv, *self.setup_extra_args) + # Create __init__.py which is missing, see also: + # - https://github.com/protocolbuffers/protobuf/issues/1296 + # - https://stackoverflow.com/questions/13862562/ + # google-protocol-buffers-not-found-when-trying-to-freeze-python-app + open( + join(self.ctx.get_site_packages_dir(arch), 'google', '__init__.py'), + 'a', + ).close() + def get_recipe_env(self, arch): - env = super(ProtobufCppRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) if self.protoc_dir is not None: # we need protoc with binary for host platform env['PROTOC'] = join(self.protoc_dir, 'bin', 'protoc') env['TARGET_OS'] = 'OS_ANDROID_CROSSCOMPILE' - env['CFLAGS'] += ( - ' -I' + self.ctx.ndk_dir + '/platforms/android-' + - str(self.ctx.android_api) + - '/arch-' + arch.arch.replace('eabi', '') + '/usr/include' + - ' -I' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + - self.ctx.toolchain_version + '/include' + - ' -I' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + - self.ctx.toolchain_version + '/libs/' + arch.arch + '/include') - env['CFLAGS'] += ' -std=gnu++11' - env['CXXFLAGS'] = env['CFLAGS'] - env['CXXFLAGS'] += ' -frtti' - env['CXXFLAGS'] += ' -fexceptions' - env['LDFLAGS'] += ( - ' -lgnustl_shared -landroid -llog' + - ' -L' + self.ctx.ndk_dir + - '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + - '/libs/' + arch.arch) + env['CXXFLAGS'] 
+= ' -std=c++11' + env['LDFLAGS'] += ' -lm -landroid -llog' return env diff --git a/p4a/pythonforandroid/recipes/psycopg2/__init__.py b/p4a/pythonforandroid/recipes/psycopg2/__init__.py index aaf5a33..1d946e7 100644 --- a/p4a/pythonforandroid/recipes/psycopg2/__init__.py +++ b/p4a/pythonforandroid/recipes/psycopg2/__init__.py @@ -10,9 +10,9 @@ class Psycopg2Recipe(PythonRecipe): `ANDROID_API` (`ndk-api`) >= 26, see: https://github.com/kivy/python-for-android/issues/1711#issuecomment-465747557 """ - version = 'latest' - url = 'http://initd.org/psycopg/tarballs/psycopg2-{version}.tar.gz' - depends = ['libpq'] + version = '2.8.5' + url = 'https://pypi.python.org/packages/source/p/psycopg2/psycopg2-{version}.tar.gz' + depends = ['libpq', 'setuptools'] site_packages_name = 'psycopg2' call_hostpython_via_targetpython = False @@ -26,7 +26,7 @@ class Psycopg2Recipe(PythonRecipe): 'setup.py') def get_recipe_env(self, arch): - env = super(Psycopg2Recipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) env['LDFLAGS'] = "{} -L{}".format(env['LDFLAGS'], self.ctx.get_libs_dir(arch.arch)) env['EXTRA_CFLAGS'] = "--host linux-armv" return env @@ -43,7 +43,7 @@ class Psycopg2Recipe(PythonRecipe): shprint(hostpython, 'setup.py', 'build_ext', '--static-libpq', _env=env) shprint(hostpython, 'setup.py', 'install', '-O2', - '--root={}'.format(self.ctx.get_python_install_dir()), + '--root={}'.format(self.ctx.get_python_install_dir(arch.arch)), '--install-lib=.', _env=env) diff --git a/p4a/pythonforandroid/recipes/py3dns/__init__.py b/p4a/pythonforandroid/recipes/py3dns/__init__.py new file mode 100644 index 0000000..bccb39f --- /dev/null +++ b/p4a/pythonforandroid/recipes/py3dns/__init__.py @@ -0,0 +1,13 @@ +from pythonforandroid.recipe import PythonRecipe + + +class Py3DNSRecipe(PythonRecipe): + site_packages_name = 'DNS' + version = '3.2.1' + url = 'https://launchpad.net/py3dns/trunk/{version}/+download/py3dns-{version}.tar.gz' + depends = ['setuptools'] + patches = ['patches/android.patch'] + call_hostpython_via_targetpython = False + + +recipe = Py3DNSRecipe() diff --git a/p4a/pythonforandroid/recipes/py3dns/patches/android.patch b/p4a/pythonforandroid/recipes/py3dns/patches/android.patch new file mode 100644 index 0000000..f9ab78f --- /dev/null +++ b/p4a/pythonforandroid/recipes/py3dns/patches/android.patch @@ -0,0 +1,27 @@ +diff --git a/DNS/Base.py b/DNS/Base.py +index 34a6da7..a558889 100644 +--- a/DNS/Base.py ++++ b/DNS/Base.py +@@ -15,6 +15,7 @@ import socket, string, types, time, select + import errno + from . import Type,Class,Opcode + import asyncore ++import os + # + # This random generator is used for transaction ids and port selection. 
This + # is important to prevent spurious results from lost packets, and malicious +@@ -50,8 +51,12 @@ defaults= { 'protocol':'udp', 'port':53, 'opcode':Opcode.QUERY, + + def ParseResolvConf(resolv_path="/etc/resolv.conf"): + "parses the /etc/resolv.conf file and sets defaults for name servers" +- with open(resolv_path, 'r') as stream: +- return ParseResolvConfFromIterable(stream) ++ if os.path.exists(resolv_path): ++ with open(resolv_path, 'r') as stream: ++ return ParseResolvConfFromIterable(stream) ++ else: ++ defaults['server'].append('127.0.0.1') ++ return + + def ParseResolvConfFromIterable(lines): + "parses a resolv.conf formatted stream and sets defaults for name servers" diff --git a/p4a/pythonforandroid/recipes/pybind11/__init__.py b/p4a/pythonforandroid/recipes/pybind11/__init__.py new file mode 100644 index 0000000..affff81 --- /dev/null +++ b/p4a/pythonforandroid/recipes/pybind11/__init__.py @@ -0,0 +1,17 @@ +from pythonforandroid.recipe import PythonRecipe +from os.path import join + + +class Pybind11Recipe(PythonRecipe): + + version = '2.9.0' + url = 'https://github.com/pybind/pybind11/archive/refs/tags/v{version}.zip' + depends = ['setuptools'] + call_hostpython_via_targetpython = False + install_in_hostpython = True + + def get_include_dir(self, arch): + return join(self.get_build_dir(arch.arch), 'include') + + +recipe = Pybind11Recipe() diff --git a/p4a/pythonforandroid/recipes/pycrypto/__init__.py b/p4a/pythonforandroid/recipes/pycrypto/__init__.py index e8bfab2..f142d37 100644 --- a/p4a/pythonforandroid/recipes/pycrypto/__init__.py +++ b/p4a/pythonforandroid/recipes/pycrypto/__init__.py @@ -10,13 +10,13 @@ import sh class PyCryptoRecipe(CompiledComponentsPythonRecipe): version = '2.7a1' url = 'https://github.com/dlitz/pycrypto/archive/v{version}.zip' - depends = ['openssl', ('python2', 'python3')] + depends = ['openssl', 'python3'] site_packages_name = 'Crypto' call_hostpython_via_targetpython = False patches = ['add_length.patch'] - def get_recipe_env(self, arch=None, clang=True): - env = super(PyCryptoRecipe, self).get_recipe_env(arch) + def get_recipe_env(self, arch=None): + env = super().get_recipe_env(arch) openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CC'] = env['CC'] + openssl_recipe.include_flags(arch) @@ -36,9 +36,9 @@ class PyCryptoRecipe(CompiledComponentsPythonRecipe): with current_directory(self.get_build_dir(arch.arch)): configure = sh.Command('./configure') shprint(configure, '--host=arm-eabi', - '--prefix={}'.format(self.ctx.get_python_install_dir()), + '--prefix={}'.format(self.ctx.get_python_install_dir(arch.arch)), '--enable-shared', _env=env) - super(PyCryptoRecipe, self).build_compiled_components(arch) + super().build_compiled_components(arch) recipe = PyCryptoRecipe() diff --git a/p4a/pythonforandroid/recipes/pydantic/__init__.py b/p4a/pythonforandroid/recipes/pydantic/__init__.py new file mode 100644 index 0000000..eb4c504 --- /dev/null +++ b/p4a/pythonforandroid/recipes/pydantic/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import PythonRecipe + + +class PydanticRecipe(PythonRecipe): + version = '1.8.2' + url = 'https://github.com/samuelcolvin/pydantic/archive/refs/tags/v{version}.zip' + depends = ['setuptools'] + python_depends = ['Cython', 'devtools', 'email-validator', 'dataclasses', 'typing-extensions', 'python-dotenv'] + call_hostpython_via_targetpython = False + + +recipe = PydanticRecipe() diff --git a/p4a/pythonforandroid/recipes/pygame/__init__.py b/p4a/pythonforandroid/recipes/pygame/__init__.py index 
981fa44..99124de 100644 --- a/p4a/pythonforandroid/recipes/pygame/__init__.py +++ b/p4a/pythonforandroid/recipes/pygame/__init__.py @@ -1,74 +1,67 @@ - -from pythonforandroid.recipe import Recipe -from pythonforandroid.util import current_directory, ensure_dir -from pythonforandroid.logger import debug, shprint, info, warning from os.path import join -import sh -import glob + +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.toolchain import current_directory -class PygameRecipe(Recipe): +class Pygame2Recipe(CompiledComponentsPythonRecipe): + """ + Recipe to build apps based on SDL2-based pygame. + + .. warning:: Some pygame functionality is still untested, and some + dependencies like freetype, portmidi and libjpeg are currently + not part of the build. It's usable, but not complete. + """ + + version = '2.1.0' + url = 'https://github.com/pygame/pygame/archive/{version}.tar.gz' + + site_packages_name = 'pygame' name = 'pygame' - version = '1.9.1' - url = 'http://pygame.org/ftp/pygame-{version}release.tar.gz' - depends = ['python2legacy', 'sdl'] - conflicts = ['sdl2'] - - patches = ['patches/fix-surface-access.patch', - 'patches/fix-array-surface.patch', - 'patches/fix-sdl-spam-log.patch'] - - def get_recipe_env(self, arch=None): - env = super(PygameRecipe, self).get_recipe_env(arch) - env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format( - self.ctx.get_libs_dir(arch.arch)) - env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink') - env['LIBLINK'] = 'NOTNONE' - env['NDKPLATFORM'] = self.ctx.ndk_platform - - # Every recipe uses its own liblink path, object files are collected and biglinked later - liblink_path = join(self.get_build_container_dir(arch.arch), 'objects_{}'.format(self.name)) - env['LIBLINK_PATH'] = liblink_path - ensure_dir(liblink_path) - return env + depends = ['sdl2', 'sdl2_image', 'sdl2_mixer', 'sdl2_ttf', 'setuptools', 'jpeg', 'png'] + call_hostpython_via_targetpython = False  # Due to setuptools + install_in_hostpython = False def prebuild_arch(self, arch): - if self.is_patched(arch): - return - shprint(sh.cp, join(self.get_recipe_dir(), 'Setup'), - join(self.get_build_dir(arch.arch), 'Setup')) - - def build_arch(self, arch): - env = self.get_recipe_env(arch) - - env['CFLAGS'] = env['CFLAGS'] + ' -I{jni_path}/png -I{jni_path}/jpeg'.format( - jni_path=join(self.ctx.bootstrap.build_dir, 'jni')) - env['CFLAGS'] = env['CFLAGS'] + ' -I{jni_path}/sdl/include -I{jni_path}/sdl_mixer'.format( - jni_path=join(self.ctx.bootstrap.build_dir, 'jni')) - env['CFLAGS'] = env['CFLAGS'] + ' -I{jni_path}/sdl_ttf -I{jni_path}/sdl_image'.format( - jni_path=join(self.ctx.bootstrap.build_dir, 'jni')) - debug('pygame cflags', env['CFLAGS']) - - env['LDFLAGS'] = env['LDFLAGS'] + ' -L{libs_path} -L{src_path}/obj/local/{arch} -lm -lz'.format( - libs_path=self.ctx.libs_dir, src_path=self.ctx.bootstrap.build_dir, arch=env['ARCH']) - - env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink') - + super().prebuild_arch(arch) with current_directory(self.get_build_dir(arch.arch)): - info('hostpython is ' + self.ctx.hostpython) - hostpython = sh.Command(self.ctx.hostpython) - shprint(hostpython, 'setup.py', 'install', '-O2', _env=env, - _tail=10, _critical=True) + setup_template = open(join("buildconfig", "Setup.Android.SDL2.in")).read() + env = self.get_recipe_env(arch) + env['ANDROID_ROOT'] = join(self.ctx.ndk.sysroot, 'usr') - info('strip is ' + env['STRIP']) - build_lib = glob.glob('./build/lib*') - assert len(build_lib) == 1 - print('stripping pygame') -
shprint(sh.find, build_lib[0], '-name', '*.o', '-exec', - env['STRIP'], '{}', ';') + png = self.get_recipe('png', self.ctx) + png_lib_dir = join(png.get_build_dir(arch.arch), '.libs') + png_inc_dir = png.get_build_dir(arch) - warning('Should remove pygame tests etc. here, but skipping for now') + jpeg = self.get_recipe('jpeg', self.ctx) + jpeg_inc_dir = jpeg_lib_dir = jpeg.get_build_dir(arch.arch) + + sdl_mixer_includes = "" + sdl2_mixer_recipe = self.get_recipe('sdl2_mixer', self.ctx) + for include_dir in sdl2_mixer_recipe.get_include_dirs(arch): + sdl_mixer_includes += f"-I{include_dir} " + + setup_file = setup_template.format( + sdl_includes=( + " -I" + join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include') + + " -L" + join(self.ctx.bootstrap.build_dir, "libs", str(arch)) + + " -L" + png_lib_dir + " -L" + jpeg_lib_dir + " -L" + arch.ndk_lib_dir_versioned), + sdl_ttf_includes="-I"+join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_ttf'), + sdl_image_includes="-I"+join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_image'), + sdl_mixer_includes=sdl_mixer_includes, + jpeg_includes="-I"+jpeg_inc_dir, + png_includes="-I"+png_inc_dir, + freetype_includes="" + ) + open("Setup", "w").write(setup_file) + + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + env['USE_SDL2'] = '1' + env["PYGAME_CROSS_COMPILE"] = "TRUE" + env["PYGAME_ANDROID"] = "TRUE" + return env -recipe = PygameRecipe() +recipe = Pygame2Recipe() diff --git a/p4a/pythonforandroid/recipes/pyicu/__init__.py b/p4a/pythonforandroid/recipes/pyicu/__init__.py index 98ec7b7..d1e3749 100644 --- a/p4a/pythonforandroid/recipes/pyicu/__init__.py +++ b/p4a/pythonforandroid/recipes/pyicu/__init__.py @@ -1,58 +1,29 @@ -import os -import sh from os.path import join -from pythonforandroid.recipe import CompiledComponentsPythonRecipe -from pythonforandroid.toolchain import shprint, info +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe -class PyICURecipe(CompiledComponentsPythonRecipe): +class PyICURecipe(CppCompiledComponentsPythonRecipe): version = '1.9.2' - url = 'https://pypi.python.org/packages/source/P/PyICU/PyICU-{version}.tar.gz' + url = ('https://pypi.python.org/packages/source/P/PyICU/' + 'PyICU-{version}.tar.gz') depends = ["icu"] - patches = ['locale.patch', 'icu.patch'] + patches = ['locale.patch'] def get_recipe_env(self, arch): - env = super(PyICURecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) icu_include = join( - self.ctx.get_python_install_dir(), "include", "icu") + self.ctx.get_python_install_dir(arch.arch), "include", "icu") - env["CC"] += " -I"+icu_include + icu_recipe = self.get_recipe('icu', self.ctx) + icu_link_libs = icu_recipe.built_libraries.keys() + env["PYICU_LIBRARIES"] = ":".join(lib[3:-3] for lib in icu_link_libs) + env["CPPFLAGS"] += " -I" + icu_include + env["LDFLAGS"] += " -L" + join( + icu_recipe.get_build_dir(arch.arch), "icu_build", "lib" + ) - include = ( - " -I{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/include/" - " -I{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/" - "{arch}/include") - include = include.format(ndk=self.ctx.ndk_dir, - version=env["TOOLCHAIN_VERSION"], - arch=arch.arch) - env["CC"] += include - - lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}" - lib = lib.format(ndk=self.ctx.ndk_dir, - version=env["TOOLCHAIN_VERSION"], - arch=arch.arch) - env["LDFLAGS"] += " -lgnustl_shared -L"+lib - - build_dir = self.get_build_dir(arch.arch) - env["LDFLAGS"] += " -L"+build_dir return env - def build_arch(self, arch): - 
build_dir = self.get_build_dir(arch.arch) - - info("create links to icu libs") - lib_dir = join(self.ctx.get_python_install_dir(), "lib") - icu_libs = [f for f in os.listdir(lib_dir) if f.startswith("libicu")] - - for l in icu_libs: - raw = l.rsplit(".", 1)[0] - try: - shprint(sh.ln, "-s", join(lib_dir, l), join(build_dir, raw)) - except Exception: - pass - - super(PyICURecipe, self).build_arch(arch) - recipe = PyICURecipe() diff --git a/p4a/pythonforandroid/recipes/pyjnius/__init__.py b/p4a/pythonforandroid/recipes/pyjnius/__init__.py index 8aeac6c..58103e2 100644 --- a/p4a/pythonforandroid/recipes/pyjnius/__init__.py +++ b/p4a/pythonforandroid/recipes/pyjnius/__init__.py @@ -6,19 +6,22 @@ from os.path import join class PyjniusRecipe(CythonRecipe): - # "6553ad4" is one commit after last release (1.2.0) - # it fixes method resolution, required for resolving requestPermissions() - version = '6553ad4' + version = '1.4.2' url = 'https://github.com/kivy/pyjnius/archive/{version}.zip' name = 'pyjnius' - depends = [('genericndkbuild', 'sdl2', 'sdl'), 'six'] + depends = [('genericndkbuild', 'sdl2'), 'six'] site_packages_name = 'jnius' - patches = [('sdl2_jnienv_getter.patch', will_build('sdl2')), - ('genericndkbuild_jnienv_getter.patch', will_build('genericndkbuild'))] + patches = [('genericndkbuild_jnienv_getter.patch', will_build('genericndkbuild'))] + + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + # NDKPLATFORM is our switch for detecting Android platform, so can't be None + env['NDKPLATFORM'] = "NOTNONE" + return env def postbuild_arch(self, arch): - super(PyjniusRecipe, self).postbuild_arch(arch) + super().postbuild_arch(arch) info('Copying pyjnius java class to classes build dir') with current_directory(self.get_build_dir(arch.arch)): shprint(sh.cp, '-a', join('jnius', 'src', 'org'), self.ctx.javaclass_dir) diff --git a/p4a/pythonforandroid/recipes/pyjnius/genericndkbuild_jnienv_getter.patch b/p4a/pythonforandroid/recipes/pyjnius/genericndkbuild_jnienv_getter.patch index ff26994..fcd5387 100644 --- a/p4a/pythonforandroid/recipes/pyjnius/genericndkbuild_jnienv_getter.patch +++ b/p4a/pythonforandroid/recipes/pyjnius/genericndkbuild_jnienv_getter.patch @@ -1,25 +1,24 @@ -diff --git a/jnius/jnius_jvm_android.pxi b/jnius/jnius_jvm_android.pxi -index ac89fec..71daa43 100644 ---- a/jnius/jnius_jvm_android.pxi -+++ b/jnius/jnius_jvm_android.pxi -@@ -1,5 +1,5 @@ +diff -Naur pyjnius.orig/jnius/env.py pyjnius/jnius/env.py +--- pyjnius.orig/jnius/env.py 2022-05-28 11:16:02.000000000 +0200 ++++ pyjnius/jnius/env.py 2022-05-28 11:18:30.000000000 +0200 +@@ -268,7 +268,7 @@ + + class AndroidJavaLocation(UnixJavaLocation): + def get_libraries(self): +- return ['SDL2', 'log'] ++ return ['main', 'log'] + + def get_include_dirs(self): + # When cross-compiling for Android, we should not use the include dirs +diff -Naur pyjnius.orig/jnius/jnius_jvm_android.pxi pyjnius/jnius/jnius_jvm_android.pxi +--- pyjnius.orig/jnius/jnius_jvm_android.pxi 2022-05-28 11:16:02.000000000 +0200 ++++ pyjnius/jnius/jnius_jvm_android.pxi 2022-05-28 11:17:17.000000000 +0200 +@@ -1,6 +1,6 @@ # on android, rely on SDL to get the JNI env --cdef extern JNIEnv *SDL_ANDROID_GetJNIEnv() +-cdef extern JNIEnv *SDL_AndroidGetJNIEnv() +cdef extern JNIEnv *WebView_AndroidGetJNIEnv() - cdef JNIEnv *get_platform_jnienv(): -- return SDL_ANDROID_GetJNIEnv() -+ return WebView_AndroidGetJNIEnv() -diff --git a/setup.py b/setup.py -index 740510f..0c8e55f 100644 ---- a/setup.py -+++ b/setup.py -@@ -53,7 +53,7 @@ except 
ImportError: - if PLATFORM == 'android': - # for android, we use SDL... -- LIBRARIES = ['sdl', 'log'] -+ LIBRARIES = ['main', 'log'] - LIBRARY_DIRS = ['libs/' + getenv('ARCH')] - elif PLATFORM == 'darwin': - import subprocess + cdef JNIEnv *get_platform_jnienv() except NULL: +- return SDL_AndroidGetJNIEnv() ++ return WebView_AndroidGetJNIEnv() diff --git a/p4a/pythonforandroid/recipes/pyleveldb/__init__.py b/p4a/pythonforandroid/recipes/pyleveldb/__init__.py index 6147709..54dfb64 100644 --- a/p4a/pythonforandroid/recipes/pyleveldb/__init__.py +++ b/p4a/pythonforandroid/recipes/pyleveldb/__init__.py @@ -2,12 +2,26 @@ from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe class PyLevelDBRecipe(CppCompiledComponentsPythonRecipe): - version = '0.193' - url = 'https://pypi.python.org/packages/source/l/leveldb/leveldb-{version}.tar.gz' - depends = ['snappy', 'leveldb', ('hostpython2', 'hostpython3'), 'setuptools'] + version = '0.194' + url = ('https://pypi.python.org/packages/source/l/leveldb/' + 'leveldb-{version}.tar.gz') + depends = ['snappy', 'leveldb', 'setuptools'] patches = ['bindings-only.patch'] - call_hostpython_via_targetpython = False # Due to setuptools site_packages_name = 'leveldb' + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + + snappy_recipe = self.get_recipe('snappy', self.ctx) + leveldb_recipe = self.get_recipe('leveldb', self.ctx) + + env["LDFLAGS"] += " -L" + snappy_recipe.get_build_dir(arch.arch) + env["LDFLAGS"] += " -L" + leveldb_recipe.get_build_dir(arch.arch) + + env["SNAPPY_BUILD_PATH"] = snappy_recipe.get_build_dir(arch.arch) + env["LEVELDB_BUILD_PATH"] = leveldb_recipe.get_build_dir(arch.arch) + + return env + recipe = PyLevelDBRecipe() diff --git a/p4a/pythonforandroid/recipes/pyleveldb/bindings-only.patch b/p4a/pythonforandroid/recipes/pyleveldb/bindings-only.patch index 2899f4e..9f7027a 100644 --- a/p4a/pythonforandroid/recipes/pyleveldb/bindings-only.patch +++ b/p4a/pythonforandroid/recipes/pyleveldb/bindings-only.patch @@ -1,103 +1,119 @@ ---- pyleveldb/setup.py 2014-03-28 02:51:24.000000000 +0100 -+++ pyleveldb-patch/setup.py 2016-03-02 11:52:13.780678586 +0100 -@@ -7,41 +7,22 @@ - # - # See LICENSE for details. 
- --import glob --import platform --import sys -- +This patch force to only build the python bindings, and to do so, we modify +the setup.py file in oder that finds our compiled libraries (libleveldb.so and +libsnappy.so) +--- leveldb-0.194/setup.py.orig 2016-09-17 02:05:55.000000000 +0200 ++++ leveldb-0.194/setup.py 2019-02-26 16:57:40.997435911 +0100 +@@ -11,44 +11,25 @@ import platform + import sys + from setuptools import setup, Extension - --system,node,release,version,machine,processor = platform.uname() ++from os import environ + + system, node, release, version, machine, processor = platform.uname() -common_flags = [ +- '-I./leveldb/include', +- '-I./leveldb', +- '-I./snappy', +extra_compile_args = [ - '-I./leveldb/include', - '-I./leveldb', -- '-I./snappy', -+ '-I./leveldb/snappy', - '-I.', -- '-fno-builtin-memcmp', - '-O2', - '-fPIC', - '-DNDEBUG', - '-DSNAPPY', --] -- ++ '-I{}/include'.format(environ.get('LEVELDB_BUILD_PATH')), ++ '-I{}'.format(environ.get('LEVELDB_BUILD_PATH')), ++ '-I{}'.format(environ.get('SNAPPY_BUILD_PATH')), ++ '-I.', + '-I.', +- '-fno-builtin-memcmp', + '-O2', + '-fPIC', + '-DNDEBUG', + '-DSNAPPY', ++ '-pthread', ++ '-Wall', ++ '-D_REENTRANT', ++ '-DOS_ANDROID', + ] + -if system == 'Darwin': -- extra_compile_args = common_flags + [ -- '-DOS_MACOSX', -+ '-Wall', - '-DLEVELDB_PLATFORM_POSIX', -- '-Wno-error=unused-command-line-argument-hard-error-in-future', -- ] +- extra_compile_args = common_flags + [ +- '-DOS_MACOSX', +- '-DLEVELDB_PLATFORM_POSIX', +- '-Wno-error=unused-command-line-argument-hard-error-in-future', +- ] -elif system == 'Linux': +- extra_compile_args = common_flags + [ +- '-pthread', +- '-Wall', +- '-DOS_LINUX', +- '-DLEVELDB_PLATFORM_POSIX', +- ] +-elif system == 'SunOS': - extra_compile_args = common_flags + [ - '-pthread', -- '-Wall', -- '-DOS_LINUX', +- '-Wall', +- '-DOS_SOLARIS', - '-DLEVELDB_PLATFORM_POSIX', - ] -else: -- print >>sys.stderr, "Don't know how to compile leveldb for %s!" 
% system -- sys.exit(0) -+ '-D_REENTRANT', -+ '-DOS_ANDROID', -+] - +- sys.stderr.write("Don't know how to compile leveldb for %s!\n" % system) +- sys.exit(1) +- setup( - name = 'leveldb', -@@ -75,52 +56,6 @@ - ext_modules = [ - Extension('leveldb', - sources = [ -- # snappy -- './snappy/snappy.cc', -- './snappy/snappy-stubs-internal.cc', -- './snappy/snappy-sinksource.cc', -- './snappy/snappy-c.cc', + name = 'leveldb', + version = '0.194', +@@ -81,57 +62,11 @@ setup( + ext_modules = [ + Extension('leveldb', + sources = [ +- # snappy +- './snappy/snappy.cc', +- './snappy/snappy-stubs-internal.cc', +- './snappy/snappy-sinksource.cc', +- './snappy/snappy-c.cc', - -- #leveldb -- 'leveldb/db/builder.cc', -- 'leveldb/db/c.cc', -- 'leveldb/db/db_impl.cc', -- 'leveldb/db/db_iter.cc', -- 'leveldb/db/dbformat.cc', -- 'leveldb/db/filename.cc', -- 'leveldb/db/log_reader.cc', -- 'leveldb/db/log_writer.cc', -- 'leveldb/db/memtable.cc', -- 'leveldb/db/repair.cc', -- 'leveldb/db/table_cache.cc', -- 'leveldb/db/version_edit.cc', -- 'leveldb/db/version_set.cc', -- 'leveldb/db/write_batch.cc', -- 'leveldb/table/block.cc', -- 'leveldb/table/block_builder.cc', -- 'leveldb/table/filter_block.cc', -- 'leveldb/table/format.cc', -- 'leveldb/table/iterator.cc', -- 'leveldb/table/merger.cc', -- 'leveldb/table/table.cc', -- 'leveldb/table/table_builder.cc', -- 'leveldb/table/two_level_iterator.cc', -- 'leveldb/util/arena.cc', -- 'leveldb/util/bloom.cc', -- 'leveldb/util/cache.cc', -- 'leveldb/util/coding.cc', -- 'leveldb/util/comparator.cc', -- 'leveldb/util/crc32c.cc', -- 'leveldb/util/env.cc', -- 'leveldb/util/env_posix.cc', -- 'leveldb/util/filter_policy.cc', -- 'leveldb/util/hash.cc', -- 'leveldb/util/histogram.cc', -- 'leveldb/util/logging.cc', -- 'leveldb/util/options.cc', -- 'leveldb/util/status.cc', -- 'leveldb/port/port_posix.cc', +- #leveldb +- 'leveldb/db/builder.cc', +- 'leveldb/db/c.cc', +- 'leveldb/db/db_impl.cc', +- 'leveldb/db/db_iter.cc', +- 'leveldb/db/dbformat.cc', +- 'leveldb/db/filename.cc', +- 'leveldb/db/log_reader.cc', +- 'leveldb/db/log_writer.cc', +- 'leveldb/db/memtable.cc', +- 'leveldb/db/repair.cc', +- 'leveldb/db/table_cache.cc', +- 'leveldb/db/version_edit.cc', +- 'leveldb/db/version_set.cc', +- 'leveldb/db/write_batch.cc', +- 'leveldb/table/block.cc', +- 'leveldb/table/block_builder.cc', +- 'leveldb/table/filter_block.cc', +- 'leveldb/table/format.cc', +- 'leveldb/table/iterator.cc', +- 'leveldb/table/merger.cc', +- 'leveldb/table/table.cc', +- 'leveldb/table/table_builder.cc', +- 'leveldb/table/two_level_iterator.cc', +- 'leveldb/util/arena.cc', +- 'leveldb/util/bloom.cc', +- 'leveldb/util/cache.cc', +- 'leveldb/util/coding.cc', +- 'leveldb/util/comparator.cc', +- 'leveldb/util/crc32c.cc', +- 'leveldb/util/env.cc', +- 'leveldb/util/env_posix.cc', +- 'leveldb/util/filter_policy.cc', +- 'leveldb/util/hash.cc', +- 'leveldb/util/histogram.cc', +- 'leveldb/util/logging.cc', +- 'leveldb/util/options.cc', +- 'leveldb/util/status.cc', +- 'leveldb/port/port_posix.cc', - - # python stuff - 'leveldb_ext.cc', - 'leveldb_object.cc', + # python stuff + 'leveldb_ext.cc', + 'leveldb_object.cc', + ], +- libraries = ['stdc++'], ++ libraries = ['snappy', 'leveldb', 'stdc++', 'c++_shared'], + extra_compile_args = extra_compile_args, + ) + ] diff --git a/p4a/pythonforandroid/recipes/pymunk/__init__.py b/p4a/pythonforandroid/recipes/pymunk/__init__.py index b72b85b..a982098 100644 --- a/p4a/pythonforandroid/recipes/pymunk/__init__.py +++ b/p4a/pythonforandroid/recipes/pymunk/__init__.py @@ -1,21 +1,17 @@ 
-from os.path import join from pythonforandroid.recipe import CompiledComponentsPythonRecipe class PymunkRecipe(CompiledComponentsPythonRecipe): name = "pymunk" - version = '5.3.2' - url = 'https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip' - depends = ['cffi', 'setuptools'] + version = "6.0.0" + url = "https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip" + depends = ["cffi", "setuptools"] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): - env = super(PymunkRecipe, self).get_recipe_env(arch) - env['PYTHON_ROOT'] = self.ctx.get_python_install_dir() - env['LDFLAGS'] += " -shared -llog" - env['LDFLAGS'] += ' -L{}'.format(join(self.ctx.ndk_platform, 'usr', 'lib')) - env['LDFLAGS'] += " --sysroot={}".format(self.ctx.ndk_platform) - env['LIBS'] = env.get('LIBS', '') + ' -landroid' + env = super().get_recipe_env(arch) + env["LDFLAGS"] += " -llog" # Used by Chipmunk cpMessage + env["LDFLAGS"] += " -lm" # For older versions of Android return env diff --git a/p4a/pythonforandroid/recipes/pynacl/__init__.py b/p4a/pythonforandroid/recipes/pynacl/__init__.py index eb9ca2d..0ab9352 100644 --- a/p4a/pythonforandroid/recipes/pynacl/__init__.py +++ b/p4a/pythonforandroid/recipes/pynacl/__init__.py @@ -7,11 +7,11 @@ class PyNaCLRecipe(CompiledComponentsPythonRecipe): version = '1.3.0' url = 'https://pypi.python.org/packages/source/P/PyNaCl/PyNaCl-{version}.tar.gz' - depends = [('hostpython2', 'hostpython3'), 'six', 'setuptools', 'cffi', 'libsodium'] + depends = ['hostpython3', 'six', 'setuptools', 'cffi', 'libsodium'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): - env = super(PyNaCLRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) env['SODIUM_INSTALL'] = 'system' libsodium_build_dir = self.get_recipe( diff --git a/p4a/pythonforandroid/recipes/pyproj/__init__.py b/p4a/pythonforandroid/recipes/pyproj/__init__.py index 71b272d..0c47b29 100644 --- a/p4a/pythonforandroid/recipes/pyproj/__init__.py +++ b/p4a/pythonforandroid/recipes/pyproj/__init__.py @@ -2,8 +2,8 @@ from pythonforandroid.recipe import CythonRecipe class PyProjRecipe(CythonRecipe): - version = '1.9.5.1' - url = 'https://github.com/jswhit/pyproj/archive/master.zip' + version = '1.9.6' + url = 'https://github.com/pyproj4/pyproj/archive/v{version}rel.zip' depends = ['setuptools'] call_hostpython_via_targetpython = False diff --git a/p4a/pythonforandroid/recipes/pysdl2/__init__.py b/p4a/pythonforandroid/recipes/pysdl2/__init__.py index e0df9dc..b1dc9cb 100644 --- a/p4a/pythonforandroid/recipes/pysdl2/__init__.py +++ b/p4a/pythonforandroid/recipes/pysdl2/__init__.py @@ -1,10 +1,9 @@ - from pythonforandroid.recipe import PythonRecipe class PySDL2Recipe(PythonRecipe): - version = '0.9.3' - url = 'https://bitbucket.org/marcusva/py-sdl2/downloads/PySDL2-{version}.tar.gz' + version = '0.9.6' + url = 'https://files.pythonhosted.org/packages/source/P/PySDL2/PySDL2-{version}.tar.gz' depends = ['sdl2'] diff --git a/p4a/pythonforandroid/recipes/pysha3/__init__.py b/p4a/pythonforandroid/recipes/pysha3/__init__.py index 35cfff8..af38946 100644 --- a/p4a/pythonforandroid/recipes/pysha3/__init__.py +++ b/p4a/pythonforandroid/recipes/pysha3/__init__.py @@ -10,19 +10,14 @@ class Pysha3Recipe(PythonRecipe): call_hostpython_via_targetpython = False def get_recipe_env(self, arch=None, with_flags_in_cc=True): - env = super(Pysha3Recipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) # CFLAGS may only be used 
to specify C compiler flags, for macro definitions use CPPFLAGS env['CPPFLAGS'] = env['CFLAGS'] - if self.ctx.ndk == 'crystax': - env['CPPFLAGS'] += ' -I{}/sources/python/{}/include/python/'.format( - self.ctx.ndk_dir, self.ctx.python_recipe.version[0:3]) env['CFLAGS'] = '' # LDFLAGS may only be used to specify linker flags, for libraries use LIBS - env['LDFLAGS'] = env['LDFLAGS'].replace('-lm', '').replace('-lcrystax', '') + env['LDFLAGS'] = env['LDFLAGS'].replace('-lm', '') env['LDFLAGS'] += ' -L{}'.format(os.path.join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)) env['LIBS'] = ' -lm' - if self.ctx.ndk == 'crystax': - env['LIBS'] += ' -lcrystax -lpython{}m'.format(self.ctx.python_recipe.version[0:3]) env['LDSHARED'] += env['LIBS'] return env diff --git a/p4a/pythonforandroid/recipes/python3/__init__.py b/p4a/pythonforandroid/recipes/python3/__init__.py index c6d5ba5..5894b3f 100644 --- a/p4a/pythonforandroid/recipes/python3/__init__.py +++ b/p4a/pythonforandroid/recipes/python3/__init__.py @@ -1,54 +1,429 @@ +import glob import sh -from pythonforandroid.python import GuestPythonRecipe -from pythonforandroid.recipe import Recipe +import subprocess + +from multiprocessing import cpu_count +from os import environ, utime +from os.path import dirname, exists, join +from pathlib import Path +import shutil + +from pythonforandroid.logger import info, warning, shprint +from pythonforandroid.patching import version_starts_with +from pythonforandroid.recipe import Recipe, TargetPythonRecipe +from pythonforandroid.util import ( + current_directory, + ensure_dir, + walk_valid_filens, + BuildInterruptingException, +) + +NDK_API_LOWER_THAN_SUPPORTED_MESSAGE = ( + 'Target ndk-api is {ndk_api}, ' + 'but the python3 recipe supports only {min_ndk_api}+' ) -class Python3Recipe(GuestPythonRecipe): +class Python3Recipe(TargetPythonRecipe): ''' - The python3's recipe. + The python3's recipe + ^^^^^^^^^^^^^^^^^^^^ - .. note:: This recipe can be built only against API 21+. Also, in order to - build certain python modules, we need to add some extra recipes to our - build requirements: + The python 3 recipe can be built with some extra python modules, but to do + so, we need some libraries. By default, we ship the python3 recipe with + some common libraries, defined in ``depends``. We also support some optional + libraries, which are less common than the ones defined in ``depends``, so + we added them as optional dependencies (``opt_depends``). - - ctypes: you must add the recipe for ``libffi``. + Below is the relationship between the python modules and the recipe + libraries:: + + - _ctypes: you must add the recipe for ``libffi``. + - _sqlite3: you must add the recipe for ``sqlite3``. + - _ssl: you must add the recipe for ``openssl``. + - _bz2: you must add the recipe for ``libbz2`` (optional). + - _lzma: you must add the recipe for ``liblzma`` (optional). + + .. note:: This recipe can be built only against API 21+. + + .. versionchanged:: 2019.10.06.post0 + - Refactored from deleted class ``python.GuestPythonRecipe`` into here + - Added optional dependencies: :mod:`~pythonforandroid.recipes.libbz2` + and :mod:`~pythonforandroid.recipes.liblzma` ..
versionchanged:: 0.6.0 Refactored into class :class:`~pythonforandroid.python.GuestPythonRecipe` ''' - version = '3.7.1' + version = '3.9.9' url = 'https://www.python.org/ftp/python/{version}/Python-{version}.tgz' name = 'python3' - patches = ["patches/fix-ctypes-util-find-library.patch"] + patches = [ + 'patches/pyconfig_detection.patch', + 'patches/reproducible-buildinfo.diff', - if sh.which('lld') is not None: - patches = patches + ["patches/remove-fix-cortex-a8.patch"] + # Python 3.7.1 + ('patches/py3.7.1_fix-ctypes-util-find-library.patch', version_starts_with("3.7")), + ('patches/py3.7.1_fix-zlib-version.patch', version_starts_with("3.7")), + + # Python 3.8.1 & 3.9.X + ('patches/py3.8.1.patch', version_starts_with("3.8")), + ('patches/py3.8.1.patch', version_starts_with("3.9")) + ] + + if shutil.which('lld') is not None: + patches = patches + [ + ("patches/py3.7.1_fix_cortex_a8.patch", version_starts_with("3.7")), + ("patches/py3.8.1_fix_cortex_a8.patch", version_starts_with("3.8")), + ("patches/py3.8.1_fix_cortex_a8.patch", version_starts_with("3.9")) + ] depends = ['hostpython3', 'sqlite3', 'openssl', 'libffi'] - conflicts = ['python3crystax', 'python2', 'python2legacy'] + # those optional depends allow us to build python compression modules: + # - _bz2.so + # - _lzma.so + opt_depends = ['libbz2', 'liblzma'] + '''The optional libraries which we would like to get our python linked''' configure_args = ( '--host={android_host}', '--build={android_build}', '--enable-shared', - '--disable-ipv6', + '--enable-ipv6', 'ac_cv_file__dev_ptmx=yes', 'ac_cv_file__dev_ptc=no', '--without-ensurepip', 'ac_cv_little_endian_double=yes', '--prefix={prefix}', - '--exec-prefix={exec_prefix}') + '--exec-prefix={exec_prefix}', + '--enable-loadable-sqlite-extensions') + '''The configure arguments needed to build the python recipe. Those are + used in method :meth:`build_arch` (if not overwritten like python3's + recipe does). + ''' + + MIN_NDK_API = 21 + '''Sets the minimal ndk api number needed to use the recipe. + + .. warning:: This recipe can be built only against API 21+, so it means + that any class which inherits from class:`GuestPythonRecipe` will have + this limitation. + ''' + + stdlib_dir_blacklist = { + '__pycache__', + 'test', + 'tests', + 'lib2to3', + 'ensurepip', + 'idlelib', + 'tkinter', + } + '''The directories that we want to omit for our python bundle''' + + stdlib_filen_blacklist = [ + '*.py', + '*.exe', + '*.whl', + ] + '''The file extensions that we want to blacklist for our python bundle''' + + site_packages_dir_blacklist = { + '__pycache__', + 'tests' + } + '''The directories from site packages dir that we don't want to be included + in our python bundle.''' + + site_packages_filen_blacklist = [ + '*.py' + ] + '''The file extensions from site packages dir that we don't want to be + included in our python bundle.''' + + compiled_extension = '.pyc' + '''the default extension for compiled python files. + + .. note:: the default extension for compiled python files has been .pyo for + python 2.x-3.4 but as of Python 3.5, the .pyo filename extension is no + longer used and has been removed in favour of extension .pyc + ''' + + def __init__(self, *args, **kwargs): + self._ctx = None + super().__init__(*args, **kwargs) + + @property + def _libpython(self): + '''return the python's library name (with extension)''' + return 'libpython{link_version}.so'.format( + link_version=self.link_version + ) + + @property + def link_version(self): + '''return the python's library link version e.g. 
3.7m, 3.8''' + major, minor = self.major_minor_version_string.split('.') + flags = '' + if major == '3' and int(minor) < 8: + flags += 'm' + return '{major}.{minor}{flags}'.format( + major=major, + minor=minor, + flags=flags + ) + + def include_root(self, arch_name): + return join(self.get_build_dir(arch_name), 'Include') + + def link_root(self, arch_name): + return join(self.get_build_dir(arch_name), 'android-build') + + def should_build(self, arch): + return not Path(self.link_root(arch.arch), self._libpython).is_file() + + def prebuild_arch(self, arch): + super().prebuild_arch(arch) + self.ctx.python_recipe = self + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super().get_recipe_env(arch) + env['HOSTARCH'] = arch.command_prefix + + env['CC'] = arch.get_clang_exe(with_target=True) + + env['PATH'] = ( + '{hostpython_dir}:{old_path}').format( + hostpython_dir=self.get_recipe( + 'host' + self.name, self.ctx).get_path_to_python(), + old_path=env['PATH']) + + env['CFLAGS'] = ' '.join( + [ + '-fPIC', + '-DANDROID' + ] + ) + + env['LDFLAGS'] = env.get('LDFLAGS', '') + if shutil.which('lld') is not None: + # Note: The -L. is to fix a bug in python 3.7. + # https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=234409 + env['LDFLAGS'] += ' -L. -fuse-ld=lld' + else: + warning('lld not found, linking without it. ' + 'Consider installing lld if linker errors occur.') + + return env def set_libs_flags(self, env, arch): - env = super(Python3Recipe, self).set_libs_flags(env, arch) + '''Takes care to properly link libraries with python depending on our + requirements and the attribute :attr:`opt_depends`. + ''' + def add_flags(include_flags, link_dirs, link_libs): + env['CPPFLAGS'] = env.get('CPPFLAGS', '') + include_flags + env['LDFLAGS'] = env.get('LDFLAGS', '') + link_dirs + env['LIBS'] = env.get('LIBS', '') + link_libs + + if 'sqlite3' in self.ctx.recipe_build_order: + info('Activating flags for sqlite3') + recipe = Recipe.get_recipe('sqlite3', self.ctx) + add_flags(' -I' + recipe.get_build_dir(arch.arch), + ' -L' + recipe.get_lib_dir(arch), ' -lsqlite3') + + if 'libffi' in self.ctx.recipe_build_order: + info('Activating flags for libffi') + recipe = Recipe.get_recipe('libffi', self.ctx) + # In order to force the correct linkage for our libffi library, we + # set the following variable to point where is our libffi.pc file, + # because the python build system uses pkg-config to configure it. 
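The PKG_CONFIG_PATH assignment on the next line works because CPython's configure script locates libffi through pkg-config; a rough standalone sketch of the lookup being redirected (illustrative only, not code from this diff):

    import os
    import subprocess

    def libffi_cflags(libffi_build_dir):
        # Point pkg-config at the directory holding the recipe's libffi.pc,
        # keeping the rest of the environment intact.
        env = {**os.environ, 'PKG_CONFIG_PATH': libffi_build_dir}
        return subprocess.run(
            ['pkg-config', '--cflags', 'libffi'],
            env=env, capture_output=True, text=True, check=True,
        ).stdout.strip()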
+ env['PKG_CONFIG_PATH'] = recipe.get_build_dir(arch.arch) + add_flags(' -I' + ' -I'.join(recipe.get_include_dirs(arch)), + ' -L' + join(recipe.get_build_dir(arch.arch), '.libs'), + ' -lffi') + if 'openssl' in self.ctx.recipe_build_order: + info('Activating flags for openssl') recipe = Recipe.get_recipe('openssl', self.ctx) self.configure_args += \ ('--with-openssl=' + recipe.get_build_dir(arch.arch),) + add_flags(recipe.include_flags(arch), + recipe.link_dirs_flags(arch), recipe.link_libs_flags()) + + for library_name in {'libbz2', 'liblzma'}: + if library_name in self.ctx.recipe_build_order: + info(f'Activating flags for {library_name}') + recipe = Recipe.get_recipe(library_name, self.ctx) + add_flags(recipe.get_library_includes(arch), + recipe.get_library_ldflags(arch), + recipe.get_library_libs_flag()) + + # The python build system contains a hardcoded zlib version, which + # prevents building the zlib module. Here we search for android's zlib + # version and set the right flags, so python can be built with + # android's zlib. + info("Activating flags for android's zlib") + zlib_lib_path = arch.ndk_lib_dir_versioned + zlib_includes = self.ctx.ndk.sysroot_include_dir + zlib_h = join(zlib_includes, 'zlib.h') + try: + with open(zlib_h) as fileh: + zlib_data = fileh.read() + except IOError: + raise BuildInterruptingException( + "Could not determine android's zlib version, no zlib.h ({}) in" + " the NDK dir includes".format(zlib_h) + ) + for line in zlib_data.split('\n'): + if line.startswith('#define ZLIB_VERSION '): + break + else: + raise BuildInterruptingException( + 'Could not parse zlib.h, so we cannot find the zlib version' + ' required by the python build.' + ) + env['ZLIB_VERSION'] = line.replace('#define ZLIB_VERSION ', '') + add_flags(' -I' + zlib_includes, ' -L' + zlib_lib_path, ' -lz') + return env + def build_arch(self, arch): + if self.ctx.ndk_api < self.MIN_NDK_API: + raise BuildInterruptingException( + NDK_API_LOWER_THAN_SUPPORTED_MESSAGE.format( + ndk_api=self.ctx.ndk_api, min_ndk_api=self.MIN_NDK_API + ), + ) + + recipe_build_dir = self.get_build_dir(arch.arch) + + # Create a subdirectory to actually perform the build + build_dir = join(recipe_build_dir, 'android-build') + ensure_dir(build_dir) + + # TODO: Get these dynamically, like bpo-30386 does + sys_prefix = '/usr/local' + sys_exec_prefix = '/usr/local' + + env = self.get_recipe_env(arch) + env = self.set_libs_flags(env, arch) + + android_build = sh.Command( + join(recipe_build_dir, + 'config.guess'))().stdout.strip().decode('utf-8') + + with current_directory(build_dir): + if not exists('config.status'): + shprint( + sh.Command(join(recipe_build_dir, 'configure')), + *(' '.join(self.configure_args).format( + android_host=env['HOSTARCH'], + android_build=android_build, + prefix=sys_prefix, + exec_prefix=sys_exec_prefix)).split(' '), + _env=env) + + shprint( + sh.make, 'all', '-j', str(cpu_count()), + 'INSTSONAME={lib_name}'.format(lib_name=self._libpython), + _env=env + ) + + # TODO: Look into passing the path to pyconfig.h in a + # better way, although this is probably acceptable + sh.cp('pyconfig.h', join(recipe_build_dir, 'Include'))
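As a rough illustration of how the configure_args template above is rendered before being passed to configure (the triplets below are examples, not taken from a real build):

    # Sketch only: formatting configure_args the way build_arch does.
    configure_args = ('--host={android_host}', '--build={android_build}',
                      '--prefix={prefix}')
    rendered = ' '.join(configure_args).format(
        android_host='aarch64-linux-android',   # example host triplet
        android_build='x86_64-pc-linux-gnu',    # example config.guess output
        prefix='/usr/local')
    print(rendered.split(' '))
    # ['--host=aarch64-linux-android', '--build=x86_64-pc-linux-gnu',
    #  '--prefix=/usr/local']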
+ + def compile_python_files(self, dir): + ''' + Recursively compile the python files inside a given folder. + + .. note:: python2 compiled the files into the .pyo extension, but as + of Python 3.5 the .pyo filename extension is no longer used; .pyc + is used instead (https://www.python.org/dev/peps/pep-0488) + ''' + args = [self.ctx.hostpython] + args += ['-OO', '-m', 'compileall', '-b', '-f', dir] + subprocess.call(args) + + def create_python_bundle(self, dirn, arch): + """ + Create a packaged python bundle in the target directory, by + copying all the modules and standard library to the right + place. + """ + # TODO: find a better way to locate the build libs folder + modules_build_dir = join( + self.get_build_dir(arch.arch), + 'android-build', + 'build', + 'lib.linux{}-{}-{}'.format( + '2' if self.version[0] == '2' else '', + arch.command_prefix.split('-')[0], + self.major_minor_version_string + )) + + # Compile to *.pyc the python modules + self.compile_python_files(modules_build_dir) + # Compile to *.pyc the standard python library + self.compile_python_files(join(self.get_build_dir(arch.arch), 'Lib')) + # Compile to *.pyc the other python packages (site-packages) + self.compile_python_files(self.ctx.get_python_install_dir(arch.arch)) + + # Bundle compiled python modules to a folder + modules_dir = join(dirn, 'modules') + c_ext = self.compiled_extension + ensure_dir(modules_dir) + module_filens = (glob.glob(join(modules_build_dir, '*.so')) + + glob.glob(join(modules_build_dir, '*' + c_ext))) + info("Copy {} files into the bundle".format(len(module_filens))) + for filen in module_filens: + info(" - copy {}".format(filen)) + shutil.copy2(filen, modules_dir) + + # zip up the standard library + stdlib_zip = join(dirn, 'stdlib.zip') + with current_directory(join(self.get_build_dir(arch.arch), 'Lib')): + stdlib_filens = list(walk_valid_filens( + '.', self.stdlib_dir_blacklist, self.stdlib_filen_blacklist)) + if 'SOURCE_DATE_EPOCH' in environ: + # for reproducible builds + stdlib_filens.sort() + timestamp = int(environ['SOURCE_DATE_EPOCH']) + for filen in stdlib_filens: + utime(filen, (timestamp, timestamp)) + info("Zip {} files into the bundle".format(len(stdlib_filens))) + shprint(sh.zip, '-X', stdlib_zip, *stdlib_filens) + + # copy the site-packages into place + ensure_dir(join(dirn, 'site-packages')) + ensure_dir(self.ctx.get_python_install_dir(arch.arch)) + # TODO: Improve the API around walking and copying the files + with current_directory(self.ctx.get_python_install_dir(arch.arch)): + filens = list(walk_valid_filens( + '.', self.site_packages_dir_blacklist, + self.site_packages_filen_blacklist)) + info("Copy {} files into the site-packages".format(len(filens))) + for filen in filens: + info(" - copy {}".format(filen)) + ensure_dir(join(dirn, 'site-packages', dirname(filen))) + shutil.copy2(filen, join(dirn, 'site-packages', filen)) + + # copy the python .so files into place + python_build_dir = join(self.get_build_dir(arch.arch), + 'android-build') + python_lib_name = 'libpython' + self.link_version + shprint( + sh.cp, + join(python_build_dir, python_lib_name + '.so'), + join(self.ctx.bootstrap.dist_dir, 'libs', arch.arch) + ) + + info('Renaming .so files to reflect cross-compile') + self.reduce_object_file_names(join(dirn, 'site-packages')) + + return join(dirn, 'site-packages') + recipe = Python3Recipe() diff --git a/p4a/pythonforandroid/recipes/python3/patches/py3.7.1_fix-ctypes-util-find-library.patch b/p4a/pythonforandroid/recipes/python3/patches/py3.7.1_fix-ctypes-util-find-library.patch new file mode 100644 index 0000000..494270d --- /dev/null +++
b/p4a/pythonforandroid/recipes/python3/patches/py3.7.1_fix-ctypes-util-find-library.patch @@ -0,0 +1,15 @@ +diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py +--- a/Lib/ctypes/util.py ++++ b/Lib/ctypes/util.py +@@ -67,4 +67,11 @@ + return fname + return None + ++# This patch overrides the find_library to look in the right places on ++# Android ++if True: ++ from android._ctypes_library_finder import find_library as _find_lib ++ def find_library(name): ++ return _find_lib(name) ++ + elif os.name == "posix" and sys.platform == "darwin": diff --git a/p4a/pythonforandroid/recipes/python3/patches/py3.7.1_fix-zlib-version.patch b/p4a/pythonforandroid/recipes/python3/patches/py3.7.1_fix-zlib-version.patch new file mode 100644 index 0000000..0dbffae --- /dev/null +++ b/p4a/pythonforandroid/recipes/python3/patches/py3.7.1_fix-zlib-version.patch @@ -0,0 +1,12 @@ +--- Python-3.7.1/setup.py.orig 2018-10-20 08:04:19.000000000 +0200 ++++ Python-3.7.1/setup.py 2019-02-17 00:24:30.715904412 +0100 +@@ -1410,7 +1410,8 @@ class PyBuildExt(build_ext): + if zlib_inc is not None: + zlib_h = zlib_inc[0] + '/zlib.h' + version = '"0.0.0"' +- version_req = '"1.1.3"' ++ version_req = '"{}"'.format( ++ os.environ.get('ZLIB_VERSION', '1.1.3')) + if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h): + zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:]) + with open(zlib_h) as fp: diff --git a/p4a/pythonforandroid/recipes/python3/patches/remove-fix-cortex-a8.patch b/p4a/pythonforandroid/recipes/python3/patches/py3.7.1_fix_cortex_a8.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python3/patches/remove-fix-cortex-a8.patch rename to p4a/pythonforandroid/recipes/python3/patches/py3.7.1_fix_cortex_a8.patch diff --git a/p4a/pythonforandroid/recipes/python3/patches/py3.8.1.patch b/p4a/pythonforandroid/recipes/python3/patches/py3.8.1.patch new file mode 100644 index 0000000..6018805 --- /dev/null +++ b/p4a/pythonforandroid/recipes/python3/patches/py3.8.1.patch @@ -0,0 +1,42 @@ +diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py +index 97973bc..053c231 100644 +--- a/Lib/ctypes/util.py ++++ b/Lib/ctypes/util.py +@@ -67,6 +67,13 @@ if os.name == "nt": + return fname + return None + ++# This patch overrides the find_library to look in the right places on ++# Android ++if True: ++ from android._ctypes_library_finder import find_library as _find_lib ++ def find_library(name): ++ return _find_lib(name) ++ + elif os.name == "posix" and sys.platform == "darwin": + from ctypes.macholib.dyld import dyld_find as _dyld_find + def find_library(name): +diff --git a/configure b/configure +index 0914e24..dd00812 100755 +--- a/configure ++++ b/configure +@@ -18673,4 +18673,3 @@ if test "$Py_OPT" = 'false' -a "$Py_DEBUG" != 'true'; then + echo "" >&6 + echo "" >&6 + fi +- +diff --git a/setup.py b/setup.py +index 20d7f35..af15cc2 100644 +--- a/setup.py ++++ b/setup.py +@@ -1501,7 +1501,9 @@ class PyBuildExt(build_ext): + if zlib_inc is not None: + zlib_h = zlib_inc[0] + '/zlib.h' + version = '"0.0.0"' +- version_req = '"1.1.3"' ++ # version_req = '"1.1.3"' ++ version_req = '"{}"'.format( ++ os.environ.get('ZLIB_VERSION', '1.1.3')) + if MACOS and is_macosx_sdk_path(zlib_h): + zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:]) + with open(zlib_h) as fp: diff --git a/p4a/pythonforandroid/recipes/python3/patches/py3.8.1_fix_cortex_a8.patch b/p4a/pythonforandroid/recipes/python3/patches/py3.8.1_fix_cortex_a8.patch new file mode 100644 index 0000000..92a41b5 --- /dev/null +++ 
b/p4a/pythonforandroid/recipes/python3/patches/py3.8.1_fix_cortex_a8.patch @@ -0,0 +1,15 @@ +This patch removes --fix-cortex-a8 from the linker flags in order to support linking +with lld, as lld does not support this flag (https://github.com/android-ndk/ndk/issues/766). +diff --git a/configure b/configure +index 0914e24..7517168 100755 +--- a/configure ++++ b/configure +@@ -5642,7 +5642,7 @@ $as_echo_n "checking for the Android arm ABI... " >&6; } + $as_echo "$_arm_arch" >&6; } + if test "$_arm_arch" = 7; then + BASECFLAGS="${BASECFLAGS} -mfloat-abi=softfp -mfpu=vfpv3-d16" +- LDFLAGS="${LDFLAGS} -march=armv7-a -Wl,--fix-cortex-a8" ++ LDFLAGS="${LDFLAGS} -march=armv7-a" + fi + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: not Android" >&5 diff --git a/p4a/pythonforandroid/recipes/python3/patches/pyconfig_detection.patch b/p4a/pythonforandroid/recipes/python3/patches/pyconfig_detection.patch new file mode 100644 index 0000000..087ab58 --- /dev/null +++ b/p4a/pythonforandroid/recipes/python3/patches/pyconfig_detection.patch @@ -0,0 +1,13 @@ +diff -Nru Python-3.8.2/Lib/site.py Python-3.8.2-new/Lib/site.py +--- Python-3.8.2/Lib/site.py 2020-04-28 12:48:38.000000000 -0700 ++++ Python-3.8.2-new/Lib/site.py 2020-04-28 12:52:46.000000000 -0700 +@@ -487,7 +487,8 @@ + if key == 'include-system-site-packages': + system_site = value.lower() + elif key == 'home': +- sys._home = value ++ # this is breaking pyconfig.h path detection with venv ++ print('Ignoring "sys._home = value" override') + + sys.prefix = sys.exec_prefix = site_prefix + diff --git a/p4a/pythonforandroid/recipes/python3/patches/reproducible-buildinfo.diff b/p4a/pythonforandroid/recipes/python3/patches/reproducible-buildinfo.diff new file mode 100644 index 0000000..807d180 --- /dev/null +++ b/p4a/pythonforandroid/recipes/python3/patches/reproducible-buildinfo.diff @@ -0,0 +1,13 @@ +# DP: Build getbuildinfo.o with DATE/TIME values when defined + +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -785,6 +785,8 @@ Modules/getbuildinfo.o: $(PARSER_OBJS) \ + -DGITVERSION="\"`LC_ALL=C $(GITVERSION)`\"" \ + -DGITTAG="\"`LC_ALL=C $(GITTAG)`\"" \ + -DGITBRANCH="\"`LC_ALL=C $(GITBRANCH)`\"" \ ++ $(if $(BUILD_DATE),-DDATE='"$(BUILD_DATE)"') \ ++ $(if $(BUILD_TIME),-DTIME='"$(BUILD_TIME)"') \ + -o $@ $(srcdir)/Modules/getbuildinfo.c + + Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile diff --git a/p4a/pythonforandroid/recipes/pytz/__init__.py b/p4a/pythonforandroid/recipes/pytz/__init__.py index 12133bc..ff9bc37 100644 --- a/p4a/pythonforandroid/recipes/pytz/__init__.py +++ b/p4a/pythonforandroid/recipes/pytz/__init__.py @@ -3,8 +3,8 @@ from pythonforandroid.recipe import PythonRecipe class PytzRecipe(PythonRecipe): name = 'pytz' - version = '2015.7' - url = 'https://pypi.python.org/packages/source/p/pytz/pytz-{version}.tar.bz2' + version = '2019.3' + url = 'https://pypi.python.org/packages/source/p/pytz/pytz-{version}.tar.gz' depends = [] diff --git a/p4a/pythonforandroid/recipes/pyzbar/__init__.py b/p4a/pythonforandroid/recipes/pyzbar/__init__.py index ccfcd9b..cf78a55 100644 --- a/p4a/pythonforandroid/recipes/pyzbar/__init__.py +++ b/p4a/pythonforandroid/recipes/pyzbar/__init__.py @@ -13,10 +13,10 @@ class PyZBarRecipe(PythonRecipe): depends = ['setuptools', 'libzbar'] def get_recipe_env(self, arch=None, with_flags_in_cc=True): - env = super(PyZBarRecipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) libzbar = self.get_recipe('libzbar', self.ctx) libzbar_dir = 
libzbar.get_build_dir(arch.arch) - env['PYTHON_ROOT'] = self.ctx.get_python_install_dir() + env['PYTHON_ROOT'] = self.ctx.get_python_install_dir(arch.arch) env['CFLAGS'] += ' -I' + join(libzbar_dir, 'include') env['LDFLAGS'] += ' -L' + join(libzbar_dir, 'zbar', '.libs') env['LIBS'] = env.get('LIBS', '') + ' -landroid -lzbar' diff --git a/p4a/pythonforandroid/recipes/pyzmq/__init__.py b/p4a/pythonforandroid/recipes/pyzmq/__init__.py index 5f9614d..41addc8 100644 --- a/p4a/pythonforandroid/recipes/pyzmq/__init__.py +++ b/p4a/pythonforandroid/recipes/pyzmq/__init__.py @@ -10,16 +10,16 @@ import glob class PyZMQRecipe(CythonRecipe): name = 'pyzmq' - version = 'master' - url = 'https://github.com/zeromq/pyzmq/archive/{version}.zip' + version = '20.0.0' + url = 'https://github.com/zeromq/pyzmq/archive/v{version}.zip' site_packages_name = 'zmq' - depends = ['libzmq'] + depends = ['setuptools', 'libzmq'] cython_args = ['-Izmq/utils', '-Izmq/backend/cython', '-Izmq/devices'] def get_recipe_env(self, arch=None): - env = super(PyZMQRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) # TODO: fix hardcoded path # This is required to prevent issue with _io.so import. # hostpython = self.get_recipe('hostpython2', self.ctx) @@ -43,9 +43,9 @@ class PyZMQRecipe(CythonRecipe): [global] zmq_prefix = {} skip_check_zmq = True -""".format(libzmq_prefix)) +""".format(libzmq_prefix).encode()) - return super(PyZMQRecipe, self).build_cython_components(arch) + return super().build_cython_components(arch) with current_directory(self.get_build_dir(arch.arch)): hostpython = sh.Command(self.hostpython_location) diff --git a/p4a/pythonforandroid/recipes/regex/__init__.py b/p4a/pythonforandroid/recipes/regex/__init__.py index 9533905..6ac9148 100644 --- a/p4a/pythonforandroid/recipes/regex/__init__.py +++ b/p4a/pythonforandroid/recipes/regex/__init__.py @@ -3,10 +3,11 @@ from pythonforandroid.recipe import CompiledComponentsPythonRecipe class RegexRecipe(CompiledComponentsPythonRecipe): name = 'regex' - version = '2017.07.28' - url = 'https://pypi.python.org/packages/d1/23/5fa829706ee1d4452552eb32e0bfc1039553e01f50a8754c6f7152e85c1b/regex-{version}.tar.gz' + version = '2019.06.08' + url = 'https://pypi.python.org/packages/source/r/regex/regex-{version}.tar.gz' # noqa depends = ['setuptools'] + call_hostpython_via_targetpython = False recipe = RegexRecipe() diff --git a/p4a/pythonforandroid/recipes/reportlab/__init__.py b/p4a/pythonforandroid/recipes/reportlab/__init__.py index d5e8001..60f1a07 100644 --- a/p4a/pythonforandroid/recipes/reportlab/__init__.py +++ b/p4a/pythonforandroid/recipes/reportlab/__init__.py @@ -6,14 +6,14 @@ from pythonforandroid.logger import (info, shprint) class ReportLabRecipe(CompiledComponentsPythonRecipe): - version = 'c088826211ca' - url = 'https://bitbucket.org/rptlab/reportlab/get/{version}.tar.gz' + version = 'fe660f227cac' + url = 'https://hg.reportlab.com/hg-public/reportlab/archive/{version}.tar.gz' depends = ['freetype'] call_hostpython_via_targetpython = False def prebuild_arch(self, arch): if not self.is_patched(arch): - super(ReportLabRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) recipe_dir = self.get_build_dir(arch.arch) # Some versions of reportlab ship with a GPL-licensed font. 
@@ -22,9 +22,9 @@ class ReportLabRecipe(CompiledComponentsPythonRecipe): font_dir = os.path.join(recipe_dir, "src", "reportlab", "fonts") if os.path.exists(font_dir): - for l in os.listdir(font_dir): - if l.lower().startswith('darkgarden'): - os.remove(os.path.join(font_dir, l)) + for file in os.listdir(font_dir): + if file.lower().startswith('darkgarden'): + os.remove(os.path.join(font_dir, file)) # Apply patches: self.apply_patch('patches/fix-setup.patch', arch.arch) diff --git a/p4a/pythonforandroid/recipes/scipy/__init__.py b/p4a/pythonforandroid/recipes/scipy/__init__.py new file mode 100644 index 0000000..455a988 --- /dev/null +++ b/p4a/pythonforandroid/recipes/scipy/__init__.py @@ -0,0 +1,81 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe +from multiprocessing import cpu_count +from os.path import join +from os import environ +from pythonforandroid.util import build_platform + + +def arch_to_toolchain(arch): + if 'arm' in arch.arch: + return arch.command_prefix + return arch.arch + + +class ScipyRecipe(CompiledComponentsPythonRecipe): + + version = '1.8.1' + url = f'https://github.com/scipy/scipy/releases/download/v{version}/scipy-{version}.zip' + site_packages_name = 'scipy' + depends = ['setuptools', 'cython', 'numpy', 'lapack', 'pybind11'] + call_hostpython_via_targetpython = False + need_stl_shared = True + + def build_compiled_components(self, arch): + self.setup_extra_args = ['-j', str(cpu_count())] + super().build_compiled_components(arch) + self.setup_extra_args = [] + + def rebuild_compiled_components(self, arch, env): + self.setup_extra_args = ['-j', str(cpu_count())] + super().rebuild_compiled_components(arch, env) + self.setup_extra_args = [] + + def get_recipe_env(self, arch): + env = super().get_recipe_env(arch) + arch_env = arch.get_env() + + env['LDFLAGS'] = arch_env['LDFLAGS'] + env['LDFLAGS'] += ' -L{} -lpython{}'.format( + self.ctx.python_recipe.link_root(arch.arch), + self.ctx.python_recipe.link_version, + ) + + ndk_dir = environ["LEGACY_NDK"] + GCC_VER = '4.9' + HOST = build_platform + suffix = '64' if '64' in arch.arch else '' + + prefix = arch.command_prefix + CLANG_BIN = f'{ndk_dir}/toolchains/llvm/prebuilt/{HOST}/bin/' + GCC = f'{ndk_dir}/toolchains/{arch_to_toolchain(arch)}-{GCC_VER}/prebuilt/{HOST}' + libgfortran = f'{GCC}/{prefix}/lib{suffix}' + numpylib = self.ctx.get_python_install_dir(arch.arch) + '/numpy' + arch_cflags = ' '.join(arch.arch_cflags) + LDSHARED_opts = f'-target {arch.target} {arch_cflags} ' + ' '.join(arch.common_ldshared) + + # TODO: add pythran support + env['SCIPY_USE_PYTHRAN'] = '0' + + lapack_dir = join(Recipe.get_recipe('lapack', self.ctx).get_build_dir(arch.arch), 'build', 'install') + env['LAPACK'] = f'{lapack_dir}/lib' + env['BLAS'] = env['LAPACK'] + + # compilers + env['F77'] = f'{GCC}/bin/{prefix}-gfortran' + env['F90'] = f'{GCC}/bin/{prefix}-gfortran' + env['CC'] = f'{CLANG_BIN}clang -target {arch.target} {arch_cflags}' + env['CXX'] = f'{CLANG_BIN}clang++ -target {arch.target} {arch_cflags}' + + # scipy expects ldshared to be a single executable without options + env['LDSHARED'] = f'{CLANG_BIN}/clang' + + # erase the default NDK C++ include options + env['CPPFLAGS'] = '-DANDROID' + + # configure linker + env['LDFLAGS'] += f' {LDSHARED_opts} -L{libgfortran} -L{numpylib}/core/lib -L{numpylib}/random/lib' + env['LDFLAGS'] += f' -l{self.stl_lib_name}' + return env + + +recipe = ScipyRecipe() diff --git a/p4a/pythonforandroid/recipes/scrypt/__init__.py 
b/p4a/pythonforandroid/recipes/scrypt/__init__.py index 26b8048..7f23539 100644 --- a/p4a/pythonforandroid/recipes/scrypt/__init__.py +++ b/p4a/pythonforandroid/recipes/scrypt/__init__.py @@ -13,7 +13,7 @@ class ScryptRecipe(CythonRecipe): """ Adds openssl recipe to include and library path. """ - env = super(ScryptRecipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) openssl_recipe = self.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) env['LDFLAGS'] += ' -L{}'.format(self.ctx.get_libs_dir(arch.arch)) diff --git a/p4a/pythonforandroid/recipes/sdl2/__init__.py b/p4a/pythonforandroid/recipes/sdl2/__init__.py index bbfadc2..78e5e1f 100644 --- a/p4a/pythonforandroid/recipes/sdl2/__init__.py +++ b/p4a/pythonforandroid/recipes/sdl2/__init__.py @@ -1,29 +1,42 @@ +from os.path import exists, join + from pythonforandroid.recipe import BootstrapNDKRecipe from pythonforandroid.toolchain import current_directory, shprint import sh class LibSDL2Recipe(BootstrapNDKRecipe): - version = "2.0.9" - url = "https://www.libsdl.org/release/SDL2-{version}.tar.gz" - md5sum = 'f2ecfba915c54f7200f504d8b48a5dfe' + version = "2.26.0" + url = "https://github.com/libsdl-org/SDL/releases/download/release-{version}/SDL2-{version}.tar.gz" + md5sum = '35bc58cfe41b8fb6c8e6646be26fa47e' dir_name = 'SDL' + patches = ['remove-extra-include.patch'] + depends = ['sdl2_image', 'sdl2_mixer', 'sdl2_ttf'] - conflicts = ['sdl', 'pygame', 'pygame_bootstrap_components'] def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=True): - env = super(LibSDL2Recipe, self).get_recipe_env( + env = super().get_recipe_env( arch=arch, with_flags_in_cc=with_flags_in_cc, with_python=with_python) env['APP_ALLOW_MISSING_DEPS'] = 'true' return env + def should_build(self, arch): + libdir = join(self.get_build_dir(arch.arch), "../..", "libs", arch.arch) + libs = ['libmain.so', 'libSDL2.so', 'libSDL2_image.so', 'libSDL2_mixer.so', 'libSDL2_ttf.so'] + return not all(exists(join(libdir, x)) for x in libs) + def build_arch(self, arch): env = self.get_recipe_env(arch) with current_directory(self.get_jni_dir()): - shprint(sh.ndk_build, "V=1", _env=env) + shprint( + sh.Command(join(self.ctx.ndk_dir, "ndk-build")), + "V=1", + "NDK_DEBUG=" + ("1" if self.ctx.build_as_debuggable else "0"), + _env=env + ) recipe = LibSDL2Recipe() diff --git a/p4a/pythonforandroid/recipes/sdl2/remove-extra-include.patch b/p4a/pythonforandroid/recipes/sdl2/remove-extra-include.patch new file mode 100644 index 0000000..f0fda08 --- /dev/null +++ b/p4a/pythonforandroid/recipes/sdl2/remove-extra-include.patch @@ -0,0 +1,12 @@ +diff -Naur SDL.orig/Android.mk SDL/Android.mk +--- SDL.orig/Android.mk 2022-11-22 07:41:32 ++++ SDL/Android.mk 2022-11-22 07:42:00 +@@ -12,7 +12,7 @@ + + LOCAL_C_INCLUDES := $(LOCAL_PATH)/include + +-LOCAL_EXPORT_C_INCLUDES := $(LOCAL_C_INCLUDES)/include ++LOCAL_EXPORT_C_INCLUDES := $(LOCAL_C_INCLUDES) + + LOCAL_SRC_FILES := \ + $(subst $(LOCAL_PATH)/,, \ diff --git a/p4a/pythonforandroid/recipes/sdl2_image/__init__.py b/p4a/pythonforandroid/recipes/sdl2_image/__init__.py index 920b3ae..a91c6f1 100644 --- a/p4a/pythonforandroid/recipes/sdl2_image/__init__.py +++ b/p4a/pythonforandroid/recipes/sdl2_image/__init__.py @@ -1,14 +1,25 @@ +import os +import sh +from pythonforandroid.logger import shprint from pythonforandroid.recipe import BootstrapNDKRecipe +from pythonforandroid.util import current_directory class LibSDL2Image(BootstrapNDKRecipe): - version = 
'2.0.4' - url = 'https://www.libsdl.org/projects/SDL_image/release/SDL2_image-{version}.tar.gz' + version = '2.6.2' + url = 'https://github.com/libsdl-org/SDL_image/releases/download/release-{version}/SDL2_image-{version}.tar.gz' dir_name = 'SDL2_image' - patches = ['toggle_jpg_png_webp.patch', - 'extra_cflags.patch', - ] + patches = ['enable-webp.patch'] + + def prebuild_arch(self, arch): + # We do not have a folder for each arch on BootstrapNDKRecipe, so we + # need to skip the external deps download if we already have done it. + external_deps_dir = os.path.join(self.get_build_dir(arch.arch), "external") + if not os.path.exists(os.path.join(external_deps_dir, "libwebp")): + with current_directory(external_deps_dir): + shprint(sh.Command("./download.sh")) + super().prebuild_arch(arch) recipe = LibSDL2Image() diff --git a/p4a/pythonforandroid/recipes/sdl2_image/enable-webp.patch b/p4a/pythonforandroid/recipes/sdl2_image/enable-webp.patch new file mode 100644 index 0000000..98d72f2 --- /dev/null +++ b/p4a/pythonforandroid/recipes/sdl2_image/enable-webp.patch @@ -0,0 +1,12 @@ +diff -Naur SDL2_image.orig/Android.mk SDL2_image/Android.mk +--- SDL2_image.orig/Android.mk 2022-10-03 20:51:52.000000000 +0200 ++++ SDL2_image/Android.mk 2022-10-03 20:52:48.000000000 +0200 +@@ -32,7 +32,7 @@ + + # Enable this if you want to support loading WebP images + # The library path should be a relative path to this directory. +-SUPPORT_WEBP ?= false ++SUPPORT_WEBP := true + WEBP_LIBRARY_PATH := external/libwebp + + diff --git a/p4a/pythonforandroid/recipes/sdl2_mixer/__init__.py b/p4a/pythonforandroid/recipes/sdl2_mixer/__init__.py index 1a8e0a9..0f02c4c 100644 --- a/p4a/pythonforandroid/recipes/sdl2_mixer/__init__.py +++ b/p4a/pythonforandroid/recipes/sdl2_mixer/__init__.py @@ -1,12 +1,17 @@ +import os + from pythonforandroid.recipe import BootstrapNDKRecipe class LibSDL2Mixer(BootstrapNDKRecipe): - version = '2.0.1' - url = 'https://www.libsdl.org/projects/SDL_mixer/release/SDL2_mixer-{version}.tar.gz' + version = '2.6.2' + url = 'https://github.com/libsdl-org/SDL_mixer/releases/download/release-{version}/SDL2_mixer-{version}.tar.gz' dir_name = 'SDL2_mixer' - patches = ['toggle_modplug_mikmod_smpeg_ogg.patch'] + def get_include_dirs(self, arch): + return [ + os.path.join(self.ctx.bootstrap.build_dir, "jni", "SDL2_mixer", "include") + ] recipe = LibSDL2Mixer() diff --git a/p4a/pythonforandroid/recipes/sdl2_ttf/__init__.py b/p4a/pythonforandroid/recipes/sdl2_ttf/__init__.py index 2d0a629..4934bd4 100644 --- a/p4a/pythonforandroid/recipes/sdl2_ttf/__init__.py +++ b/p4a/pythonforandroid/recipes/sdl2_ttf/__init__.py @@ -2,8 +2,8 @@ from pythonforandroid.recipe import BootstrapNDKRecipe class LibSDL2TTF(BootstrapNDKRecipe): - version = '2.0.14' - url = 'https://www.libsdl.org/projects/SDL_ttf/release/SDL2_ttf-{version}.tar.gz' + version = '2.20.1' + url = 'https://github.com/libsdl-org/SDL_ttf/releases/download/release-{version}/SDL2_ttf-{version}.tar.gz' dir_name = 'SDL2_ttf' diff --git a/p4a/pythonforandroid/recipes/secp256k1/__init__.py b/p4a/pythonforandroid/recipes/secp256k1/__init__.py index 8898031..1b30642 100644 --- a/p4a/pythonforandroid/recipes/secp256k1/__init__.py +++ b/p4a/pythonforandroid/recipes/secp256k1/__init__.py @@ -10,16 +10,21 @@ class Secp256k1Recipe(CppCompiledComponentsPythonRecipe): call_hostpython_via_targetpython = False depends = [ - 'openssl', ('hostpython3', 'hostpython2', 'hostpython3crystax'), - ('python2', 'python3', 'python3crystax'), 'setuptools', - 'libffi', 'cffi', 
'libsecp256k1'] + 'openssl', + 'hostpython3', + 'python3', + 'setuptools', + 'libffi', + 'cffi', + 'libsecp256k1' + ] patches = [ "cross_compile.patch", "drop_setup_requires.patch", "pkg-config.patch", "find_lib.patch", "no-download.patch"] def get_recipe_env(self, arch=None): - env = super(Secp256k1Recipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) libsecp256k1 = self.get_recipe('libsecp256k1', self.ctx) libsecp256k1_dir = libsecp256k1.get_build_dir(arch.arch) env['CFLAGS'] += ' -I' + os.path.join(libsecp256k1_dir, 'include') diff --git a/p4a/pythonforandroid/recipes/setuptools/__init__.py b/p4a/pythonforandroid/recipes/setuptools/__init__.py index 6a4b650..8190f8e 100644 --- a/p4a/pythonforandroid/recipes/setuptools/__init__.py +++ b/p4a/pythonforandroid/recipes/setuptools/__init__.py @@ -2,14 +2,10 @@ from pythonforandroid.recipe import PythonRecipe class SetuptoolsRecipe(PythonRecipe): - version = '40.0.0' - url = 'https://pypi.python.org/packages/source/s/setuptools/setuptools-{version}.zip' + version = '51.3.3' + url = 'https://pypi.python.org/packages/source/s/setuptools/setuptools-{version}.tar.gz' call_hostpython_via_targetpython = False install_in_hostpython = True - depends = [('python2', 'python2legacy', 'python3', 'python3crystax')] - # this recipe seems to control the dependency graph in some way, because - # if removed the python2legacy recipe fails to solve the dependency order - # when using the sdl2 bootstrap...so be careful removing this line!!! recipe = SetuptoolsRecipe() diff --git a/p4a/pythonforandroid/recipes/shapely/__init__.py b/p4a/pythonforandroid/recipes/shapely/__init__.py index e0b0937..fb3da7c 100644 --- a/p4a/pythonforandroid/recipes/shapely/__init__.py +++ b/p4a/pythonforandroid/recipes/shapely/__init__.py @@ -1,21 +1,33 @@ -from pythonforandroid.recipe import Recipe, CythonRecipe +from pythonforandroid.recipe import CythonRecipe +from os.path import join class ShapelyRecipe(CythonRecipe): - version = '1.5' - url = 'https://github.com/Toblerity/Shapely/archive/master.zip' + version = '1.7a1' + url = 'https://github.com/Toblerity/Shapely/archive/{version}.tar.gz' depends = ['setuptools', 'libgeos'] + call_hostpython_via_targetpython = False - patches = ['setup.patch'] # Patch to force setup to fail when C extention fails to build + # Patch to avoid libgeos check (because it fails), insert environment + # variables for our libgeos build (includes, lib paths...) 
and force + # the cython compilation to raise an error if it fails + patches = ['setup.patch'] - # setup_extra_args = ['sdist'] # DontForce Cython + # Don't Force Cython + # setup_extra_args = ['sdist'] + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super().get_recipe_env(arch) + + libgeos_install = join(self.get_recipe( + 'libgeos', self.ctx).get_build_dir(arch.arch), 'install_target') + # All these `GEOS_X` variables should be strings, separated + # by commas in case we need to pass more than one value + env['GEOS_INCLUDE_DIRS'] = join(libgeos_install, 'include') + env['GEOS_LIBRARY_DIRS'] = join(libgeos_install, 'lib') + env['GEOS_LIBRARIES'] = 'geos_c,geos' - def get_recipe_env(self, arch, with_flags_in_cc=True): - """ Add libgeos headers to path """ - env = super(ShapelyRecipe, self).get_recipe_env(arch, with_flags_in_cc) - libgeos_dir = Recipe.get_recipe('libgeos', self.ctx).get_build_dir(arch.arch) - env['CFLAGS'] += " -I{}/dist/include".format(libgeos_dir) return env diff --git a/p4a/pythonforandroid/recipes/shapely/setup.patch b/p4a/pythonforandroid/recipes/shapely/setup.patch index 9523f35..7fd1ca9 100644 --- a/p4a/pythonforandroid/recipes/shapely/setup.patch +++ b/p4a/pythonforandroid/recipes/shapely/setup.patch @@ -1,12 +1,44 @@ -*** shapely/setup.py 2016-06-29 11:29:49.000000000 -0400 ---- b/setup.py 2016-07-09 01:51:37.759670990 -0400 -*************** -*** 359,364 **** ---- 359,365 ---- - construct_build_ext(existing_build_ext) - setup(ext_modules=ext_modules, **setup_args) - except BuildFailed as ex: -+ raise # Force python only build to fail - BUILD_EXT_WARNING = "The C extension could not be compiled, " \ - "speedups are not enabled." - log.warn(ex) +This patch does three things: + - disables the libgeos check because, even with the proper env variables + set, it fails to load our libgeos library, so we skip it (it's not + mandatory for the cythonizing). + - reads some environment variables in the setup.py file, so we can pass + our libgeos information (includes, lib path and libraries). + - forces an error to be raised when a cython file fails to compile (our + current build system relies on this failure to do the proper + `cythonizing`; if we don't raise the error, we end up with the package + installed without the speed optimizations). +--- Shapely-1.7a1/setup.py.orig 2018-07-29 22:53:13.000000000 +0200 ++++ Shapely-1.7a1/setup.py 2019-02-24 14:26:19.178610660 +0100 +@@ -82,8 +82,8 @@ if not (py_version == (2, 7) or py_versi + + # Get geos_version from GEOS dynamic library, which depends on + # GEOS_LIBRARY_PATH and/or GEOS_CONFIG environment variables +-from shapely._buildcfg import geos_version_string, geos_version, \ +- geos_config, get_geos_config ++# from shapely._buildcfg import geos_version_string, geos_version, \ ++# geos_config, get_geos_config + + logging.basicConfig() + log = logging.getLogger(__file__) +@@ -248,9 +248,9 @@ if sys.platform == 'win32': + setup_args['package_data']['shapely'].append('shapely/DLLs/*.dll') + + # Prepare build opts and args for the speedups extension module.
+-include_dirs = [] +-library_dirs = [] +-libraries = [] ++include_dirs = os.environ.get('GEOS_INCLUDE_DIRS', '').split(',') ++library_dirs = os.environ.get('GEOS_LIBRARY_DIRS', '').split(',') ++libraries = os.environ.get('GEOS_LIBRARIES', '').split(',') + extra_link_args = [] + + # If NO_GEOS_CONFIG is set in the environment, geos-config will not +@@ -375,6 +375,7 @@ try: + construct_build_ext(existing_build_ext) + setup(ext_modules=ext_modules, **setup_args) + except BuildFailed as ex: ++ raise # Force python only build to fail + BUILD_EXT_WARNING = "The C extension could not be compiled, " \ + "speedups are not enabled." + log.warn(ex) diff --git a/p4a/pythonforandroid/recipes/six/__init__.py b/p4a/pythonforandroid/recipes/six/__init__.py index 91adc6c..3be8ce7 100644 --- a/p4a/pythonforandroid/recipes/six/__init__.py +++ b/p4a/pythonforandroid/recipes/six/__init__.py @@ -1,14 +1,10 @@ - from pythonforandroid.recipe import PythonRecipe class SixRecipe(PythonRecipe): - version = '1.9.0' + version = '1.15.0' url = 'https://pypi.python.org/packages/source/s/six/six-{version}.tar.gz' - depends = [('python2', 'python2legacy', 'python3', 'python3crystax')] - # this recipe seems to control the dependency graph in some way, because - # if removed the python2legacy recipe fails to solve the dependency order - # when using the pygame bootstrap...so be careful removing this line!!! + depends = ['setuptools'] recipe = SixRecipe() diff --git a/p4a/pythonforandroid/recipes/snappy/__init__.py b/p4a/pythonforandroid/recipes/snappy/__init__.py index 4ca61a2..c57f797 100644 --- a/p4a/pythonforandroid/recipes/snappy/__init__.py +++ b/p4a/pythonforandroid/recipes/snappy/__init__.py @@ -1,13 +1,28 @@ -from pythonforandroid.toolchain import Recipe +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import shprint +from pythonforandroid.util import current_directory +from os.path import join +import sh class SnappyRecipe(Recipe): - version = '1.1.3' - url = 'https://github.com/google/snappy/releases/download/{version}/snappy-{version}.tar.gz' + version = '1.1.7' + url = 'https://github.com/google/snappy/archive/{version}.tar.gz' + built_libraries = {'libsnappy.so': '.'} - def should_build(self, arch): - # Only download to use in leveldb recipe - return False + def build_arch(self, arch): + env = self.get_recipe_env(arch) + source_dir = self.get_build_dir(arch.arch) + with current_directory(source_dir): + shprint(sh.cmake, source_dir, + '-DANDROID_ABI={}'.format(arch.arch), + '-DANDROID_NATIVE_API_LEVEL={}'.format(self.ctx.ndk_api), + '-DCMAKE_TOOLCHAIN_FILE={}'.format( + join(self.ctx.ndk_dir, 'build', 'cmake', + 'android.toolchain.cmake')), + '-DBUILD_SHARED_LIBS=1', + _env=env) + shprint(sh.make, _env=env) recipe = SnappyRecipe() diff --git a/p4a/pythonforandroid/recipes/sqlalchemy/__init__.py b/p4a/pythonforandroid/recipes/sqlalchemy/__init__.py index 974667a..9837a59 100644 --- a/p4a/pythonforandroid/recipes/sqlalchemy/__init__.py +++ b/p4a/pythonforandroid/recipes/sqlalchemy/__init__.py @@ -3,8 +3,9 @@ from pythonforandroid.recipe import CompiledComponentsPythonRecipe class SQLAlchemyRecipe(CompiledComponentsPythonRecipe): name = 'sqlalchemy' - version = '1.0.9' + version = '1.3.3' url = 'https://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-{version}.tar.gz' + call_hostpython_via_targetpython = False depends = ['setuptools'] diff --git a/p4a/pythonforandroid/recipes/sqlalchemy/zipsafe.patch b/p4a/pythonforandroid/recipes/sqlalchemy/zipsafe.patch index 1820d09..46bdf60 
100644 --- a/p4a/pythonforandroid/recipes/sqlalchemy/zipsafe.patch +++ b/p4a/pythonforandroid/recipes/sqlalchemy/zipsafe.patch @@ -1,12 +1,10 @@ -diff --git a/setup.py b/setup.py -index 09b524c..1e65772 100644 ---- a/setup.py -+++ b/setup.py -@@ -125,6 +125,7 @@ def run_setup(with_cext): - setup(name="SQLAlchemy", - version=VERSION, - description="Database Abstraction Library", -+ zip_safe=False, - author="Mike Bayer", - author_email="mike_mp@zzzcomputing.com", - url="http://www.sqlalchemy.org", +--- a/setup.py 2019-04-15 17:45:03.000000000 +0200 ++++ b/setup.py 2019-04-16 20:12:19.056710749 +0200 +@@ -145,6 +145,7 @@ + name="SQLAlchemy", + version=VERSION, + description="Database Abstraction Library", ++ zip_safe=False, + author="Mike Bayer", + author_email="mike_mp@zzzcomputing.com", + url="http://www.sqlalchemy.org", diff --git a/p4a/pythonforandroid/recipes/sqlite3/__init__.py b/p4a/pythonforandroid/recipes/sqlite3/__init__.py index cfdcb0f..955d808 100644 --- a/p4a/pythonforandroid/recipes/sqlite3/__init__.py +++ b/p4a/pythonforandroid/recipes/sqlite3/__init__.py @@ -5,29 +5,29 @@ import sh class Sqlite3Recipe(NDKRecipe): - version = '3.15.1' + version = '3.35.5' # Don't forget to change the URL when changing the version - url = 'https://www.sqlite.org/2016/sqlite-amalgamation-3150100.zip' + url = 'https://www.sqlite.org/2021/sqlite-amalgamation-3350500.zip' generated_libraries = ['sqlite3'] def should_build(self, arch): return not self.has_libs(arch, 'libsqlite3.so') def prebuild_arch(self, arch): - super(Sqlite3Recipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) # Copy the Android make file sh.mkdir('-p', join(self.get_build_dir(arch.arch), 'jni')) shutil.copyfile(join(self.get_recipe_dir(), 'Android.mk'), join(self.get_build_dir(arch.arch), 'jni/Android.mk')) def build_arch(self, arch, *extra_args): - super(Sqlite3Recipe, self).build_arch(arch) + super().build_arch(arch) # Copy the shared library shutil.copyfile(join(self.get_build_dir(arch.arch), 'libs', arch.arch, 'libsqlite3.so'), join(self.ctx.get_libs_dir(arch.arch), 'libsqlite3.so')) def get_recipe_env(self, arch): - env = super(Sqlite3Recipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) env['NDK_PROJECT_PATH'] = self.get_build_dir(arch.arch) return env diff --git a/p4a/pythonforandroid/recipes/tflite-runtime/CMakeLists.patch b/p4a/pythonforandroid/recipes/tflite-runtime/CMakeLists.patch new file mode 100644 index 0000000..f39d9b3 --- /dev/null +++ b/p4a/pythonforandroid/recipes/tflite-runtime/CMakeLists.patch @@ -0,0 +1,28 @@ +--- tflite-runtime/tensorflow/lite/CMakeLists.txt 2022-01-27 17:29:49.460000000 -1000 ++++ CMakeLists.txt 2022-02-21 15:03:09.568367300 -1000 +@@ -220,6 +220,9 @@ + if(NOT "${CMAKE_SYSTEM_NAME}" STREQUAL "iOS") + list(FILTER TFLITE_SRCS EXCLUDE REGEX ".*minimal_logging_ios\\.cc$") + endif() ++if("${CMAKE_SYSTEM_NAME}" STREQUAL "Android") ++ list(FILTER TFLITE_SRCS EXCLUDE REGEX ".*minimal_logging_default\\.cc$") ++endif() + populate_tflite_source_vars("core" TFLITE_CORE_SRCS) + populate_tflite_source_vars("core/api" TFLITE_CORE_API_SRCS) + populate_tflite_source_vars("c" TFLITE_C_SRCS) +@@ -505,6 +508,7 @@ + ruy + ${CMAKE_DL_LIBS} + ${TFLITE_TARGET_DEPENDENCIES} ++ ${ANDROID_LOG_LIB} + ) + + if (NOT BUILD_SHARED_LIBS) +@@ -550,6 +554,7 @@ + tensorflow-lite + ${CMAKE_DL_LIBS} + ) ++ + target_compile_options(_pywrap_tensorflow_interpreter_wrapper + PUBLIC ${TFLITE_TARGET_PUBLIC_OPTIONS} + PRIVATE ${TFLITE_TARGET_PRIVATE_OPTIONS} diff --git 
a/p4a/pythonforandroid/recipes/tflite-runtime/__init__.py b/p4a/pythonforandroid/recipes/tflite-runtime/__init__.py new file mode 100644 index 0000000..1d20886 --- /dev/null +++ b/p4a/pythonforandroid/recipes/tflite-runtime/__init__.py @@ -0,0 +1,108 @@ +from pythonforandroid.recipe import PythonRecipe, current_directory, \ + shprint, info_main, warning +from pythonforandroid.logger import error +from os.path import join +import sh + + +class TFLiteRuntimeRecipe(PythonRecipe): + ############################################################### + # + # tflite-runtime README: + # https://github.com/Android-for-Python/c4k_tflite_example/blob/main/README.md + # + # Recipe build references: + # https://developer.android.com/ndk/guides/cmake + # https://developer.android.com/ndk/guides/cpu-arm-neon#cmake + # https://www.tensorflow.org/lite/guide/build_cmake + # https://www.tensorflow.org/lite/guide/build_cmake_arm + # + # Tested using cmake 3.16.3 probably requires cmake >= 3.13 + # + # THIS RECIPE DOES NOT BUILD x86_64, USE X86 FOR AN EMULATOR + # + ############################################################### + + version = '2.8.0' + url = 'https://github.com/tensorflow/tensorflow/archive/refs/tags/v{version}.zip' + depends = ['pybind11', 'numpy'] + patches = ['CMakeLists.patch', 'build_with_cmake.patch'] + site_packages_name = 'tflite-runtime' + call_hostpython_via_targetpython = False + + def should_build(self, arch): + name = self.folder_name.replace('-', '_') + + if self.ctx.has_package(name, arch): + info_main('Python package already exists in site-packages') + return False + info_main('{} apparently isn\'t already in site-packages'.format(name)) + return True + + def build_arch(self, arch): + if arch.arch == 'x86_64': + warning("******** tflite-runtime x86_64 will not be built *******") + warning("Expect one of these app run time error messages:") + warning("ModuleNotFoundError: No module named 'tensorflow'") + warning("ModuleNotFoundError: No module named 'tflite_runtime'") + warning("Use x86 not x86_64") + return + + env = self.get_recipe_env(arch) + + # Directories + root_dir = self.get_build_dir(arch.arch) + script_dir = join(root_dir, + 'tensorflow', 'lite', 'tools', 'pip_package') + build_dir = join(script_dir, 'gen', 'tflite_pip', 'python3') + + # Includes + python_include_dir = self.ctx.python_recipe.include_root(arch.arch) + pybind11_recipe = self.get_recipe('pybind11', self.ctx) + pybind11_include_dir = pybind11_recipe.get_include_dir(arch) + numpy_include_dir = join(self.ctx.get_site_packages_dir(arch), + 'numpy', 'core', 'include') + includes = ' -I' + python_include_dir + \ + ' -I' + numpy_include_dir + \ + ' -I' + pybind11_include_dir + + # Scripts + build_script = join(script_dir, 'build_pip_package_with_cmake.sh') + toolchain = join(self.ctx.ndk_dir, + 'build', 'cmake', 'android.toolchain.cmake') + + # Build + ######## + with current_directory(root_dir): + env.update({ + 'TENSORFLOW_TARGET': 'android', + 'CMAKE_TOOLCHAIN_FILE': toolchain, + 'ANDROID_PLATFORM': str(self.ctx.ndk_api), + 'ANDROID_ABI': arch.arch, + 'WRAPPER_INCLUDES': includes, + 'CMAKE_SHARED_LINKER_FLAGS': env['LDFLAGS'], + }) + + try: + info_main('tflite-runtime is building...') + info_main('Expect this to take at least 5 minutes...') + cmd = sh.Command(build_script) + cmd(_env=env) + except sh.ErrorReturnCode as e: + error(str(e.stderr)) + exit(1) + + # Install + ########## + info_main('Installing tflite-runtime into site-packages') + with current_directory(build_dir): + hostpython = 
sh.Command(self.hostpython_location) + install_dir = self.ctx.get_python_install_dir(arch.arch) + env['PACKAGE_VERSION'] = self.version + shprint(hostpython, 'setup.py', 'install', '-O2', + '--root={}'.format(install_dir), + '--install-lib=.', + _env=env) + + +recipe = TFLiteRuntimeRecipe() diff --git a/p4a/pythonforandroid/recipes/tflite-runtime/build_with_cmake.patch b/p4a/pythonforandroid/recipes/tflite-runtime/build_with_cmake.patch new file mode 100644 index 0000000..9670e18 --- /dev/null +++ b/p4a/pythonforandroid/recipes/tflite-runtime/build_with_cmake.patch @@ -0,0 +1,48 @@ +--- tflite-runtime/tensorflow/lite/tools/pip_package/build_pip_package_with_cmake.sh 2022-01-22 08:57:16.000000000 -1000 ++++ build_pip_package_with_cmake.sh 2022-03-02 18:19:05.185550500 -1000 +@@ -28,7 +28,7 @@ + export TENSORFLOW_TARGET="armhf" + fi + PYTHON_INCLUDE=$(${PYTHON} -c "from sysconfig import get_paths as gp; print(gp()['include'])") +-PYBIND11_INCLUDE=$(${PYTHON} -c "import pybind11; print (pybind11.get_include())") ++# PYBIND11_INCLUDE=$(${PYTHON} -c "import pybind11; print (pybind11.get_include())") + export CROSSTOOL_PYTHON_INCLUDE_PATH=${PYTHON_INCLUDE} + + # Fix container image for cross build. +@@ -58,7 +58,7 @@ + "${TENSORFLOW_LITE_DIR}/python/metrics/metrics_portable.py" \ + "${BUILD_DIR}/tflite_runtime" + echo "__version__ = '${PACKAGE_VERSION}'" >> "${BUILD_DIR}/tflite_runtime/__init__.py" +-echo "__git_version__ = '$(git -C "${TENSORFLOW_DIR}" describe)'" >> "${BUILD_DIR}/tflite_runtime/__init__.py" ++echo "__git_version__ = '${PACKAGE_VERSION}'" >> "${BUILD_DIR}/tflite_runtime/__init__.py" + + # Build python interpreter_wrapper. + mkdir -p "${BUILD_DIR}/cmake_build" +@@ -111,6 +111,18 @@ + -DCMAKE_CXX_FLAGS="${BUILD_FLAGS}" \ + "${TENSORFLOW_LITE_DIR}" + ;; ++ android) ++ BUILD_FLAGS=${BUILD_FLAGS:-"${WRAPPER_INCLUDES}"} ++ cmake \ ++ -DCMAKE_SYSTEM_NAME=Android \ ++ -DANDROID_ARM_NEON=ON \ ++ -DCMAKE_CXX_FLAGS="${BUILD_FLAGS}" \ ++ -DCMAKE_SHARED_LINKER_FLAGS="${CMAKE_SHARED_LINKER_FLAGS}" \ ++ -DCMAKE_TOOLCHAIN_FILE="${CMAKE_TOOLCHAIN_FILE}" \ ++ -DANDROID_PLATFORM="${ANDROID_PLATFORM}" \ ++ -DANDROID_ABI="${ANDROID_ABI}" \ ++ "${TENSORFLOW_LITE_DIR}" ++ ;; + *) + BUILD_FLAGS=${BUILD_FLAGS:-"-I${PYTHON_INCLUDE} -I${PYBIND11_INCLUDE}"} + cmake \ +@@ -162,7 +174,7 @@ + ${PYTHON} setup.py bdist --plat-name=${WHEEL_PLATFORM_NAME} \ + bdist_wheel --plat-name=${WHEEL_PLATFORM_NAME} + else +- ${PYTHON} setup.py bdist bdist_wheel ++ ${PYTHON} setup.py bdist + fi + ;; + esac diff --git a/p4a/pythonforandroid/recipes/twisted/__init__.py b/p4a/pythonforandroid/recipes/twisted/__init__.py index ca22279..0c390a5 100644 --- a/p4a/pythonforandroid/recipes/twisted/__init__.py +++ b/p4a/pythonforandroid/recipes/twisted/__init__.py @@ -1,26 +1,35 @@ +import os +import shutil + from pythonforandroid.recipe import CythonRecipe class TwistedRecipe(CythonRecipe): - version = '17.9.0' + version = '20.3.0' url = 'https://github.com/twisted/twisted/archive/twisted-{version}.tar.gz' depends = ['setuptools', 'zope_interface', 'incremental', 'constantly'] - patches = ['incremental.patch'] + patches = ['incremental.patch', 'remove_tests.patch'] call_hostpython_via_targetpython = False install_in_hostpython = False def prebuild_arch(self, arch): - super(TwistedRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) # TODO Need to whitelist tty.pyo and termios.so here - print('Should remove twisted tests etc. 
here, but skipping for now') + + # remove the unit test dirs + source_dir = os.path.join(self.get_build_dir(arch.arch), 'src/twisted') + for item in os.walk(source_dir): + if os.path.basename(item[0]) == 'test': + full_path = os.path.join(source_dir, item[0]) + shutil.rmtree(full_path, ignore_errors=True) def get_recipe_env(self, arch): - env = super(TwistedRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) # We add BUILDLIB_PATH to PYTHONPATH so twisted can find _io.so env['PYTHONPATH'] = ':'.join([ - self.ctx.get_site_packages_dir(), + self.ctx.get_site_packages_dir(arch), env['BUILDLIB_PATH'], ]) return env diff --git a/p4a/pythonforandroid/recipes/twisted/incremental.patch b/p4a/pythonforandroid/recipes/twisted/incremental.patch index 85e5307..61656fc 100644 --- a/p4a/pythonforandroid/recipes/twisted/incremental.patch +++ b/p4a/pythonforandroid/recipes/twisted/incremental.patch @@ -1,13 +1,15 @@ -diff -Naur twisted-twisted-17.9.0/src/twisted/python/_setup.py twisted-twisted-17.9.0_patched/src/twisted/python/_setup.py ---- twisted-twisted-17.9.0/src/twisted/python/_setup.py 2017-09-23 07:56:08.000000000 +0200 -+++ twisted-twisted-17.9.0_patched/src/twisted/python/_setup.py 2018-10-05 11:06:23.305860722 +0200 -@@ -227,14 +227,11 @@ - requirements = ["zope.interface >= 3.6.0"] - - requirements.append("constantly >= 15.1") -- requirements.append("incremental >= 16.10.1") - requirements.append("Automat >= 0.3.0") - requirements.append("hyperlink >= 17.1.1") +diff -Naur twisted-twisted-19.7.0/src/twisted/python/_setup.py twisted-twisted-19.7.0_patched/src/twisted/python/_setup.py +--- twisted-twisted-19.7.0/src/twisted/python/_setup.py 2019-07-28 11:17:29.000000000 +0200 ++++ twisted-twisted-19.7.0_patched/src/twisted/python/_setup.py 2019-10-21 22:10:03.643068863 +0200 +@@ -282,7 +282,6 @@ + requirements = [ + "zope.interface >= 4.4.2", + "constantly >= 15.1", +- "incremental >= 16.10.1", + "Automat >= 0.3.0", + "hyperlink >= 17.1.1", + "PyHamcrest >= 1.9.0", +@@ -291,8 +290,6 @@ arguments.update(dict( packages=find_packages("src"), diff --git a/p4a/pythonforandroid/recipes/twisted/remove_tests.patch b/p4a/pythonforandroid/recipes/twisted/remove_tests.patch new file mode 100644 index 0000000..492062b --- /dev/null +++ b/p4a/pythonforandroid/recipes/twisted/remove_tests.patch @@ -0,0 +1,16 @@ +diff --git a/src/twisted/python/_setup.py b/src/twisted/python/_setup.py +index 32cb096c7..a607fef07 100644 +--- a/src/twisted/python/_setup.py ++++ b/src/twisted/python/_setup.py +@@ -160,11 +160,6 @@ class ConditionalExtension(Extension, object): + + # The C extensions used for Twisted. 
+ _EXTENSIONS = [ +- ConditionalExtension( +- "twisted.test.raiser", +- sources=["src/twisted/test/raiser.c"], +- condition=lambda _: _isCPython), +- + ConditionalExtension( + "twisted.internet.iocpreactor.iocpsupport", + sources=[ diff --git a/p4a/pythonforandroid/recipes/vlc/__init__.py b/p4a/pythonforandroid/recipes/vlc/__init__.py index 66f51b9..490c4f2 100644 --- a/p4a/pythonforandroid/recipes/vlc/__init__.py +++ b/p4a/pythonforandroid/recipes/vlc/__init__.py @@ -18,7 +18,7 @@ class VlcRecipe(Recipe): aars = {} # for future use of multiple arch def prebuild_arch(self, arch): - super(VlcRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) build_dir = self.get_build_dir(arch.arch) port_dir = join(build_dir, 'vlc-port-android') if self.ENV_LIBVLC_AAR in environ: @@ -50,7 +50,7 @@ class VlcRecipe(Recipe): # _tail=20, _critical=True) def build_arch(self, arch): - super(VlcRecipe, self).build_arch(arch) + super().build_arch(arch) build_dir = self.get_build_dir(arch.arch) port_dir = join(build_dir, 'vlc-port-android') aar = self.aars[arch] diff --git a/p4a/pythonforandroid/recipes/xeddsa/__init__.py b/p4a/pythonforandroid/recipes/xeddsa/__init__.py index eb0e2ae..d386f92 100644 --- a/p4a/pythonforandroid/recipes/xeddsa/__init__.py +++ b/p4a/pythonforandroid/recipes/xeddsa/__init__.py @@ -24,12 +24,9 @@ class XedDSARecipe(CythonRecipe): hostpython, 'ref10/build.py', _env=env ) - python_version = self.ctx.python_recipe.version[0:3] - site_packages_dir = 'lib/python{python_version}/site-packages'.format( - python_version=python_version) - site_packages = join(self.ctx.get_python_install_dir(), - site_packages_dir) - shprint(sh.cp, '_crypto_sign.so', site_packages) + # the library could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so` + # or simply `_crypto_sign.so` depending on the platform/distribution + sh.cp('-a', sh.glob('_crypto_sign*.so'), self.ctx.get_site_packages_dir(arch)) self.install_python_package(arch) diff --git a/p4a/pythonforandroid/recipes/zbar/__init__.py b/p4a/pythonforandroid/recipes/zbar/__init__.py index 62aa85b..c24971e 100644 --- a/p4a/pythonforandroid/recipes/zbar/__init__.py +++ b/p4a/pythonforandroid/recipes/zbar/__init__.py @@ -20,10 +20,10 @@ class ZBarRecipe(PythonRecipe): patches = ["zbar-0.10-python-crash.patch"] def get_recipe_env(self, arch=None, with_flags_in_cc=True): - env = super(ZBarRecipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) libzbar = self.get_recipe('libzbar', self.ctx) libzbar_dir = libzbar.get_build_dir(arch.arch) - env['PYTHON_ROOT'] = self.ctx.get_python_install_dir() + env['PYTHON_ROOT'] = self.ctx.get_python_install_dir(arch.arch) env['CFLAGS'] += ' -I' + join(libzbar_dir, 'include') env['LDFLAGS'] += ' -L' + join(libzbar_dir, 'zbar', '.libs') env['LIBS'] = env.get('LIBS', '') + ' -landroid -lzbar' diff --git a/p4a/pythonforandroid/recipes/zbarlight/__init__.py b/p4a/pythonforandroid/recipes/zbarlight/__init__.py index 966c7fb..36365cd 100644 --- a/p4a/pythonforandroid/recipes/zbarlight/__init__.py +++ b/p4a/pythonforandroid/recipes/zbarlight/__init__.py @@ -13,10 +13,10 @@ class ZBarLightRecipe(PythonRecipe): depends = ['setuptools', 'libzbar'] def get_recipe_env(self, arch=None, with_flags_in_cc=True): - env = super(ZBarLightRecipe, self).get_recipe_env(arch, with_flags_in_cc) + env = super().get_recipe_env(arch, with_flags_in_cc) libzbar = self.get_recipe('libzbar', self.ctx) libzbar_dir = libzbar.get_build_dir(arch.arch) - env['PYTHON_ROOT'] = 
self.ctx.get_python_install_dir() + env['PYTHON_ROOT'] = self.ctx.get_python_install_dir(arch.arch) env['CFLAGS'] += ' -I' + join(libzbar_dir, 'include') env['LDFLAGS'] += ' -L' + join(libzbar_dir, 'zbar', '.libs') env['LIBS'] = env.get('LIBS', '') + ' -landroid -lzbar' diff --git a/p4a/pythonforandroid/recipes/zeroconf/__init__.py b/p4a/pythonforandroid/recipes/zeroconf/__init__.py index 5ca5708..a23bd6e 100644 --- a/p4a/pythonforandroid/recipes/zeroconf/__init__.py +++ b/p4a/pythonforandroid/recipes/zeroconf/__init__.py @@ -3,9 +3,9 @@ from pythonforandroid.recipe import PythonRecipe class ZeroconfRecipe(PythonRecipe): name = 'zeroconf' - version = '0.17.4' + version = '0.24.5' url = 'https://pypi.python.org/packages/source/z/zeroconf/zeroconf-{version}.tar.gz' - depends = ['setuptools', 'enum34', 'six'] + depends = ['setuptools', 'ifaddr', 'typing;python_version<"3.5"'] call_hostpython_via_targetpython = False diff --git a/p4a/pythonforandroid/recipes/zope/__init__.py b/p4a/pythonforandroid/recipes/zope/__init__.py index 579a760..9c5ab7b 100644 --- a/p4a/pythonforandroid/recipes/zope/__init__.py +++ b/p4a/pythonforandroid/recipes/zope/__init__.py @@ -6,22 +6,25 @@ from os.path import join class ZopeRecipe(PythonRecipe): name = 'zope' version = '4.1.3' - url = 'http://pypi.python.org/packages/source/z/zope.interface/zope.interface-{version}.tar.gz' + url = 'https://pypi.python.org/packages/source/z/zope.interface/zope.interface-{version}.tar.gz' depends = [] def get_recipe_env(self, arch): - env = super(ZopeRecipe, self).get_recipe_env(arch) + env = super().get_recipe_env(arch) # These are in the old zope recipe but seem like they shouldn't actually be necessary env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format( self.ctx.get_libs_dir(arch.arch)) env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink') + return env def postbuild_arch(self, arch): - super(ZopeRecipe, self).postbuild_arch(arch) + super().postbuild_arch(arch) # Should do some deleting here recipe = ZopeRecipe() + +# FIXME: @mirko liblink & LD diff --git a/p4a/pythonforandroid/recipes/zope_interface/__init__.py b/p4a/pythonforandroid/recipes/zope_interface/__init__.py index b1fb0bd..46a1820 100644 --- a/p4a/pythonforandroid/recipes/zope_interface/__init__.py +++ b/p4a/pythonforandroid/recipes/zope_interface/__init__.py @@ -14,18 +14,22 @@ class ZopeInterfaceRecipe(PythonRecipe): patches = ['no_tests.patch'] def build_arch(self, arch): - super(ZopeInterfaceRecipe, self).build_arch(arch) + super().build_arch(arch) # The zope.interface module lacks the __init__.py file in one of its # folders (once installed), which leads to an ImportError.
# Here we intentionally apply a patch to solve that, so, if this gets # fixed in a future release, an error will be triggered - zope_install = join(self.ctx.get_site_packages_dir(arch.arch), 'zope') + zope_install = join(self.ctx.get_site_packages_dir(arch), 'zope') self.apply_patch('fix-init.patch', arch.arch, build_dir=zope_install) def prebuild_arch(self, arch): - super(ZopeInterfaceRecipe, self).prebuild_arch(arch) + super().prebuild_arch(arch) with current_directory(self.get_build_dir(arch.arch)): - sh.rm('-rf', 'src/zope/interface/tests', 'src/zope/interface/common/tests') + sh.rm( + '-rf', + 'src/zope/interface/tests', + 'src/zope/interface/common/tests', + ) recipe = ZopeInterfaceRecipe() diff --git a/p4a/pythonforandroid/recommendations.py b/p4a/pythonforandroid/recommendations.py index fd2fd3a..040c962 100644 --- a/p4a/pythonforandroid/recommendations.py +++ b/p4a/pythonforandroid/recommendations.py @@ -1,37 +1,115 @@ """Simple functions for checking dependency versions.""" +import sys from distutils.version import LooseVersion from os.path import join + from pythonforandroid.logger import info, warning from pythonforandroid.util import BuildInterruptingException # We only check the NDK major version -MIN_NDK_VERSION = 17 -MAX_NDK_VERSION = 17 +MIN_NDK_VERSION = 25 +MAX_NDK_VERSION = 25 -RECOMMENDED_NDK_VERSION = '17c' -OLD_NDK_MESSAGE = 'Older NDKs may not be compatible with all p4a features.' +# DO NOT CHANGE LINE FORMAT: buildozer parses the existence of a RECOMMENDED_NDK_VERSION +RECOMMENDED_NDK_VERSION = "25b" + +NDK_DOWNLOAD_URL = "https://developer.android.com/ndk/downloads/" + +# Important log messages NEW_NDK_MESSAGE = 'Newer NDKs may not be fully supported by p4a.' +UNKNOWN_NDK_MESSAGE = ( + 'Could not determine NDK version, no source.properties in the NDK dir.' +) +PARSE_ERROR_NDK_MESSAGE = ( + 'Could not parse $NDK_DIR/source.properties, not checking NDK version.' +) +READ_ERROR_NDK_MESSAGE = ( + 'Unable to read the NDK version from the given directory {ndk_dir}.' +) +ENSURE_RIGHT_NDK_MESSAGE = ( + 'Make sure your NDK version is greater than {min_supported}. If you get ' + 'build errors, download the recommended NDK {rec_version} from {ndk_url}.' +) +NDK_LOWER_THAN_SUPPORTED_MESSAGE = ( + 'The minimum supported NDK version is {min_supported}. ' + 'You can download it from {ndk_url}.' +) +UNSUPPORTED_NDK_API_FOR_ARMEABI_MESSAGE = ( + 'Asked to build for armeabi architecture with API ' + '{req_ndk_api}, but API {max_ndk_api} or greater does not support armeabi.' +) +CURRENT_NDK_VERSION_MESSAGE = ( + 'Found NDK version {ndk_version}' +) +RECOMMENDED_NDK_VERSION_MESSAGE = ( + 'Maximum recommended NDK version is {recommended_ndk_version}, but newer versions may work.' +) def check_ndk_version(ndk_dir): - # Check the NDK version against what is currently recommended + """ + Check the NDK version against what is currently recommended and raise a + :class:`~pythonforandroid.util.BuildInterruptingException` if the user + tries to use an NDK version lower than the minimum supported, specified + via attribute `MIN_NDK_VERSION`. + + .. versionchanged:: 2019.06.06.1.dev0 + Added the ability to get android's NDK `letter version`, and rewrote + to raise an exception when an NDK version lower than the minimum + supported is detected. + """
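The version detection below hinges on the Pkg.Revision line of $NDK_DIR/source.properties. A small sketch of the idea, with example file contents (the revision shown is illustrative):

    # Sketch only: how a Pkg.Revision minor number maps to a letter version.
    ndk_data = 'Pkg.Desc = Android NDK\nPkg.Revision = 25.1.8937393\n'
    for line in ndk_data.split('\n'):
        if line.startswith('Pkg.Revision'):
            break
    major, minor = [int(x) for x in line.split('=')[-1].strip().split('.')[:2]]
    letter = '' if minor == 0 else chr(ord('b') + minor - 1)
    print('{}{}'.format(major, letter))  # -> 25b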
+    """
     version = read_ndk_version(ndk_dir)
 
     if version is None:
-        return  # if we failed to read the version, just don't worry about it
+        warning(READ_ERROR_NDK_MESSAGE.format(ndk_dir=ndk_dir))
+        warning(
+            ENSURE_RIGHT_NDK_MESSAGE.format(
+                min_supported=MIN_NDK_VERSION,
+                rec_version=RECOMMENDED_NDK_VERSION,
+                ndk_url=NDK_DOWNLOAD_URL,
+            )
+        )
+        return
+
+    # Create a dictionary that maps the NDK minor version to its
+    # human-readable letter version, e.g.:
+    # Pkg.Revision = 17.1.4828580 => ndk-17b
+    # Pkg.Revision = 17.2.4988734 => ndk-17c
+    # Pkg.Revision = 19.0.5232133 => ndk-19 (No letter)
+    minor_to_letter = {0: ''}
+    minor_to_letter.update(
+        {n + 1: chr(i) for n, i in enumerate(range(ord('b'), ord('b') + 25))}
+    )
 
     major_version = version.version[0]
+    letter_version = minor_to_letter[version.version[1]]
+    string_version = '{major_version}{letter_version}'.format(
+        major_version=major_version, letter_version=letter_version
+    )
 
-    info('Found NDK revision {}'.format(version))
+    info(CURRENT_NDK_VERSION_MESSAGE.format(ndk_version=string_version))
 
     if major_version < MIN_NDK_VERSION:
-        warning('Minimum recommended NDK version is {}'.format(
-            RECOMMENDED_NDK_VERSION))
-        warning(OLD_NDK_MESSAGE)
+        raise BuildInterruptingException(
+            NDK_LOWER_THAN_SUPPORTED_MESSAGE.format(
+                min_supported=MIN_NDK_VERSION, ndk_url=NDK_DOWNLOAD_URL
+            ),
+            instructions=(
+                'Please go to the Android NDK page ({ndk_url}) and download a'
+                ' supported version.\n*** The currently recommended NDK'
+                ' version is {rec_version} ***'.format(
+                    ndk_url=NDK_DOWNLOAD_URL,
+                    rec_version=RECOMMENDED_NDK_VERSION,
+                )
+            ),
+        )
     elif major_version > MAX_NDK_VERSION:
-        warning('Maximum recommended NDK version is {}'.format(
-            RECOMMENDED_NDK_VERSION))
+        warning(
+            RECOMMENDED_NDK_VERSION_MESSAGE.format(
+                recommended_ndk_version=RECOMMENDED_NDK_VERSION
+            )
+        )
         warning(NEW_NDK_MESSAGE)
 
@@ -41,16 +119,14 @@ def read_ndk_version(ndk_dir):
     with open(join(ndk_dir, 'source.properties')) as fileh:
         ndk_data = fileh.read()
     except IOError:
-        info('Could not determine NDK version, no source.properties '
-             'in the NDK dir')
+        info(UNKNOWN_NDK_MESSAGE)
         return
 
     for line in ndk_data.split('\n'):
         if line.startswith('Pkg.Revision'):
             break
     else:
-        info('Could not parse $NDK_DIR/source.properties, not checking '
-             'NDK version')
+        info(PARSE_ERROR_NDK_MESSAGE)
         return
 
     # Line should have the form "Pkg.Revision = ..."
@@ -59,15 +135,15 @@
     return ndk_version
 
 
-MIN_TARGET_API = 26
+MIN_TARGET_API = 30
 
 # highest version tested to work fine with SDL2
 # should be a good default for other bootstraps too
-RECOMMENDED_TARGET_API = 27
+RECOMMENDED_TARGET_API = 33
 
 ARMEABI_MAX_TARGET_API = 21
 OLD_API_MESSAGE = (
-    'Target APIs lower than 26 are no longer supported on Google Play, '
+    'Target APIs lower than 30 are no longer supported on Google Play, '
     'and are not recommended. Note that the Target API can be higher than '
     'your device Android version, and should usually be as high as possible.')
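Before the target-API hunk below, a quick sanity check of the minor-to-letter mapping introduced in check_ndk_version above; the Pkg.Revision samples are the ones from the comment in that hunk:

    # Rebuild the mapping exactly as check_ndk_version does: minor 0 has no
    # letter, minor 1 maps to 'b', minor 2 to 'c', and so on.
    minor_to_letter = {0: ''}
    minor_to_letter.update(
        {n + 1: chr(i) for n, i in enumerate(range(ord('b'), ord('b') + 25))}
    )

    for revision, expected in [('17.1.4828580', '17b'),
                               ('17.2.4988734', '17c'),
                               ('19.0.5232133', '19')]:
        major, minor = revision.split('.')[:2]
        assert '{}{}'.format(major, minor_to_letter[int(minor)]) == expected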
@@ -77,11 +153,12 @@ def check_target_api(api, arch):
     recommendation
     """
+    # FIXME: should we remove support for armeabi (ARMv5)?
     if api >= ARMEABI_MAX_TARGET_API and arch == 'armeabi':
         raise BuildInterruptingException(
-            'Asked to build for armeabi architecture with API '
-            '{}, but API {} or greater does not support armeabi'.format(
-                api, ARMEABI_MAX_TARGET_API),
+            UNSUPPORTED_NDK_API_FOR_ARMEABI_MESSAGE.format(
+                req_ndk_api=api, max_ndk_api=ARMEABI_MAX_TARGET_API
+            ),
             instructions='You probably want to build with --arch=armeabi-v7a instead')
 
     if api < MIN_TARGET_API:
@@ -92,16 +169,65 @@
 MIN_NDK_API = 21
 RECOMMENDED_NDK_API = 21
 OLD_NDK_API_MESSAGE = ('NDK API less than {} is not supported'.format(MIN_NDK_API))
+TARGET_NDK_API_GREATER_THAN_TARGET_API_MESSAGE = (
+    'Target NDK API is {ndk_api}, '
+    'higher than the target Android API {android_api}.'
+)
 
 
 def check_ndk_api(ndk_api, android_api):
     """Check the NDK API: raise if it is higher than the target Android API, warn if it is below the minimum supported."""
     if ndk_api > android_api:
         raise BuildInterruptingException(
-            'Target NDK API is {}, higher than the target Android API {}.'.format(
-                ndk_api, android_api),
+            TARGET_NDK_API_GREATER_THAN_TARGET_API_MESSAGE.format(
+                ndk_api=ndk_api, android_api=android_api
+            ),
             instructions=('The NDK API is a minimum supported API number and must be lower '
                           'than the target Android API'))
 
     if ndk_api < MIN_NDK_API:
         warning(OLD_NDK_API_MESSAGE)
+
+
+MIN_PYTHON_MAJOR_VERSION = 3
+MIN_PYTHON_MINOR_VERSION = 6
+MIN_PYTHON_VERSION = LooseVersion('{major}.{minor}'.format(major=MIN_PYTHON_MAJOR_VERSION,
+                                                           minor=MIN_PYTHON_MINOR_VERSION))
+PY2_ERROR_TEXT = (
+    'python-for-android no longer supports running under Python 2. Either upgrade to '
+    'Python {min_version} or higher (recommended), or revert to python-for-android 2019.07.08.'
+).format(min_version=MIN_PYTHON_VERSION)
+
+PY_VERSION_ERROR_TEXT = (
+    'Your Python version {user_major}.{user_minor} is not supported by python-for-android, '
+    'please upgrade to {min_version} or higher.'
+    ).format(
+        user_major=sys.version_info.major,
+        user_minor=sys.version_info.minor,
+        min_version=MIN_PYTHON_VERSION)
+
+
+def check_python_version():
+    # Python 2 special cased because it's a major transition. In the
+    # future the major or minor versions can increment more quietly.
+    if sys.version_info.major == 2:
+        raise BuildInterruptingException(PY2_ERROR_TEXT)
+
+    if (
+        sys.version_info.major < MIN_PYTHON_MAJOR_VERSION or
+        sys.version_info.minor < MIN_PYTHON_MINOR_VERSION
+    ):
+
+        raise BuildInterruptingException(PY_VERSION_ERROR_TEXT)
+
+
+def print_recommendations():
+    """
+    Print the main recommended dependency versions as simple key-value pairs.
+    """
+    print('Min supported NDK version: {}'.format(MIN_NDK_VERSION))
+    print('Recommended NDK version: {}'.format(RECOMMENDED_NDK_VERSION))
+    print('Min target API: {}'.format(MIN_TARGET_API))
+    print('Recommended target API: {}'.format(RECOMMENDED_TARGET_API))
+    print('Min NDK API: {}'.format(MIN_NDK_API))
+    print('Recommended NDK API: {}'.format(RECOMMENDED_NDK_API))
diff --git a/p4a/pythonforandroid/toolchain.py b/p4a/pythonforandroid/toolchain.py
index ddf745a..85404a2 100644
--- a/p4a/pythonforandroid/toolchain.py
+++ b/p4a/pythonforandroid/toolchain.py
@@ -6,12 +6,14 @@
 Tool for packaging Python apps for Android
 
 This module defines the entry point for command line and programmatic use.
 """
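A recurring theme in the toolchain.py changes below is dropping the deprecated imp module in favour of a small load_source helper (added to util.py near the end of this diff). For reference, a sketch of the importlib pattern that helper relies on for Python 3.5+:

    import importlib.util

    def load_source(module_name, filename):
        # importlib-based replacement for the removed imp.load_source().
        spec = importlib.util.spec_from_file_location(module_name, filename)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    # Typical use in this diff is loading a dist's build.py by path, e.g.
    # build = load_source('build', '/path/to/dist/build.py')  # illustrative path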
""" -from __future__ import print_function from os import environ from pythonforandroid import __version__ +from pythonforandroid.pythonpackage import get_dep_names_of_package from pythonforandroid.recommendations import ( - RECOMMENDED_NDK_API, RECOMMENDED_TARGET_API) -from pythonforandroid.util import BuildInterruptingException, handle_build_exception + RECOMMENDED_NDK_API, RECOMMENDED_TARGET_API, print_recommendations) +from pythonforandroid.util import BuildInterruptingException, load_source +from pythonforandroid.entrypoints import main +from pythonforandroid.prerequisites import check_and_install_default_prerequisites def check_python_dependencies(): @@ -27,8 +29,7 @@ def check_python_dependencies(): ok = True - modules = [('colorama', '0.3.3'), 'appdirs', ('sh', '1.10'), 'jinja2', - 'six'] + modules = [('colorama', '0.3.3'), 'appdirs', ('sh', '1.10'), 'jinja2'] for module in modules: if isinstance(module, tuple): @@ -65,6 +66,8 @@ def check_python_dependencies(): exit(1) +if not environ.get('SKIP_PREREQUISITES_CHECK', '0') == '1': + check_and_install_default_prerequisites() check_python_dependencies() @@ -80,7 +83,6 @@ from functools import wraps import argparse import sh -import imp from appdirs import user_data_dir import logging from distutils.version import LooseVersion @@ -135,7 +137,7 @@ def require_prebuilt_dist(func): """ @wraps(func) - def wrapper_func(self, args): + def wrapper_func(self, args, **kw): ctx = self.ctx ctx.set_archs(self._archs) ctx.prepare_build_environment(user_sdk_dir=self.sdk_dir, @@ -149,7 +151,7 @@ def require_prebuilt_dist(func): info_notify('No dist exists that meets your requirements, ' 'so one will be built.') build_dist_from_args(ctx, dist, args) - func(self, args) + func(self, args, **kw) return wrapper_func @@ -161,6 +163,7 @@ def dist_from_args(ctx, args): ctx, name=args.dist_name, recipes=split_argument_list(args.requirements), + archs=args.arch, ndk_api=args.ndk_api, force_build=args.force_build, require_perfect_match=args.require_perfect_match, @@ -171,37 +174,59 @@ def build_dist_from_args(ctx, dist, args): """Parses out any bootstrap related arguments, and uses them to build a dist.""" bs = Bootstrap.get_bootstrap(args.bootstrap, ctx) - build_order, python_modules, bs \ - = get_recipe_order_and_bootstrap(ctx, dist.recipes, bs) + blacklist = getattr(args, "blacklist_requirements", "").split(",") + if len(blacklist) == 1 and blacklist[0] == "": + blacklist = [] + build_order, python_modules, bs = ( + get_recipe_order_and_bootstrap( + ctx, dist.recipes, bs, + blacklist=blacklist + )) + assert set(build_order).intersection(set(python_modules)) == set() ctx.recipe_build_order = build_order ctx.python_modules = python_modules info('The selected bootstrap is {}'.format(bs.name)) info_main('# Creating dist with {} bootstrap'.format(bs.name)) bs.distribution = dist - info_notify('Dist will have name {} and recipes ({})'.format( + info_notify('Dist will have name {} and requirements ({})'.format( dist.name, ', '.join(dist.recipes))) + info('Dist contains the following requirements as recipes: {}'.format( + ctx.recipe_build_order)) info('Dist will also contain modules ({}) installed from pip'.format( ', '.join(ctx.python_modules))) + info( + 'Dist will be build in mode {build_mode}{with_debug_symbols}'.format( + build_mode='debug' if ctx.build_as_debuggable else 'release', + with_debug_symbols=' (with debug symbols)' + if ctx.with_debug_symbols + else '', + ) + ) - ctx.dist_name = bs.distribution.name + ctx.distribution = dist 
ctx.prepare_bootstrap(bs)
     if dist.needs_build:
-        ctx.prepare_dist(ctx.dist_name)
+        ctx.prepare_dist()
 
-    build_recipes(build_order, python_modules, ctx)
+    build_recipes(build_order, python_modules, ctx,
+                  getattr(args, "private", None),
+                  ignore_project_setup_py=getattr(
+                      args, "ignore_setup_py", False
+                  ),
+                  )
 
-    ctx.bootstrap.run_distribute()
+    ctx.bootstrap.assemble_distribution()
 
     info_main('# Your distribution was created successfully, exiting.')
     info('Dist can be found at (for now) {}'
-         .format(join(ctx.dist_dir, ctx.dist_name)))
+         .format(join(ctx.dist_dir, ctx.distribution.dist_dir)))
 
 
-def split_argument_list(l):
-    if not len(l):
+def split_argument_list(arg_list):
+    if not len(arg_list):
         return []
-    return re.split(r'[ ,]+', l)
+    return re.split(r'[ ,]+', arg_list)
 
 
 class NoAbbrevParser(argparse.ArgumentParser):
@@ -216,11 +241,12 @@
         return []
 
 
-class ToolchainCL(object):
+class ToolchainCL:
 
     def __init__(self):
 
         argv = sys.argv
+        self.warn_on_carriage_return_args(argv)
         # Buildozer used to pass these arguments in a now-invalid order
         # If that happens, apply this fix
         # This fix will be removed once a fixed buildozer is released
@@ -270,11 +296,11 @@
             '*minimal supported* API, not normally the same as your --android-api. '
             'Defaults to min(ANDROID_API, {}) if not specified.').format(RECOMMENDED_NDK_API))
         generic_parser.add_argument(
-            '--symlink-java-src', '--symlink_java_src',
+            '--symlink-bootstrap-files', '--symlink_bootstrap_files',
             action='store_true',
-            dest='symlink_java_src',
+            dest='symlink_bootstrap_files',
             default=False,
-            help=('If True, symlinks the java src folder during build and dist '
+            help=('If True, symlinks the bootstrap files during build and dist '
                   'creation. This is useful for development only, it could also'
                   ' cause weird problems.'))
 
@@ -287,8 +313,8 @@
             '(default: {})'.format(default_storage_dir)))
 
         generic_parser.add_argument(
-            '--arch', help='The archs to build for, separated by commas.',
-            default='arm64-v8a')
+            '--arch', help='The archs to build for.',
+            action='append', default=[])
 
         # Options for specifying the Distribution
         generic_parser.add_argument(
@@ -298,7 +324,22 @@
         generic_parser.add_argument(
             '--requirements',
             help=('Dependencies of your app, should be recipe names or '
-                  'Python modules'),
+                  'Python modules. NOT NECESSARY if you are using '
+                  'Python 3 with --use-setup-py'),
             default='')
 
+        generic_parser.add_argument(
+            '--recipe-blacklist',
+            help=('Blacklist an internal recipe from use. Allows '
+                  'disabling Python 3 core modules to save size'),
+            dest="recipe_blacklist",
+            default='')
+
+        generic_parser.add_argument(
+            '--blacklist-requirements',
+            help=('Blacklist an internal recipe from use. 
Allows '
+                  'disabling Python 3 core modules to save size'),
+            dest="blacklist_requirements",
             default='')
 
         generic_parser.add_argument(
@@ -334,6 +375,16 @@
             dest='local_recipes', default='./p4a-recipes',
             help='Directory to look for local recipes')
 
+        generic_parser.add_argument(
+            '--activity-class-name',
+            dest='activity_class_name', default='org.kivy.android.PythonActivity',
+            help='The full java class name of the main activity')
+
+        generic_parser.add_argument(
+            '--service-class-name',
+            dest='service_class_name', default='org.kivy.android.PythonService',
+            help='Full java package name of the PythonService class')
+
         generic_parser.add_argument(
             '--java-build-tool',
             dest='java_build_tool', default='auto',
@@ -361,7 +412,7 @@
             kwargs.pop('aliases')
         return subparsers.add_parser(*args, **kwargs)
 
-        parser_recommendations = add_parser(
+        add_parser(
             subparsers,
             'recommendations',
             parents=[generic_parser],
@@ -447,43 +498,94 @@
             action='store_true',
             help='Symlink the dist instead of copying')
 
-        parser_apk = add_parser(
+        parser_packaging = argparse.ArgumentParser(
+            parents=[generic_parser],
+            add_help=False,
+            description='common options for packaging (apk, aar)')
-            subparsers,
-            'apk', help='Build an APK',
-            parents=[generic_parser])
-        parser_apk.add_argument(
+        # This is actually an internal argument of the build.py
+        # (see pythonforandroid/bootstraps/common/build/build.py).
+        # However, it is also needed before the distribution is finally
+        # assembled for locating the setup.py / other build systems, which
+        # is why we also add it here:
+        parser_packaging.add_argument(
+            '--add-asset', dest='assets',
+            action="append", default=[],
+            help='Put this in the assets folder in the apk.')
+        parser_packaging.add_argument(
+            '--add-resource', dest='resources',
+            action="append", default=[],
+            help='Put this in the res folder in the apk.')
+        parser_packaging.add_argument(
+            '--private', dest='private',
+            help='the directory with the app source code files' +
+                 ' (containing your main.py entrypoint)',
+            required=False, default=None)
+        parser_packaging.add_argument(
+            '--use-setup-py', dest="use_setup_py",
+            action='store_true', default=False,
+            help="Process the setup.py of a project if present. " +
+                 "(Experimental!)")
+        parser_packaging.add_argument(
+            '--ignore-setup-py', dest="ignore_setup_py",
+            action='store_true', default=False,
+            help="Don't run the setup.py of a project if present. " +
+                 "This may be required if the setup.py is not " +
+                 "designed to work inside p4a (e.g. by installing " +
+                 "dependencies that won't work or aren't desired " +
+                 "on Android)")
+        parser_packaging.add_argument(
             '--release', dest='build_mode', action='store_const',
             const='release', default='debug',
-            help='Build the APK in Release mode')
-        parser_apk.add_argument(
+            help='Build your app as a non-debug release build. '
+                 '(Disables gdb debugging among other things)')
+        parser_packaging.add_argument(
+            '--with-debug-symbols', dest='with_debug_symbols',
+            action='store_const', const=True, default=False,
+            help='Will keep debug symbols from `.so` files.')
+        parser_packaging.add_argument(
             '--keystore', dest='keystore', action='store', default=None,
             help=('Keystore for JAR signing key, will use jarsigner '
                   'default if not specified (release build only)'))
-        parser_apk.add_argument(
+        parser_packaging.add_argument(
             '--signkey', dest='signkey', action='store', default=None,
             help='Key alias to sign APK
with (release build only)') - parser_apk.add_argument( + parser_packaging.add_argument( '--keystorepw', dest='keystorepw', action='store', default=None, help='Password for keystore') - parser_apk.add_argument( + parser_packaging.add_argument( '--signkeypw', dest='signkeypw', action='store', default=None, help='Password for key alias') - parser_create = add_parser( + add_parser( + subparsers, + 'aar', help='Build an AAR', + parents=[parser_packaging]) + + add_parser( + subparsers, + 'apk', help='Build an APK', + parents=[parser_packaging]) + + add_parser( + subparsers, + 'aab', help='Build an AAB', + parents=[parser_packaging]) + + add_parser( subparsers, 'create', help='Compile a set of requirements into a dist', parents=[generic_parser]) - parser_archs = add_parser( + add_parser( subparsers, 'archs', help='List the available target architectures', parents=[generic_parser]) - parser_distributions = add_parser( + add_parser( subparsers, 'distributions', aliases=['dists'], help='List the currently available (compiled) dists', parents=[generic_parser]) - parser_delete_dist = add_parser( + add_parser( subparsers, 'delete_dist', aliases=['delete-dist'], help='Delete a compiled dist', parents=[generic_parser]) @@ -496,15 +598,15 @@ class ToolchainCL(object): parser_sdk_tools.add_argument( 'tool', help='The binary tool name to run') - parser_adb = add_parser( + add_parser( subparsers, 'adb', help='Run adb from the given SDK', parents=[generic_parser]) - parser_logcat = add_parser( + add_parser( subparsers, 'logcat', help='Run logcat from the given SDK', parents=[generic_parser]) - parser_build_status = add_parser( + add_parser( subparsers, 'build_status', aliases=['build-status'], help='Print some debug information about current built components', @@ -516,6 +618,20 @@ class ToolchainCL(object): args, unknown = parser.parse_known_args(sys.argv[1:]) args.unknown_args = unknown + if hasattr(args, "private") and args.private is not None: + # Pass this value on to the internal bootstrap build.py: + args.unknown_args += ["--private", args.private] + if hasattr(args, "build_mode") and args.build_mode == "release": + args.unknown_args += ["--release"] + if hasattr(args, "with_debug_symbols") and args.with_debug_symbols: + args.unknown_args += ["--with-debug-symbols"] + if hasattr(args, "ignore_setup_py") and args.ignore_setup_py: + args.use_setup_py = False + if hasattr(args, "activity_class_name") and args.activity_class_name != 'org.kivy.android.PythonActivity': + args.unknown_args += ["--activity-class-name", args.activity_class_name] + if hasattr(args, "service_class_name") and args.service_class_name != 'org.kivy.android.PythonService': + args.unknown_args += ["--service-class-name", args.service_class_name] + self.args = args if args.subparser_name is None: @@ -527,9 +643,64 @@ class ToolchainCL(object): if args.debug: logger.setLevel(logging.DEBUG) - # strip version from requirements, and put them in environ + self.ctx = Context() + self.ctx.use_setup_py = getattr(args, "use_setup_py", True) + self.ctx.build_as_debuggable = getattr( + args, "build_mode", "debug" + ) == "debug" + self.ctx.with_debug_symbols = getattr( + args, "with_debug_symbols", False + ) + + have_setup_py_or_similar = False + if getattr(args, "private", None) is not None: + project_dir = getattr(args, "private") + if (os.path.exists(os.path.join(project_dir, "setup.py")) or + os.path.exists(os.path.join(project_dir, + "pyproject.toml"))): + have_setup_py_or_similar = True + + # Process requirements and put version in environ if 
hasattr(args, 'requirements'): requirements = [] + + # Add dependencies from setup.py, but only if they are recipes + # (because otherwise, setup.py itself will install them later) + if (have_setup_py_or_similar and + getattr(args, "use_setup_py", False)): + try: + info("Analyzing package dependencies. MAY TAKE A WHILE.") + # Get all the dependencies corresponding to a recipe: + dependencies = [ + dep.lower() for dep in + get_dep_names_of_package( + args.private, + keep_version_pins=True, + recursive=True, + verbose=True, + ) + ] + info("Dependencies obtained: " + str(dependencies)) + all_recipes = [ + recipe.lower() for recipe in + set(Recipe.list_recipes(self.ctx)) + ] + dependencies = set(dependencies).intersection( + set(all_recipes) + ) + # Add dependencies to argument list: + if len(dependencies) > 0: + if len(args.requirements) > 0: + args.requirements += u"," + args.requirements += u",".join(dependencies) + except ValueError: + # Not a python package, apparently. + warning( + "Processing failed, is this project a valid " + "package? Will continue WITHOUT setup.py deps." + ) + + # Parse --requirements argument list: for requirement in split_argument_list(args.requirements): if "==" in requirement: requirement, version = requirement.split(u"==", 1) @@ -541,29 +712,58 @@ class ToolchainCL(object): self.warn_on_deprecated_args(args) - self.ctx = Context() self.storage_dir = args.storage_dir self.ctx.setup_dirs(self.storage_dir) self.sdk_dir = args.sdk_dir self.ndk_dir = args.ndk_dir self.android_api = args.android_api self.ndk_api = args.ndk_api - self.ctx.symlink_java_src = args.symlink_java_src + self.ctx.symlink_bootstrap_files = args.symlink_bootstrap_files self.ctx.java_build_tool = args.java_build_tool - self._archs = split_argument_list(args.arch) + self._archs = args.arch - self.ctx.local_recipes = args.local_recipes + self.ctx.local_recipes = realpath(args.local_recipes) self.ctx.copy_libs = args.copy_libs + self.ctx.activity_class_name = args.activity_class_name + self.ctx.service_class_name = args.service_class_name + # Each subparser corresponds to a method - getattr(self, args.subparser_name.replace('-', '_'))(args) + command = args.subparser_name.replace('-', '_') + getattr(self, command)(args) + + @staticmethod + def warn_on_carriage_return_args(args): + for check_arg in args: + if '\r' in check_arg: + warning("Argument '{}' contains a carriage return (\\r).".format(str(check_arg.replace('\r', '')))) + warning("Invoking this program via scripts which use CRLF instead of LF line endings will have undefined behaviour.") def warn_on_deprecated_args(self, args): """ Print warning messages for any deprecated arguments that were passed. """ + # Output warning if setup.py is present and neither --ignore-setup-py + # nor --use-setup-py was specified. 
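Stepping back to the requirement parsing earlier in this hunk: pinned requirements of the form name==version are split once, and only the bare name goes into the recipe list. Upstream p4a then exports the pin through a VERSION_<name> environment variable; the export itself is elided from this hunk, so treat that name as an assumption. A sketch of the behaviour, reusing split_argument_list from above:

    import os
    import re

    def split_argument_list(arg_list):
        # Same helper as defined earlier in toolchain.py.
        if not len(arg_list):
            return []
        return re.split(r'[ ,]+', arg_list)

    requirements = []
    for requirement in split_argument_list('python3,kivy==2.1.0'):
        if '==' in requirement:
            requirement, version = requirement.split('==', 1)
            # VERSION_<name> is the upstream p4a convention for passing the
            # pin on to recipes; shown here for illustration only.
            os.environ['VERSION_{}'.format(requirement)] = version
        requirements.append(requirement)

    assert requirements == ['python3', 'kivy']
    assert os.environ['VERSION_kivy'] == '2.1.0'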
+ if getattr(args, "private", None) is not None and \ + (os.path.exists(os.path.join(args.private, "setup.py")) or + os.path.exists(os.path.join(args.private, "pyproject.toml")) + ): + if not getattr(args, "use_setup_py", False) and \ + not getattr(args, "ignore_setup_py", False): + warning(" **** FUTURE BEHAVIOR CHANGE WARNING ****") + warning("Your project appears to contain a setup.py file.") + warning("Currently, these are ignored by default.") + warning("This will CHANGE in an upcoming version!") + warning("") + warning("To ensure your setup.py is ignored, please specify:") + warning(" --ignore-setup-py") + warning("") + warning("To enable what will some day be the default, specify:") + warning(" --use-setup-py") + # NDK version is now determined automatically if args.ndk_version is not None: warning('--ndk-version is deprecated and no longer necessary, ' @@ -577,8 +777,8 @@ class ToolchainCL(object): return if not hasattr(self, "hook_module"): # first time, try to load the hook module - self.hook_module = imp.load_source("pythonforandroid.hook", - self.args.hook) + self.hook_module = load_source( + "pythonforandroid.hook", self.args.hook) if hasattr(self.hook_module, name): info("Hook: execute {}".format(name)) getattr(self.hook_module, name)(self) @@ -607,6 +807,14 @@ class ToolchainCL(object): sys.argv.append(arg) def recipes(self, args): + """ + Prints recipes basic info, e.g. + .. code-block:: bash + python3 3.7.1 + depends: ['hostpython3', 'sqlite3', 'openssl', 'libffi'] + conflicts: [] + optional depends: ['sqlite3', 'libffi', 'openssl'] + """ ctx = self.ctx if args.compact: print(" ".join(set(Recipe.list_recipes(ctx)))) @@ -614,7 +822,7 @@ class ToolchainCL(object): for name in sorted(Recipe.list_recipes(ctx)): try: recipe = Recipe.get_recipe(name, ctx) - except IOError: + except (IOError, ValueError): warning('Recipe "{}" could not be loaded'.format(name)) except SyntaxError: import traceback @@ -640,7 +848,7 @@ class ToolchainCL(object): def bootstraps(self, _args): """List all the bootstraps available to build with.""" - for bs in Bootstrap.list_bootstraps(): + for bs in Bootstrap.all_bootstraps(): bs = Bootstrap.get_bootstrap(bs, self.ctx) print('{Fore.BLUE}{Style.BRIGHT}{bs.name}{Style.RESET_ALL}' .format(bs=bs, Fore=Out_Fore, Style=Out_Style)) @@ -683,7 +891,7 @@ class ToolchainCL(object): """Delete all the bootstrap builds.""" if exists(join(self.ctx.build_dir, 'bootstrap_builds')): shutil.rmtree(join(self.ctx.build_dir, 'bootstrap_builds')) - # for bs in Bootstrap.list_bootstraps(): + # for bs in Bootstrap.all_bootstraps(): # bs = Bootstrap.get_bootstrap(bs, self.ctx) # if bs.build_dir and exists(bs.build_dir): # info('Cleaning build for {} bootstrap.'.format(bs.name)) @@ -770,22 +978,40 @@ class ToolchainCL(object): def _dist(self): ctx = self.ctx dist = dist_from_args(ctx, self.args) + ctx.distribution = dist return dist - @require_prebuilt_dist - def apk(self, args): - """Create an APK using the given distribution.""" + @staticmethod + def _fix_args(args): + """ + Manually fixing these arguments at the string stage is + unsatisfactory and should probably be changed somehow, but + we can't leave it until later as the build.py scripts assume + they are in the current directory. 
+        works in-place
+        :param args: parser args
+        """
-        ctx = self.ctx
-        dist = self._dist
-
-        # Manually fixing these arguments at the string stage is
-        # unsatisfactory and should probably be changed somehow, but
-        # we can't leave it until later as the build.py scripts assume
-        # they are in the current directory.
         fix_args = ('--dir', '--private', '--add-jar', '--add-source',
-                    '--whitelist', '--blacklist', '--presplash', '--icon')
+                    '--whitelist', '--blacklist', '--presplash', '--icon',
+                    '--icon-bg', '--icon-fg')
         unknown_args = args.unknown_args
+
+        for asset in args.assets:
+            if ":" in asset:
+                asset_src, asset_dest = asset.split(":")
+            else:
+                asset_src = asset_dest = asset
+            # take abspath now, because build.py will be run in bootstrap dir
+            unknown_args += ["--asset", os.path.abspath(asset_src)+":"+asset_dest]
+        for resource in args.resources:
+            if ":" in resource:
+                resource_src, resource_dest = resource.split(":")
+            else:
+                resource_src = resource
+                resource_dest = ""
+            # take abspath now, because build.py will be run in bootstrap dir
+            unknown_args += ["--resource", os.path.abspath(resource_src)+":"+resource_dest]
         for i, arg in enumerate(unknown_args):
             argx = arg.split('=')
             if argx[0] in fix_args:
@@ -795,6 +1021,12 @@
             elif i + 1 < len(unknown_args):
                 unknown_args[i+1] = realpath(expanduser(unknown_args[i+1]))
 
+    @staticmethod
+    def _prepare_release_env(args):
+        """
+        Prepares an environment dict with the necessary flags for signing an apk
+        :param args: parser args
+        """
         env = os.environ.copy()
         if args.build_mode == 'release':
             if args.keystore:
@@ -808,125 +1040,152 @@
             elif args.keystorepw and 'P4A_RELEASE_KEYALIAS_PASSWD' not in env:
                 env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.keystorepw
 
-        build = imp.load_source('build', join(dist.dist_dir, 'build.py'))
+        return env
+
+    def _build_package(self, args, package_type):
+        """
+        Creates an android package using gradle
+        :param args: parser args
+        :param package_type: one of 'apk', 'aar', 'aab'
+        :return: (gradle output, build_args)
+        """
+        ctx = self.ctx
+        dist = self._dist
+        bs = Bootstrap.get_bootstrap(args.bootstrap, ctx)
+        ctx.prepare_bootstrap(bs)
+        self._fix_args(args)
+        env = self._prepare_release_env(args)
+
         with current_directory(dist.dist_dir):
             self.hook("before_apk_build")
             os.environ["ANDROID_API"] = str(self.ctx.android_api)
-            build_args = build.parse_args(args.unknown_args)
+            build = load_source('build', join(dist.dist_dir, 'build.py'))
+            build_args = build.parse_args_and_make_package(
+                args.unknown_args
+            )
+
             self.hook("after_apk_build")
             self.hook("before_apk_assemble")
+            build_tools_versions = os.listdir(join(ctx.sdk_dir,
+                                                   'build-tools'))
+            build_tools_versions = sorted(build_tools_versions,
+                                          key=LooseVersion)
+            build_tools_version = build_tools_versions[-1]
+            info(('Detected highest available build tools '
+                  'version to be {}').format(build_tools_version))
 
-            build_type = ctx.java_build_tool
-            if build_type == 'auto':
-                info('Selecting java build tool:')
+            if build_tools_version < '25.0':
+                raise BuildInterruptingException(
+                    'build_tools >= 25 is required, but %s is installed' % build_tools_version)
+            if not exists("gradlew"):
+                raise BuildInterruptingException("gradlew file is missing")
 
-                build_tools_versions = os.listdir(join(ctx.sdk_dir,
-                                                       'build-tools'))
-                build_tools_versions.sort(key=LooseVersion)
-                build_tools_version = build_tools_versions[-1]
-                info(('Detected highest available build tools '
-                      'version to be {}').format(build_tools_version))
+
env["ANDROID_NDK_HOME"] = self.ctx.ndk_dir + env["ANDROID_HOME"] = self.ctx.sdk_dir - if build_tools_version >= '25.0' and exists('gradlew'): - build_type = 'gradle' - info(' Building with gradle, as gradle executable is ' - 'present') - else: - build_type = 'ant' - if build_tools_version < '25.0': - info((' Building with ant, as the highest ' - 'build-tools-version is only {}').format( - build_tools_version)) - else: - info(' Building with ant, as no gradle executable ' - 'detected') + gradlew = sh.Command('./gradlew') - if build_type == 'gradle': - # gradle-based build - env["ANDROID_NDK_HOME"] = self.ctx.ndk_dir - env["ANDROID_HOME"] = self.ctx.sdk_dir - - gradlew = sh.Command('./gradlew') - if exists('/usr/bin/dos2unix'): - # .../dists/bdisttest_python3/gradlew - # .../build/bootstrap_builds/sdl2-python3crystax/gradlew - # if docker on windows, gradle contains CRLF - output = shprint( - sh.Command('dos2unix'), gradlew._path.decode('utf8'), - _tail=20, _critical=True, _env=env + if exists('/usr/bin/dos2unix'): + # .../dists/bdisttest_python3/gradlew + # .../build/bootstrap_builds/sdl2-python3/gradlew + # if docker on windows, gradle contains CRLF + output = shprint( + sh.Command('dos2unix'), gradlew._path.decode('utf8'), + _tail=20, _critical=True, _env=env + ) + if args.build_mode == "debug": + if package_type == "aab": + raise BuildInterruptingException( + "aab is meant only for distribution and is not available in debug mode. " + "Instead, you can use apk while building for debugging purposes." ) - if args.build_mode == "debug": - gradle_task = "assembleDebug" - elif args.build_mode == "release": + gradle_task = "assembleDebug" + elif args.build_mode == "release": + if package_type in ["apk", "aar"]: gradle_task = "assembleRelease" - else: - raise BuildInterruptingException( - "Unknown build mode {} for apk()".format(args.build_mode)) - output = shprint(gradlew, "--console=plain", gradle_task, - "publishReleasePublicationToSonatypeRepository", - _tail=20, - _critical=True, _env=env) - - # gradle output apks somewhere else - # and don't have version in file - apk_dir = join(dist.dist_dir, - "build", "outputs", "aar") - apk_glob = "*-{}.aar" - apk_add_version = True - + elif package_type == "aab": + gradle_task = "bundleRelease" else: - # ant-based build - try: - ant = sh.Command('ant') - except sh.CommandNotFound: - raise BuildInterruptingException( - 'Could not find ant binary, please install it ' - 'and make sure it is in your $PATH.') - output = shprint(ant, args.build_mode, _tail=20, - _critical=True, _env=env) - apk_dir = join(dist.dist_dir, "bin") - apk_glob = "*-*-{}.aar" - apk_add_version = False + raise BuildInterruptingException( + "Unknown build mode {} for apk()".format(args.build_mode)) - self.hook("after_apk_assemble") + # WARNING: We should make sure to clean the build directory before building. 
+ # See PR: kivy/python-for-android#2705 + output = shprint(gradlew, "clean", gradle_task, _tail=20, + _critical=True, _env=env) + return output, build_args + + def _finish_package(self, args, output, build_args, package_type, output_dir): + """ + Finishes the package after the gradle script run + :param args: the parser args + :param output: RunningCommand output + :param build_args: build args as returned by build.parse_args + :param package_type: one of 'apk', 'aar', 'aab' + :param output_dir: where to put the package file + """ + + package_glob = "*-{}.%s" % package_type + package_add_version = True + + self.hook("after_apk_assemble") info_main('# Copying android package to current directory') - apk_re = re.compile(r'.*Package: (.*\.aar)$') - apk_file = None + package_re = re.compile(r'.*Package: (.*\.apk)$') + package_file = None for line in reversed(output.splitlines()): - m = apk_re.match(line) + m = package_re.match(line) if m: - apk_file = m.groups()[0] + package_file = m.groups()[0] break - - if not apk_file: - info_main('# AAR not found in build output. Guessing...') + if not package_file: + info_main('# Android package filename not found in build output. Guessing...') if args.build_mode == "release": suffixes = ("release", "release-unsigned") else: suffixes = ("debug", ) for suffix in suffixes: - apks = glob.glob(join(apk_dir, apk_glob.format(suffix))) - if apks: - if len(apks) > 1: - info('More than one built AAR found... guessing you ' - 'just built {}'.format(apks[-1])) - apk_file = apks[-1] + + package_files = glob.glob(join(output_dir, package_glob.format(suffix))) + if package_files: + if len(package_files) > 1: + info('More than one built APK found... guessing you ' + 'just built {}'.format(package_files[-1])) + package_file = package_files[-1] break else: - raise BuildInterruptingException('Couldn\'t find the built AAR') + raise BuildInterruptingException('Couldn\'t find the built APK') - info_main('# Found AAR file: {}'.format(apk_file)) - if apk_add_version: - info('# Add version number to AAR') - apk_name, apk_suffix = basename(apk_file).split("-", 1) - apk_file_dest = "{}-{}-{}".format( - apk_name, build_args.version, apk_suffix) - info('# AAR renamed to {}'.format(apk_file_dest)) - shprint(sh.cp, apk_file, apk_file_dest) + info_main('# Found android package file: {}'.format(package_file)) + package_extension = f".{package_type}" + if package_add_version: + info('# Add version number to android package') + package_name = basename(package_file)[:-len(package_extension)] + package_file_dest = "{}-{}{}".format( + package_name, build_args.version, package_extension) + info('# Android package renamed to {}'.format(package_file_dest)) + shprint(sh.cp, package_file, package_file_dest) else: - shprint(sh.cp, apk_file, './') + shprint(sh.cp, package_file, './') + + @require_prebuilt_dist + def apk(self, args): + output, build_args = self._build_package(args, package_type='apk') + output_dir = join(self._dist.dist_dir, "build", "outputs", 'apk', args.build_mode) + self._finish_package(args, output, build_args, 'apk', output_dir) + + @require_prebuilt_dist + def aar(self, args): + output, build_args = self._build_package(args, package_type='aar') + output_dir = join(self._dist.dist_dir, "build", "outputs", 'aar') + self._finish_package(args, output, build_args, 'aar', output_dir) + + @require_prebuilt_dist + def aab(self, args): + output, build_args = self._build_package(args, package_type='aab') + output_dir = join(self._dist.dist_dir, "build", "outputs", 'bundle', 
args.build_mode) + self._finish_package(args, output, build_args, 'aab', output_dir) @require_prebuilt_dist def create(self, args): @@ -954,7 +1213,7 @@ class ToolchainCL(object): if dists: print('{Style.BRIGHT}Distributions currently installed are:' - '{Style.RESET_ALL}'.format(Style=Out_Style, Fore=Out_Fore)) + '{Style.RESET_ALL}'.format(Style=Out_Style)) pretty_log_dists(dists, print) else: print('{Style.BRIGHT}There are no dists currently built.' @@ -1017,6 +1276,9 @@ class ToolchainCL(object): sys.stdout.write(line) sys.stdout.flush() + def recommendations(self, args): + print_recommendations() + def build_status(self, _args): """Print the status of the specified build. """ print('{Style.BRIGHT}Bootstraps whose core components are probably ' @@ -1046,12 +1308,5 @@ class ToolchainCL(object): print(recipe_str) -def main(): - try: - ToolchainCL() - except BuildInterruptingException as exc: - handle_build_exception(exc) - - if __name__ == "__main__": main() diff --git a/p4a/pythonforandroid/tools/biglink b/p4a/pythonforandroid/tools/biglink index 6b86dbf..8a8e561 100755 --- a/p4a/pythonforandroid/tools/biglink +++ b/p4a/pythonforandroid/tools/biglink @@ -1,11 +1,10 @@ #!/usr/bin/env python -from __future__ import print_function import os import sys import subprocess -sofiles = [ ] +sofiles = [] for directory in sys.argv[2:]: @@ -20,7 +19,7 @@ for directory in sys.argv[2:]: sofiles.append(fn[:-2]) # The raw argument list. -args = [ ] +args = [] for fn in sofiles: afn = fn + ".o" @@ -31,7 +30,7 @@ for fn in sofiles: data = fd.read() args.extend(data.split(" ")) -unique_args = [ ] +unique_args = [] while args: a = args.pop() if a in ('-L', ): diff --git a/p4a/pythonforandroid/tools/liblink b/p4a/pythonforandroid/tools/liblink index 523eef9..de837e6 100755 --- a/p4a/pythonforandroid/tools/liblink +++ b/p4a/pythonforandroid/tools/liblink @@ -1,6 +1,5 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python -from __future__ import print_function import sys import subprocess from os import environ @@ -22,7 +21,7 @@ while i < len(sys.argv): i += 1 continue - if opt.startswith("-l") or opt.startswith("-L"): + if opt.startswith(("-l", "-L")): libs.append(opt) continue @@ -34,27 +33,8 @@ while i < len(sys.argv): i += 1 continue - if opt.startswith("-I"): - continue - - if opt.startswith("-m"): - continue - - if opt.startswith("-f"): - continue - - if opt.startswith("-O"): - continue - - if opt.startswith("-g"): - continue - - if opt.startswith("-D"): - continue - - if opt.startswith("-R"): - # for -rpath, not implemented yet. 
-        continue
+    if opt.startswith(
+            ("-I", "-isystem", "-m", "-f", "-O", "-g", "-D", "-R")):
+        continue
 
     if opt.startswith("-"):
         print(sys.argv)
diff --git a/p4a/pythonforandroid/util.py b/p4a/pythonforandroid/util.py
index 9c007c2..f290cdc 100644
--- a/p4a/pythonforandroid/util.py
+++ b/p4a/pythonforandroid/util.py
@@ -1,29 +1,11 @@
 import contextlib
 from os.path import exists, join
 from os import getcwd, chdir, makedirs, walk, uname
-import io
-import json
-import sh
 import shutil
-import sys
 from fnmatch import fnmatch
 from tempfile import mkdtemp
-try:
-    from urllib.request import FancyURLopener
-except ImportError:
-    from urllib import FancyURLopener
-
 from pythonforandroid.logger import (logger, Err_Fore, error, info)
 
-IS_PY3 = sys.version_info[0] >= 3
-
-
-class WgetDownloader(FancyURLopener):
-    version = ('Wget/1.17.1')
-
-
-urlretrieve = WgetDownloader().retrieve
-
 build_platform = '{system}-{machine}'.format(
     system=uname()[0], machine=uname()[-1]).lower()
@@ -62,90 +44,6 @@ def ensure_dir(filename):
     makedirs(filename)
 
 
-class JsonStore(object):
-    """Replacement of shelve using json, needed for support python 2 and 3.
-    """
-
-    def __init__(self, filename):
-        super(JsonStore, self).__init__()
-        self.filename = filename
-        self.data = {}
-        if exists(filename):
-            try:
-                with io.open(filename, encoding='utf-8') as fd:
-                    self.data = json.load(fd)
-            except ValueError:
-                print("Unable to read the state.db, content will be replaced.")
-
-    def __getitem__(self, key):
-        return self.data[key]
-
-    def __setitem__(self, key, value):
-        self.data[key] = value
-        self.sync()
-
-    def __delitem__(self, key):
-        del self.data[key]
-        self.sync()
-
-    def __contains__(self, item):
-        return item in self.data
-
-    def get(self, item, default=None):
-        return self.data.get(item, default)
-
-    def keys(self):
-        return self.data.keys()
-
-    def remove_all(self, prefix):
-        for key in self.data.keys()[:]:
-            if not key.startswith(prefix):
-                continue
-            del self.data[key]
-        self.sync()
-
-    def sync(self):
-        # http://stackoverflow.com/questions/12309269/write-json-data-to-file-in-python/14870531#14870531
-        if IS_PY3:
-            with open(self.filename, 'w') as fd:
-                json.dump(self.data, fd, ensure_ascii=False)
-        else:
-            with io.open(self.filename, 'w', encoding='utf-8') as fd:
-                fd.write(unicode(json.dumps(self.data, ensure_ascii=False)))  # noqa F821
-
-
-def which(program, path_env):
-    '''Locate an executable in the system.'''
-    import os
-
-    def is_exe(fpath):
-        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
-
-    fpath, fname = os.path.split(program)
-    if fpath:
-        if is_exe(program):
-            return program
-    else:
-        for path in path_env.split(os.pathsep):
-            path = path.strip('"')
-            exe_file = os.path.join(path, program)
-            if is_exe(exe_file):
-                return exe_file
-
-    return None
-
-
-def get_virtualenv_executable():
-    virtualenv = None
-    if virtualenv is None:
-        virtualenv = sh.which('virtualenv2')
-    if virtualenv is None:
-        virtualenv = sh.which('virtualenv-2.7')
-    if virtualenv is None:
-        virtualenv = sh.which('virtualenv')
-    return virtualenv
-
-
 def walk_valid_filens(base_dir, invalid_dir_names, invalid_file_patterns):
     """Recursively walks all the files and directories in ``dirn``,
     ignoring directories that match any pattern in ``invalid_dirns``
@@ -176,9 +74,23 @@ def walk_valid_filens(base_dir, invalid_dir_names, invalid_file_patterns):
             yield join(dirn, filen)
 
 
+def load_source(module, filename):
+    # Python 3.5+
+    import importlib.util
+    if hasattr(importlib.util, 'module_from_spec'):
+        spec = 
importlib.util.spec_from_file_location(module, filename) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + return mod + else: + # Python 3.3 and 3.4: + from importlib.machinery import SourceFileLoader + return SourceFileLoader(module, filename).load_module() + + class BuildInterruptingException(Exception): def __init__(self, message, instructions=None): - super(BuildInterruptingException, self).__init__(message, instructions) + super().__init__(message, instructions) self.message = message self.instructions = instructions diff --git a/p4a/pythonforandroidold/__init__.py b/p4a/pythonforandroidold/__init__.py new file mode 100644 index 0000000..27f4493 --- /dev/null +++ b/p4a/pythonforandroidold/__init__.py @@ -0,0 +1,2 @@ + +__version__ = '0.5' diff --git a/p4a/pythonforandroidold/archs.py b/p4a/pythonforandroidold/archs.py new file mode 100644 index 0000000..09ebba4 --- /dev/null +++ b/p4a/pythonforandroidold/archs.py @@ -0,0 +1,253 @@ +from distutils.spawn import find_executable +from os import environ +from os.path import (exists, join, dirname, split) +from glob import glob + +from pythonforandroid.recipe import Recipe +from pythonforandroid.util import BuildInterruptingException, build_platform + + +class Arch(object): + + toolchain_prefix = None + '''The prefix for the toolchain dir in the NDK.''' + + command_prefix = None + '''The prefix for NDK commands such as gcc.''' + + def __init__(self, ctx): + super(Arch, self).__init__() + self.ctx = ctx + + # Allows injecting additional linker paths used by any recipe. + # This can also be modified by recipes (like the librt recipe) + # to make sure that some sort of global resource is available & + # linked for all others. + self.extra_global_link_paths = [] + + def __str__(self): + return self.arch + + @property + def include_dirs(self): + return [ + "{}/{}".format( + self.ctx.include_dir, + d.format(arch=self)) + for d in self.ctx.include_dirs] + + @property + def target(self): + target_data = self.command_prefix.split('-') + return '-'.join( + [target_data[0], 'none', target_data[1], target_data[2]]) + + def get_env(self, with_flags_in_cc=True, clang=False): + env = {} + + cflags = [ + '-DANDROID', + '-fomit-frame-pointer', + '-D__ANDROID_API__={}'.format(self.ctx.ndk_api)] + if not clang: + cflags.append('-mandroid') + else: + cflags.append('-target ' + self.target) + toolchain = '{android_host}-{toolchain_version}'.format( + android_host=self.ctx.toolchain_prefix, + toolchain_version=self.ctx.toolchain_version) + toolchain = join(self.ctx.ndk_dir, 'toolchains', toolchain, + 'prebuilt', build_platform) + cflags.append('-gcc-toolchain {}'.format(toolchain)) + + env['CFLAGS'] = ' '.join(cflags) + + # Link the extra global link paths first before anything else + # (such that overriding system libraries with them is possible) + env['LDFLAGS'] = ' ' + " ".join([ + "-L'" + l.replace("'", "'\"'\"'") + "'" # no shlex.quote in py2 + for l in self.extra_global_link_paths + ]) + ' ' + + sysroot = join(self.ctx._ndk_dir, 'sysroot') + if exists(sysroot): + # post-15 NDK per + # https://android.googlesource.com/platform/ndk/+/ndk-r15-release/docs/UnifiedHeaders.md + env['CFLAGS'] += ' -isystem {}/sysroot/usr/include/{}'.format( + self.ctx.ndk_dir, self.ctx.toolchain_prefix) + env['CFLAGS'] += ' -I{}/sysroot/usr/include/{}'.format( + self.ctx.ndk_dir, self.command_prefix) + else: + sysroot = self.ctx.ndk_platform + env['CFLAGS'] += ' -I{}'.format(self.ctx.ndk_platform) + env['CFLAGS'] += ' -isysroot {} '.format(sysroot) + 
env['CFLAGS'] += '-I' + join(self.ctx.get_python_install_dir(), + 'include/python{}'.format( + self.ctx.python_recipe.version[0:3]) + ) + + env['LDFLAGS'] += '--sysroot={} '.format(self.ctx.ndk_platform) + + env["CXXFLAGS"] = env["CFLAGS"] + + env["LDFLAGS"] += " ".join(['-lm', '-L' + self.ctx.get_libs_dir(self.arch)]) + + if self.ctx.ndk == 'crystax': + env['LDFLAGS'] += ' -L{}/sources/crystax/libs/{} -lcrystax'.format(self.ctx.ndk_dir, self.arch) + + toolchain_prefix = self.ctx.toolchain_prefix + toolchain_version = self.ctx.toolchain_version + command_prefix = self.command_prefix + + env['TOOLCHAIN_PREFIX'] = toolchain_prefix + env['TOOLCHAIN_VERSION'] = toolchain_version + + ccache = '' + if self.ctx.ccache and bool(int(environ.get('USE_CCACHE', '1'))): + # print('ccache found, will optimize builds') + ccache = self.ctx.ccache + ' ' + env['USE_CCACHE'] = '1' + env['NDK_CCACHE'] = self.ctx.ccache + env.update({k: v for k, v in environ.items() if k.startswith('CCACHE_')}) + + if clang: + llvm_dirname = split( + glob(join(self.ctx.ndk_dir, 'toolchains', 'llvm*'))[-1])[-1] + clang_path = join(self.ctx.ndk_dir, 'toolchains', llvm_dirname, + 'prebuilt', build_platform, 'bin') + environ['PATH'] = '{clang_path}:{path}'.format( + clang_path=clang_path, path=environ['PATH']) + exe = join(clang_path, 'clang') + execxx = join(clang_path, 'clang++') + else: + exe = '{command_prefix}-gcc'.format(command_prefix=command_prefix) + execxx = '{command_prefix}-g++'.format(command_prefix=command_prefix) + + cc = find_executable(exe, path=environ['PATH']) + if cc is None: + print('Searching path are: {!r}'.format(environ['PATH'])) + raise BuildInterruptingException( + 'Couldn\'t find executable for CC. This indicates a ' + 'problem locating the {} executable in the Android ' + 'NDK, not that you don\'t have a normal compiler ' + 'installed. 
Exiting.'.format(exe)) + + if with_flags_in_cc: + env['CC'] = '{ccache}{exe} {cflags}'.format( + exe=exe, + ccache=ccache, + cflags=env['CFLAGS']) + env['CXX'] = '{ccache}{execxx} {cxxflags}'.format( + execxx=execxx, + ccache=ccache, + cxxflags=env['CXXFLAGS']) + else: + env['CC'] = '{ccache}{exe}'.format( + exe=exe, + ccache=ccache) + env['CXX'] = '{ccache}{execxx}'.format( + execxx=execxx, + ccache=ccache) + + env['AR'] = '{}-ar'.format(command_prefix) + env['RANLIB'] = '{}-ranlib'.format(command_prefix) + env['LD'] = '{}-ld'.format(command_prefix) + env['LDSHARED'] = env["CC"] + " -pthread -shared " +\ + "-Wl,-O1 -Wl,-Bsymbolic-functions " + if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax: + # For crystax python, we can't use the host python headers: + env["CFLAGS"] += ' -I{}/sources/python/{}/include/python/'.\ + format(self.ctx.ndk_dir, self.ctx.python_recipe.version[0:3]) + env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix) + env['MAKE'] = 'make -j5' + env['READELF'] = '{}-readelf'.format(command_prefix) + env['NM'] = '{}-nm'.format(command_prefix) + + hostpython_recipe = Recipe.get_recipe( + 'host' + self.ctx.python_recipe.name, self.ctx) + env['BUILDLIB_PATH'] = join( + hostpython_recipe.get_build_dir(self.arch), + 'build', 'lib.{}-{}'.format( + build_platform, self.ctx.python_recipe.major_minor_version_string) + ) + + env['PATH'] = environ['PATH'] + + env['ARCH'] = self.arch + env['NDK_API'] = 'android-{}'.format(str(self.ctx.ndk_api)) + + if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax: + env['CRYSTAX_PYTHON_VERSION'] = self.ctx.python_recipe.version + + return env + + +class ArchARM(Arch): + arch = "armeabi" + toolchain_prefix = 'arm-linux-androideabi' + command_prefix = 'arm-linux-androideabi' + platform_dir = 'arch-arm' + + @property + def target(self): + target_data = self.command_prefix.split('-') + return '-'.join( + ['armv7a', 'none', target_data[1], target_data[2]]) + + +class ArchARMv7_a(ArchARM): + arch = 'armeabi-v7a' + + def get_env(self, with_flags_in_cc=True, clang=False): + env = super(ArchARMv7_a, self).get_env(with_flags_in_cc, clang=clang) + env['CFLAGS'] = (env['CFLAGS'] + + (' -march=armv7-a -mfloat-abi=softfp ' + '-mfpu=vfp -mthumb')) + env['CXXFLAGS'] = env['CFLAGS'] + return env + + +class Archx86(Arch): + arch = 'x86' + toolchain_prefix = 'x86' + command_prefix = 'i686-linux-android' + platform_dir = 'arch-x86' + + def get_env(self, with_flags_in_cc=True, clang=False): + env = super(Archx86, self).get_env(with_flags_in_cc, clang=clang) + env['CFLAGS'] = (env['CFLAGS'] + + ' -march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32') + env['CXXFLAGS'] = env['CFLAGS'] + return env + + +class Archx86_64(Arch): + arch = 'x86_64' + toolchain_prefix = 'x86_64' + command_prefix = 'x86_64-linux-android' + platform_dir = 'arch-x86_64' + + def get_env(self, with_flags_in_cc=True, clang=False): + env = super(Archx86_64, self).get_env(with_flags_in_cc, clang=clang) + env['CFLAGS'] = (env['CFLAGS'] + + ' -march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel') + env['CXXFLAGS'] = env['CFLAGS'] + return env + + +class ArchAarch_64(Arch): + arch = 'arm64-v8a' + toolchain_prefix = 'aarch64-linux-android' + command_prefix = 'aarch64-linux-android' + platform_dir = 'arch-arm64' + + def get_env(self, with_flags_in_cc=True, clang=False): + env = super(ArchAarch_64, self).get_env(with_flags_in_cc, clang=clang) + incpath = ' -I' + join(dirname(__file__), 'includes', 'arm64-v8a') + env['EXTRA_CFLAGS'] = incpath + env['CFLAGS'] += incpath + 
env['CXXFLAGS'] += incpath + if with_flags_in_cc: + env['CC'] += incpath + env['CXX'] += incpath + return env diff --git a/p4a/pythonforandroidold/bdistapk.py b/p4a/pythonforandroidold/bdistapk.py new file mode 100644 index 0000000..a27f4d1 --- /dev/null +++ b/p4a/pythonforandroidold/bdistapk.py @@ -0,0 +1,148 @@ +from __future__ import print_function +from setuptools import Command +from pythonforandroid import toolchain + +import sys +from os.path import realpath, join, exists, dirname, curdir, basename, split +from os import makedirs +from glob import glob +from shutil import rmtree, copyfile + + +def argv_contains(t): + for arg in sys.argv: + if arg.startswith(t): + return True + return False + + +class BdistAPK(Command): + description = 'Create an APK with python-for-android' + + user_options = [] + + def initialize_options(self): + for option in self.user_options: + setattr(self, option[0].strip('=').replace('-', '_'), None) + + option_dict = self.distribution.get_option_dict('apk') + + # This is a hack, we probably aren't supposed to loop through + # the option_dict so early because distutils does exactly the + # same thing later to check that we support the + # options. However, it works... + for (option, (source, value)) in option_dict.items(): + setattr(self, option, str(value)) + + + def finalize_options(self): + + setup_options = self.distribution.get_option_dict('apk') + for (option, (source, value)) in setup_options.items(): + if source == 'command line': + continue + if not argv_contains('--' + option): + # allow 'permissions': ['permission', 'permission] in apk + if option == 'permissions': + for perm in value: + sys.argv.append('--permission={}'.format(perm)) + elif value in (None, 'None'): + sys.argv.append('--{}'.format(option)) + else: + sys.argv.append('--{}={}'.format(option, value)) + + # Inject some argv options from setup.py if the user did not + # provide them + if not argv_contains('--name'): + name = self.distribution.get_name() + sys.argv.append('--name="{}"'.format(name)) + self.name = name + + if not argv_contains('--package'): + package = 'org.test.{}'.format(self.name.lower().replace(' ', '')) + print('WARNING: You did not supply an Android package ' + 'identifier, trying {} instead.'.format(package)) + print(' This may fail if this is not a valid identifier') + sys.argv.append('--package={}'.format(package)) + + if not argv_contains('--version'): + version = self.distribution.get_version() + sys.argv.append('--version={}'.format(version)) + + if not argv_contains('--arch'): + arch = 'arm64-v8a' + self.arch = arch + sys.argv.append('--arch={}'.format(arch)) + + def run(self): + + self.prepare_build_dir() + + from pythonforandroid.toolchain import main + sys.argv[1] = 'apk' + main() + + def prepare_build_dir(self): + + if argv_contains('--private') and not argv_contains('--launcher'): + print('WARNING: Received --private argument when this would ' + 'normally be generated automatically.') + print(' This is probably bad unless you meant to do ' + 'that.') + + bdist_dir = 'build/bdist.android-{}'.format(self.arch) + if exists(bdist_dir): + rmtree(bdist_dir) + makedirs(bdist_dir) + + globs = [] + for directory, patterns in self.distribution.package_data.items(): + for pattern in patterns: + globs.append(join(directory, pattern)) + + filens = [] + for pattern in globs: + filens.extend(glob(pattern)) + + main_py_dirs = [] + if not argv_contains('--launcher'): + for filen in filens: + new_dir = join(bdist_dir, dirname(filen)) + if not exists(new_dir): + 
makedirs(new_dir) + print('Including {}'.format(filen)) + copyfile(filen, join(bdist_dir, filen)) + if basename(filen) in ('main.py', 'main.pyo'): + main_py_dirs.append(filen) + + # This feels ridiculous, but how else to define the main.py dir? + # Maybe should just fail? + if not main_py_dirs and not argv_contains('--launcher'): + print('ERROR: Could not find main.py, so no app build dir defined') + print('You should name your app entry point main.py') + exit(1) + if len(main_py_dirs) > 1: + print('WARNING: Multiple main.py dirs found, using the shortest path') + main_py_dirs.sort(key=lambda j: len(split(j))) + + if not argv_contains('--launcher'): + sys.argv.append('--private={}'.format( + join(realpath(curdir), bdist_dir, dirname(main_py_dirs[0]))) + ) + + +def _set_user_options(): + # This seems like a silly way to do things, but not sure if there's a + # better way to pass arbitrary options onwards to p4a + user_options = [('requirements=', None, None),] + for i, arg in enumerate(sys.argv): + if arg.startswith('--'): + if ('=' in arg or + (i < (len(sys.argv) - 1) and not sys.argv[i+1].startswith('-'))): + user_options.append((arg[2:].split('=')[0] + '=', None, None)) + else: + user_options.append((arg[2:], None, None)) + + BdistAPK.user_options = user_options + +_set_user_options() diff --git a/p4a/pythonforandroidold/bootstrap.py b/p4a/pythonforandroidold/bootstrap.py new file mode 100644 index 0000000..b4a9a9e --- /dev/null +++ b/p4a/pythonforandroidold/bootstrap.py @@ -0,0 +1,318 @@ +from os.path import (join, dirname, isdir, normpath, splitext, basename) +from os import listdir, walk, sep +import sh +import shlex +import glob +import importlib +import os +import shutil + +from pythonforandroid.logger import (warning, shprint, info, logger, + debug) +from pythonforandroid.util import (current_directory, ensure_dir, + temp_directory) +from pythonforandroid.recipe import Recipe + + +def copy_files(src_root, dest_root, override=True): + for root, dirnames, filenames in walk(src_root): + for filename in filenames: + subdir = normpath(root.replace(src_root, "")) + if subdir.startswith(sep): # ensure it is relative + subdir = subdir[1:] + dest_dir = join(dest_root, subdir) + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + src_file = join(root, filename) + dest_file = join(dest_dir, filename) + if os.path.isfile(src_file): + if override and os.path.exists(dest_file): + os.unlink(dest_file) + if not os.path.exists(dest_file): + shutil.copy(src_file, dest_file) + else: + os.makedirs(dest_file) + + +class Bootstrap(object): + '''An Android project template, containing recipe stuff for + compilation and templated fields for APK info. + ''' + name = '' + jni_subdir = '/jni' + ctx = None + + bootstrap_dir = None + + build_dir = None + dist_dir = None + dist_name = None + distribution = None + + # All bootstraps should include Python in some way: + recipe_depends = [ + ("python2", "python2legacy", "python3", "python3crystax"), + 'android', + ] + + can_be_chosen_automatically = True + '''Determines whether the bootstrap can be chosen as one that + satisfies user requirements. If False, it will not be returned + from Bootstrap.get_bootstrap_from_recipes. 
+ ''' + + # Other things a Bootstrap might need to track (maybe separately): + # ndk_main.c + # whitelist.txt + # blacklist.txt + + @property + def dist_dir(self): + '''The dist dir at which to place the finished distribution.''' + if self.distribution is None: + warning('Tried to access {}.dist_dir, but {}.distribution ' + 'is None'.format(self, self)) + exit(1) + return self.distribution.dist_dir + + @property + def jni_dir(self): + return self.name + self.jni_subdir + + def check_recipe_choices(self): + '''Checks what recipes are being built to see which of the alternative + and optional dependencies are being used, + and returns a list of these.''' + recipes = [] + built_recipes = self.ctx.recipe_build_order + for recipe in self.recipe_depends: + if isinstance(recipe, (tuple, list)): + for alternative in recipe: + if alternative in built_recipes: + recipes.append(alternative) + break + return sorted(recipes) + + def get_build_dir_name(self): + choices = self.check_recipe_choices() + dir_name = '-'.join([self.name] + choices) + return dir_name + + def get_build_dir(self): + return join(self.ctx.build_dir, 'bootstrap_builds', self.get_build_dir_name()) + + def get_dist_dir(self, name): + return join(self.ctx.dist_dir, name) + + def get_common_dir(self): + return os.path.abspath(join(self.bootstrap_dir, "..", 'common')) + + @property + def name(self): + modname = self.__class__.__module__ + return modname.split(".", 2)[-1] + + def prepare_build_dir(self): + '''Ensure that a build dir exists for the recipe. This same single + dir will be used for building all different archs.''' + self.build_dir = self.get_build_dir() + self.common_dir = self.get_common_dir() + copy_files(join(self.bootstrap_dir, 'build'), self.build_dir) + copy_files(join(self.common_dir, 'build'), self.build_dir, + override=False) + if self.ctx.symlink_java_src: + info('Symlinking java src instead of copying') + shprint(sh.rm, '-r', join(self.build_dir, 'src')) + shprint(sh.mkdir, join(self.build_dir, 'src')) + for dirn in listdir(join(self.bootstrap_dir, 'build', 'src')): + shprint(sh.ln, '-s', join(self.bootstrap_dir, 'build', 'src', dirn), + join(self.build_dir, 'src')) + with current_directory(self.build_dir): + with open('project.properties', 'w') as fileh: + fileh.write('target=android-{}'.format(self.ctx.android_api)) + + def prepare_dist_dir(self, name): + ensure_dir(self.dist_dir) + + def run_distribute(self): + self.distribution.save_info(self.dist_dir) + + @classmethod + def list_bootstraps(cls): + '''Find all the available bootstraps and return them.''' + forbidden_dirs = ('__pycache__', 'common') + bootstraps_dir = join(dirname(__file__), 'bootstraps') + for name in listdir(bootstraps_dir): + if name in forbidden_dirs: + continue + filen = join(bootstraps_dir, name) + if isdir(filen): + yield name + + @classmethod + def get_bootstrap_from_recipes(cls, recipes, ctx): + '''Returns a bootstrap whose recipe requirements do not conflict with + the given recipes.''' + info('Trying to find a bootstrap that matches the given recipes.') + bootstraps = [cls.get_bootstrap(name, ctx) + for name in cls.list_bootstraps()] + acceptable_bootstraps = [] + for bs in bootstraps: + if not bs.can_be_chosen_automatically: + continue + possible_dependency_lists = expand_dependencies(bs.recipe_depends) + for possible_dependencies in possible_dependency_lists: + ok = True + for recipe in possible_dependencies: + recipe = Recipe.get_recipe(recipe, ctx) + if any([conflict in recipes for conflict in recipe.conflicts]): + ok = False + 
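+                        # (clarifying note) one of this bootstrap's own
+                        # dependencies conflicts with a requested recipe,
+                        # so this dependency combination is rejected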
break + for recipe in recipes: + try: + recipe = Recipe.get_recipe(recipe, ctx) + except ValueError: + conflicts = [] + else: + conflicts = recipe.conflicts + if any([conflict in possible_dependencies + for conflict in conflicts]): + ok = False + break + if ok and bs not in acceptable_bootstraps: + acceptable_bootstraps.append(bs) + info('Found {} acceptable bootstraps: {}'.format( + len(acceptable_bootstraps), + [bs.name for bs in acceptable_bootstraps])) + if acceptable_bootstraps: + info('Using the first of these: {}' + .format(acceptable_bootstraps[0].name)) + return acceptable_bootstraps[0] + return None + + @classmethod + def get_bootstrap(cls, name, ctx): + '''Returns an instance of a bootstrap with the given name. + + This is the only way you should access a bootstrap class, as + it sets the bootstrap directory correctly. + ''' + if name is None: + return None + if not hasattr(cls, 'bootstraps'): + cls.bootstraps = {} + if name in cls.bootstraps: + return cls.bootstraps[name] + mod = importlib.import_module('pythonforandroid.bootstraps.{}' + .format(name)) + if len(logger.handlers) > 1: + logger.removeHandler(logger.handlers[1]) + bootstrap = mod.bootstrap + bootstrap.bootstrap_dir = join(ctx.root_dir, 'bootstraps', name) + bootstrap.ctx = ctx + return bootstrap + + def distribute_libs(self, arch, src_dirs, wildcard='*', dest_dir="libs"): + '''Copy existing arch libs from build dirs to current dist dir.''' + info('Copying libs') + tgt_dir = join(dest_dir, arch.arch) + ensure_dir(tgt_dir) + for src_dir in src_dirs: + for lib in glob.glob(join(src_dir, wildcard)): + shprint(sh.cp, '-a', lib, tgt_dir) + + def distribute_javaclasses(self, javaclass_dir, dest_dir="src"): + '''Copy existing javaclasses from build dir to current dist dir.''' + info('Copying java files') + ensure_dir(dest_dir) + for filename in glob.glob(javaclass_dir): + shprint(sh.cp, '-a', filename, dest_dir) + + def distribute_aars(self, arch): + '''Process existing .aar bundles and copy to current dist dir.''' + info('Unpacking aars') + for aar in glob.glob(join(self.ctx.aars_dir, '*.aar')): + self._unpack_aar(aar, arch) + + def _unpack_aar(self, aar, arch): + '''Unpack content of .aar bundle and copy to current dist dir.''' + with temp_directory() as temp_dir: + name = splitext(basename(aar))[0] + jar_name = name + '.jar' + info("unpack {} aar".format(name)) + debug(" from {}".format(aar)) + debug(" to {}".format(temp_dir)) + shprint(sh.unzip, '-o', aar, '-d', temp_dir) + + jar_src = join(temp_dir, 'classes.jar') + jar_tgt = join('libs', jar_name) + debug("copy {} jar".format(name)) + debug(" from {}".format(jar_src)) + debug(" to {}".format(jar_tgt)) + ensure_dir('libs') + shprint(sh.cp, '-a', jar_src, jar_tgt) + + so_src_dir = join(temp_dir, 'jni', arch.arch) + so_tgt_dir = join('libs', arch.arch) + debug("copy {} .so".format(name)) + debug(" from {}".format(so_src_dir)) + debug(" to {}".format(so_tgt_dir)) + ensure_dir(so_tgt_dir) + so_files = glob.glob(join(so_src_dir, '*.so')) + for f in so_files: + shprint(sh.cp, '-a', f, so_tgt_dir) + + def strip_libraries(self, arch): + info('Stripping libraries') + if self.ctx.python_recipe.from_crystax: + info('Python was loaded from CrystaX, skipping strip') + return + env = arch.get_env() + tokens = shlex.split(env['STRIP']) + strip = sh.Command(tokens[0]) + if len(tokens) > 1: + strip = strip.bake(tokens[1:]) + + libs_dir = join(self.dist_dir, '_python_bundle', + '_python_bundle', 'modules') + if self.ctx.python_recipe.name == 'python2legacy': + libs_dir = 
join(self.dist_dir, 'private') + filens = shprint(sh.find, libs_dir, join(self.dist_dir, 'libs'), + '-iname', '*.so', _env=env).stdout.decode('utf-8') + + logger.info('Stripping libraries in private dir') + for filen in filens.split('\n'): + if not filen: + continue # skip the last '' + try: + strip(filen, _env=env) + except sh.ErrorReturnCode_1: + logger.debug('Failed to strip ' + filen) + + def fry_eggs(self, sitepackages): + info('Frying eggs in {}'.format(sitepackages)) + for d in listdir(sitepackages): + rd = join(sitepackages, d) + if isdir(rd) and d.endswith('.egg'): + info(' ' + d) + files = [join(rd, f) for f in listdir(rd) if f != 'EGG-INFO'] + if files: + shprint(sh.mv, '-t', sitepackages, *files) + shprint(sh.rm, '-rf', d) + + +def expand_dependencies(recipes): + recipe_lists = [[]] + for recipe in recipes: + if isinstance(recipe, (tuple, list)): + new_recipe_lists = [] + for alternative in recipe: + for old_list in recipe_lists: + new_list = [i for i in old_list] + new_list.append(alternative) + new_recipe_lists.append(new_list) + recipe_lists = new_recipe_lists + else: + for old_list in recipe_lists: + old_list.append(recipe) + return recipe_lists diff --git a/p4a/pythonforandroidold/bootstraps/__init__.py b/p4a/pythonforandroidold/bootstraps/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/ant.properties b/p4a/pythonforandroidold/bootstraps/common/build/ant.properties similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/ant.properties rename to p4a/pythonforandroidold/bootstraps/common/build/ant.properties diff --git a/p4a/pythonforandroidold/bootstraps/common/build/build.py b/p4a/pythonforandroidold/bootstraps/common/build/build.py new file mode 100644 index 0000000..342115e --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/build.py @@ -0,0 +1,794 @@ +#!/usr/bin/env python2.7 + +from __future__ import print_function + +import json +from os.path import ( + dirname, join, isfile, realpath, + relpath, split, exists, basename +) +from os import listdir, makedirs, remove +import os +import shlex +import shutil +import subprocess +import sys +import tarfile +import tempfile +import time +from zipfile import ZipFile + +from distutils.version import LooseVersion +from fnmatch import fnmatch +import jinja2 + + +def get_dist_info_for(key): + try: + with open(join(dirname(__file__), 'dist_info.json'), 'r') as fileh: + info = json.load(fileh) + value = str(info[key]) + except (OSError, KeyError) as e: + print("BUILD FAILURE: Couldn't extract the key `" + key + "` " + + "from dist_info.json: " + str(e)) + sys.exit(1) + return value + + +def get_hostpython(): + return get_dist_info_for('hostpython') + + +def get_python_version(): + return get_dist_info_for('python_version') + + +def get_bootstrap_name(): + return get_dist_info_for('bootstrap') + + +if os.name == 'nt': + ANDROID = 'android.bat' + ANT = 'ant.bat' +else: + ANDROID = 'android' + ANT = 'ant' + +curdir = dirname(__file__) + +PYTHON = get_hostpython() +PYTHON_VERSION = get_python_version() +if PYTHON is not None and not exists(PYTHON): + PYTHON = None + +BLACKLIST_PATTERNS = [ + # code versionning + '^*.hg/*', + '^*.git/*', + '^*.bzr/*', + '^*.svn/*', + + # temp files + '~', + '*.bak', + '*.swp', +] +# pyc/py +if PYTHON is not None: + BLACKLIST_PATTERNS.append('*.py') + if PYTHON_VERSION and int(PYTHON_VERSION[0]) == 2: + # we only blacklist `.pyc` for python2 because in python3 the compiled + # extension is `.pyc` (.pyo 
files not exists for python >= 3.6) + BLACKLIST_PATTERNS.append('*.pyc') + +WHITELIST_PATTERNS = [] +if get_bootstrap_name() in ('sdl2', 'webview', 'service_only'): + WHITELIST_PATTERNS.append('pyconfig.h') + +python_files = [] + + +environment = jinja2.Environment(loader=jinja2.FileSystemLoader( + join(curdir, 'templates'))) + + +def try_unlink(fn): + if exists(fn): + os.unlink(fn) + + +def ensure_dir(path): + if not exists(path): + makedirs(path) + + +def render(template, dest, **kwargs): + '''Using jinja2, render `template` to the filename `dest`, supplying the + + keyword arguments as template parameters. + ''' + + dest_dir = dirname(dest) + if dest_dir and not exists(dest_dir): + makedirs(dest_dir) + + template = environment.get_template(template) + text = template.render(**kwargs) + + f = open(dest, 'wb') + f.write(text.encode('utf-8')) + f.close() + + +def is_whitelist(name): + return match_filename(WHITELIST_PATTERNS, name) + + +def is_blacklist(name): + if is_whitelist(name): + return False + return match_filename(BLACKLIST_PATTERNS, name) + + +def match_filename(pattern_list, name): + for pattern in pattern_list: + if pattern.startswith('^'): + pattern = pattern[1:] + else: + pattern = '*/' + pattern + if fnmatch(name, pattern): + return True + + +def listfiles(d): + basedir = d + subdirlist = [] + for item in os.listdir(d): + fn = join(d, item) + if isfile(fn): + yield fn + else: + subdirlist.append(join(basedir, item)) + for subdir in subdirlist: + for fn in listfiles(subdir): + yield fn + + +def make_python_zip(): + ''' + Search for all the python related files, and construct the pythonXX.zip + According to + # http://randomsplat.com/id5-cross-compiling-python-for-embedded-linux.html + site-packages, config and lib-dynload will be not included. + ''' + + if not exists('private'): + print('No compiled python is present to zip, skipping.') + return + + global python_files + d = realpath(join('private', 'lib', 'python2.7')) + + def select(fn): + if is_blacklist(fn): + return False + fn = realpath(fn) + assert(fn.startswith(d)) + fn = fn[len(d):] + if (fn.startswith('/site-packages/') + or fn.startswith('/config/') + or fn.startswith('/lib-dynload/') + or fn.startswith('/libpymodules.so')): + return False + return fn + + # get a list of all python file + python_files = [x for x in listfiles(d) if select(x)] + + # create the final zipfile + zfn = join('private', 'lib', 'python27.zip') + zf = ZipFile(zfn, 'w') + + # put all the python files in it + for fn in python_files: + afn = fn[len(d):] + zf.write(fn, afn) + zf.close() + + +def make_tar(tfn, source_dirs, ignore_path=[], optimize_python=True): + ''' + Make a zip file `fn` from the contents of source_dis. 
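+
+    (Note: despite the wording above, the archive written is a gzip-compressed
+    tar created with ``tarfile.open(tfn, 'w:gz')`` below, not a zip.)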
+ ''' + + # selector function + def select(fn): + rfn = realpath(fn) + for p in ignore_path: + if p.endswith('/'): + p = p[:-1] + if rfn.startswith(p): + return False + if rfn in python_files: + return False + return not is_blacklist(fn) + + # get the files and relpath file of all the directory we asked for + files = [] + for sd in source_dirs: + sd = realpath(sd) + compile_dir(sd, optimize_python=optimize_python) + files += [(x, relpath(realpath(x), sd)) for x in listfiles(sd) + if select(x)] + + # create tar.gz of thoses files + tf = tarfile.open(tfn, 'w:gz', format=tarfile.USTAR_FORMAT) + dirs = [] + for fn, afn in files: + dn = dirname(afn) + if dn not in dirs: + # create every dirs first if not exist yet + d = '' + for component in split(dn): + d = join(d, component) + if d.startswith('/'): + d = d[1:] + if d == '' or d in dirs: + continue + dirs.append(d) + tinfo = tarfile.TarInfo(d) + tinfo.type = tarfile.DIRTYPE + tf.addfile(tinfo) + + # put the file + tf.add(fn, afn) + tf.close() + + +def compile_dir(dfn, optimize_python=True): + ''' + Compile *.py in directory `dfn` to *.pyo + ''' + + if PYTHON is None: + return + + if int(PYTHON_VERSION[0]) >= 3: + args = [PYTHON, '-m', 'compileall', '-b', '-f', dfn] + else: + args = [PYTHON, '-m', 'compileall', '-f', dfn] + if optimize_python: + # -OO = strip docstrings + args.insert(1, '-OO') + return_code = subprocess.call(args) + + if return_code != 0: + print('Error while running "{}"'.format(' '.join(args))) + print('This probably means one of your Python files has a syntax ' + 'error, see logs above') + exit(1) + + +def make_package(args): + # If no launcher is specified, require a main.py/main.pyo: + if (get_bootstrap_name() != "sdl" or args.launcher is None) and \ + get_bootstrap_name() != "webview": + # (webview doesn't need an entrypoint, apparently) + if args.private is None or ( + not exists(join(realpath(args.private), 'main.py')) and + not exists(join(realpath(args.private), 'main.pyo'))): + print('''BUILD FAILURE: No main.py(o) found in your app directory. This +file must exist to act as the entry point for you app. If your app is +started by a file with a different name, rename it to main.py or add a +main.py that loads it.''') + sys.exit(1) + + assets_dir = "src/main/assets" + + # Delete the old assets. + try_unlink(join(assets_dir, 'public.mp3')) + try_unlink(join(assets_dir, 'private.mp3')) + ensure_dir(assets_dir) + + # In order to speedup import and initial depack, + # construct a python27.zip + make_python_zip() + + # Add extra environment variable file into tar-able directory: + env_vars_tarpath = tempfile.mkdtemp(prefix="p4a-extra-env-") + with open(os.path.join(env_vars_tarpath, "p4a_env_vars.txt"), "w") as f: + f.write("P4A_IS_WINDOWED=" + str(args.window) + "\n") + if hasattr(args, "orientation"): + f.write("P4A_ORIENTATION=" + str(args.orientation) + "\n") + f.write("P4A_NUMERIC_VERSION=" + str(args.numeric_version) + "\n") + f.write("P4A_MINSDK=" + str(args.min_sdk_version) + "\n") + + # Package up the private data (public not supported). 
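+    # The private.mp3 asset assembled below is really a tar.gz holding: the
+    # generated env-vars directory, the user's --private sources, and
+    # whichever python bundle dir exists ('private', 'crystax_python' or
+    # '_python_bundle'), plus webview_includes for the webview bootstrap.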
+ tar_dirs = [env_vars_tarpath] + if args.private: + tar_dirs.append(args.private) + for python_bundle_dir in ('private', 'crystax_python', '_python_bundle'): + if exists(python_bundle_dir): + tar_dirs.append(python_bundle_dir) + if get_bootstrap_name() == "webview": + tar_dirs.append('webview_includes') + if args.private or args.launcher: + make_tar( + join(assets_dir, 'private.mp3'), tar_dirs, args.ignore_path, + optimize_python=args.optimize_python) + + # Remove extra env vars tar-able directory: + shutil.rmtree(env_vars_tarpath) + + # Prepare some variables for templating process + res_dir = "src/main/res" + default_icon = 'templates/kivy-icon.png' + default_presplash = 'templates/kivy-presplash.jpg' + shutil.copy( + args.icon or default_icon, + join(res_dir, 'drawable/icon.png') + ) + if get_bootstrap_name() != "service_only": + shutil.copy( + args.presplash or default_presplash, + join(res_dir, 'drawable/presplash.jpg') + ) + + # If extra Java jars were requested, copy them into the libs directory + jars = [] + if args.add_jar: + for jarname in args.add_jar: + if not exists(jarname): + print('Requested jar does not exist: {}'.format(jarname)) + sys.exit(-1) + shutil.copy(jarname, 'src/main/libs') + jars.append(basename(jarname)) + + # If extra aar were requested, copy them into the libs directory + aars = [] + if args.add_aar: + ensure_dir("libs") + for aarname in args.add_aar: + if not exists(aarname): + print('Requested aar does not exists: {}'.format(aarname)) + sys.exit(-1) + shutil.copy(aarname, 'libs') + aars.append(basename(aarname).rsplit('.', 1)[0]) + + versioned_name = (args.name.replace(' ', '').replace('\'', '') + + '-' + args.version) + + version_code = 0 + if not args.numeric_version: + # Set version code in format (arch-minsdk-app_version) + with open(join(dirname(__file__), 'dist_info.json'), 'r') as dist_info: + dist_data = json.load(dist_info) + arch = dist_data["archs"][0] + arch_dict = {"x86_64": "9", "arm64-v8a": "8", "armeabi-v7a": "7", "x86": "6"} + arch_code = arch_dict.get(arch, '1') + min_sdk = args.min_sdk_version + for i in args.version.split('.'): + version_code *= 100 + version_code += int(i) + args.numeric_version = "{}{}{}".format(arch_code, min_sdk, version_code) + + if args.intent_filters: + with open(args.intent_filters) as fd: + args.intent_filters = fd.read() + + if not args.add_activity: + args.add_activity = [] + + if not args.activity_launch_mode: + args.activity_launch_mode = '' + + if args.extra_source_dirs: + esd = [] + for spec in args.extra_source_dirs: + if ':' in spec: + specdir, specincludes = spec.split(':') + else: + specdir = spec + specincludes = '**' + esd.append((realpath(specdir), specincludes)) + args.extra_source_dirs = esd + else: + args.extra_source_dirs = [] + + service = False + if args.private: + service_main = join(realpath(args.private), 'service', 'main.py') + if exists(service_main) or exists(service_main + 'o'): + service = True + + service_names = [] + for sid, spec in enumerate(args.services): + spec = spec.split(':') + name = spec[0] + entrypoint = spec[1] + options = spec[2:] + + foreground = 'foreground' in options + sticky = 'sticky' in options + + service_names.append(name) + service_target_path =\ + 'src/main/java/{}/Service{}.java'.format( + args.package.replace(".", "/"), + name.capitalize() + ) + render( + 'Service.tmpl.java', + service_target_path, + name=name, + entrypoint=entrypoint, + args=args, + foreground=foreground, + sticky=sticky, + service_id=sid + 1, + ) + + # Find the SDK directory and target 
API + with open('project.properties', 'r') as fileh: + target = fileh.read().strip() + android_api = target.split('-')[1] + try: + int(android_api) + except (ValueError, TypeError): + raise ValueError( + "failed to extract the Android API level from " + + "build.properties. expected int, got: '" + + str(android_api) + "'" + ) + with open('local.properties', 'r') as fileh: + sdk_dir = fileh.read().strip() + sdk_dir = sdk_dir[8:] + + # Try to build with the newest available build tools + ignored = {".DS_Store", ".ds_store"} + build_tools_versions = [x for x in listdir(join(sdk_dir, 'build-tools')) if x not in ignored] + build_tools_versions.sort(key=LooseVersion) + build_tools_version = build_tools_versions[-1] + + # Folder name for launcher (used by SDL2 bootstrap) + url_scheme = 'kivy' + + # Render out android manifest: + manifest_path = "src/main/AndroidManifest.xml" + render_args = { + "args": args, + "service": service, + "service_names": service_names, + "android_api": android_api + } + if get_bootstrap_name() == "sdl2": + render_args["url_scheme"] = url_scheme + render( + 'AndroidManifest.tmpl.xml', + manifest_path, + **render_args) + + # Copy the AndroidManifest.xml to the dist root dir so that ant + # can also use it + if exists('AndroidManifest.xml'): + remove('AndroidManifest.xml') + shutil.copy(manifest_path, 'AndroidManifest.xml') + + # gradle build templates + render( + 'build.tmpl.gradle', + 'build.gradle', + args=args, + aars=aars, + jars=jars, + android_api=android_api, + build_tools_version=build_tools_version + ) + + # ant build templates + render( + 'build.tmpl.xml', + 'build.xml', + args=args, + versioned_name=versioned_name) + + # String resources: + render_args = { + "args": args, + "private_version": str(time.time()) + } + if get_bootstrap_name() == "sdl2": + render_args["url_scheme"] = url_scheme + render( + 'strings.tmpl.xml', + join(res_dir, 'values/strings.xml'), + **render_args) + + if exists(join("templates", "custom_rules.tmpl.xml")): + render( + 'custom_rules.tmpl.xml', + 'custom_rules.xml', + args=args) + + if get_bootstrap_name() == "webview": + render('WebViewLoader.tmpl.java', + 'src/main/java/org/kivy/android/WebViewLoader.java', + args=args) + + if args.sign: + render('build.properties', 'build.properties') + else: + if exists('build.properties'): + os.remove('build.properties') + + # Apply java source patches if any are present: + if exists(join('src', 'patches')): + print("Applying Java source code patches...") + for patch_name in os.listdir(join('src', 'patches')): + patch_path = join('src', 'patches', patch_name) + print("Applying patch: " + str(patch_path)) + try: + subprocess.check_output([ + # -N: insist this is FORWARd patch, don't reverse apply + # -p1: strip first path component + # -t: batch mode, don't ask questions + "patch", "-N", "-p1", "-t", "-i", patch_path + ]) + except subprocess.CalledProcessError as e: + if e.returncode == 1: + # Return code 1 means it didn't apply, this will + # usually mean it is already applied. 
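+                    # (patch was invoked with -N above, so an already-applied
+                    # patch fails forward instead of being reverse-applied)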
+ print("Warning: failed to apply patch (" + + "exit code 1), " + + "assuming it is already applied: " + + str(patch_path) + ) + else: + raise e + + +def parse_args(args=None): + global BLACKLIST_PATTERNS, WHITELIST_PATTERNS, PYTHON + + # Get the default minsdk, equal to the NDK API that this dist is built against + try: + with open('dist_info.json', 'r') as fileh: + info = json.load(fileh) + default_min_api = int(info['ndk_api']) + ndk_api = default_min_api + except (OSError, KeyError, ValueError, TypeError): + print('WARNING: Failed to read ndk_api from dist info, defaulting to 12') + default_min_api = 12 # The old default before ndk_api was introduced + ndk_api = 12 + + import argparse + ap = argparse.ArgumentParser(description='''\ +Package a Python application for Android (using +bootstrap ''' + get_bootstrap_name() + '''). + +For this to work, Java and Ant need to be in your path, as does the +tools directory of the Android SDK. +''') + + # --private is required unless for sdl2, where there's also --launcher + ap.add_argument('--private', dest='private', + help='the directory with the app source code files' + + ' (containing your main.py entrypoint)', + required=(get_bootstrap_name() != "sdl2")) + ap.add_argument('--package', dest='package', + help=('The name of the java package the project will be' + ' packaged under.'), + required=True) + ap.add_argument('--name', dest='name', + help=('The human-readable name of the project.'), + required=True) + ap.add_argument('--numeric-version', dest='numeric_version', + help=('The numeric version number of the project. If not ' + 'given, this is automatically computed from the ' + 'version.')) + ap.add_argument('--version', dest='version', + help=('The version number of the project. This should ' + 'consist of numbers and dots, and should have the ' + 'same number of groups of numbers as previous ' + 'versions.'), + required=True) + if get_bootstrap_name() == "sdl2": + ap.add_argument('--launcher', dest='launcher', action='store_true', + help=('Provide this argument to build a multi-app ' + 'launcher, rather than a single app.')) + ap.add_argument('--permission', dest='permissions', action='append', default=[], + help='The permissions to give this app.', nargs='+') + ap.add_argument('--meta-data', dest='meta_data', action='append', default=[], + help='Custom key=value to add in application metadata') + ap.add_argument('--uses-library', dest='android_used_libs', action='append', default=[], + help='Used shared libraries included using tag in AndroidManifest.xml') + ap.add_argument('--icon', dest='icon', + help=('A png file to use as the icon for ' + 'the application.')) + ap.add_argument('--service', dest='services', action='append', default=[], + help='Declare a new service entrypoint: ' + 'NAME:PATH_TO_PY[:foreground]') + if get_bootstrap_name() != "service_only": + ap.add_argument('--presplash', dest='presplash', + help=('A jpeg file to use as a screen while the ' + 'application is loading.')) + ap.add_argument('--presplash-color', + dest='presplash_color', + default='#000000', + help=('A string to set the loading screen ' + 'background color. ' + 'Supported formats are: ' + '#RRGGBB #AARRGGBB or color names ' + 'like red, green, blue, etc.')) + ap.add_argument('--window', dest='window', action='store_true', + default=False, + help='Indicate if the application will be windowed') + ap.add_argument('--orientation', dest='orientation', + default='portrait', + help=('The orientation that the game will ' + 'display in. 
' + 'Usually one of "landscape", "portrait", ' + '"sensor", or "user" (the same as "sensor" ' + 'but obeying the ' + 'user\'s Android rotation setting). ' + 'The full list of options is given under ' + 'android_screenOrientation at ' + 'https://developer.android.com/guide/' + 'topics/manifest/' + 'activity-element.html')) + ap.add_argument('--wakelock', dest='wakelock', action='store_true', + help=('Indicate if the application needs the device ' + 'to stay on')) + ap.add_argument('--blacklist', dest='blacklist', + default=join(curdir, 'blacklist.txt'), + help=('Use a blacklist file to match unwanted file in ' + 'the final APK')) + ap.add_argument('--whitelist', dest='whitelist', + default=join(curdir, 'whitelist.txt'), + help=('Use a whitelist file to prevent blacklisting of ' + 'file in the final APK')) + ap.add_argument('--add-jar', dest='add_jar', action='append', + help=('Add a Java .jar to the libs, so you can access its ' + 'classes with pyjnius. You can specify this ' + 'argument more than once to include multiple jars')) + ap.add_argument('--add-aar', dest='add_aar', action='append', + help=('Add an aar dependency manually')) + ap.add_argument('--depend', dest='depends', action='append', + help=('Add a external dependency ' + '(eg: com.android.support:appcompat-v7:19.0.1)')) + # The --sdk option has been removed, it is ignored in favour of + # --android-api handled by toolchain.py + ap.add_argument('--sdk', dest='sdk_version', default=-1, + type=int, help=('Deprecated argument, does nothing')) + ap.add_argument('--minsdk', dest='min_sdk_version', + default=default_min_api, type=int, + help=('Minimum Android SDK version that the app supports. ' + 'Defaults to {}.'.format(default_min_api))) + ap.add_argument('--allow-minsdk-ndkapi-mismatch', default=False, + action='store_true', + help=('Allow the --minsdk argument to be different from ' + 'the discovered ndk_api in the dist')) + ap.add_argument('--intent-filters', dest='intent_filters', + help=('Add intent-filters xml rules to the ' + 'AndroidManifest.xml file. The argument is a ' + 'filename containing xml. The filename should be ' + 'located relative to the python-for-android ' + 'directory')) + ap.add_argument('--with-billing', dest='billing_pubkey', + help='If set, the billing service will be added (not implemented)') + ap.add_argument('--add-source', dest='extra_source_dirs', action='append', + help='Include additional source dirs in Java build') + if get_bootstrap_name() == "webview": + ap.add_argument('--port', + help='The port on localhost that the WebView will access', + default='5000') + ap.add_argument('--try-system-python-compile', dest='try_system_python_compile', + action='store_true', + help='Use the system python during compileall if possible.') + ap.add_argument('--no-compile-pyo', dest='no_compile_pyo', action='store_true', + help='Do not optimise .py files to .pyo.') + ap.add_argument('--sign', action='store_true', + help=('Try to sign the APK with your credentials. 
You must set ' + 'the appropriate environment variables.')) + ap.add_argument('--add-activity', dest='add_activity', action='append', + help='Add this Java class as an Activity to the manifest.') + ap.add_argument('--activity-launch-mode', + dest='activity_launch_mode', + default='singleTask', + help='Set the launch mode of the main activity in the manifest.') + ap.add_argument('--allow-backup', dest='allow_backup', default='true', + help="if set to 'false', then android won't backup the application.") + ap.add_argument('--no-optimize-python', dest='optimize_python', + action='store_false', default=True, + help=('Whether to compile to optimised .pyo files, using -OO ' + '(strips docstrings and asserts)')) + + # Put together arguments, and add those from .p4a config file: + if args is None: + args = sys.argv[1:] + + def _read_configuration(): + if not exists(".p4a"): + return + print("Reading .p4a configuration") + with open(".p4a") as fd: + lines = fd.readlines() + lines = [shlex.split(line) + for line in lines if not line.startswith("#")] + for line in lines: + for arg in line: + args.append(arg) + _read_configuration() + + args = ap.parse_args(args) + args.ignore_path = [] + + if args.name and args.name[0] == '"' and args.name[-1] == '"': + args.name = args.name[1:-1] + + if ndk_api != args.min_sdk_version: + print(('WARNING: --minsdk argument does not match the api that is ' + 'compiled against. Only proceed if you know what you are ' + 'doing, otherwise use --minsdk={} or recompile against api ' + '{}').format(ndk_api, args.min_sdk_version)) + if not args.allow_minsdk_ndkapi_mismatch: + print('You must pass --allow-minsdk-ndkapi-mismatch to build ' + 'with --minsdk different to the target NDK api from the ' + 'build step') + sys.exit(1) + else: + print('Proceeding with --minsdk not matching build target api') + + if args.billing_pubkey: + print('Billing not yet supported!') + sys.exit(1) + + if args.sdk_version == -1: + print('WARNING: Received a --sdk argument, but this argument is ' + 'deprecated and does nothing.') + args.sdk_version = -1 # ensure it is not used + + if args.permissions and isinstance(args.permissions[0], list): + args.permissions = [p for perm in args.permissions for p in perm] + + if args.try_system_python_compile: + # Hardcoding python2.7 is okay for now, as python3 skips the + # compilation anyway + if not exists('crystax_python'): + python_executable = 'python2.7' + try: + subprocess.call([python_executable, '--version']) + except (OSError, subprocess.CalledProcessError): + pass + else: + PYTHON = python_executable + + if args.no_compile_pyo: + PYTHON = None + BLACKLIST_PATTERNS.remove('*.py') + + if args.blacklist: + with open(args.blacklist) as fd: + patterns = [x.strip() for x in fd.read().splitlines() + if x.strip() and not x.strip().startswith('#')] + BLACKLIST_PATTERNS += patterns + + if args.whitelist: + with open(args.whitelist) as fd: + patterns = [x.strip() for x in fd.read().splitlines() + if x.strip() and not x.strip().startswith('#')] + WHITELIST_PATTERNS += patterns + + if args.private is None and \ + get_bootstrap_name() == 'sdl2' and args.launcher is None: + print('Need --private directory or ' + + '--launcher (SDL2 bootstrap only)' + + 'to have something to launch inside the .apk!') + sys.exit(1) + make_package(args) + + return args + + +if __name__ == "__main__": + parse_args() diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.jar 
b/p4a/pythonforandroidold/bootstraps/common/build/gradle/wrapper/gradle-wrapper.jar similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.jar rename to p4a/pythonforandroidold/bootstraps/common/build/gradle/wrapper/gradle-wrapper.jar diff --git a/p4a/pythonforandroidold/bootstraps/common/build/gradle/wrapper/gradle-wrapper.properties b/p4a/pythonforandroidold/bootstraps/common/build/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..efc019a --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Mon Mar 09 17:19:02 CET 2015 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/gradlew b/p4a/pythonforandroidold/bootstraps/common/build/gradlew similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/gradlew rename to p4a/pythonforandroidold/bootstraps/common/build/gradlew diff --git a/p4a/pythonforandroidold/bootstraps/common/build/gradlew.bat b/p4a/pythonforandroidold/bootstraps/common/build/gradlew.bat new file mode 100644 index 0000000..8a0b282 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/gradlew.bat @@ -0,0 +1,90 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windowz variants + +if not "%OS%" == "Windows_NT" goto win9xME_args +if "%@eval[2+2]" == "4" goto 4NT_args + +:win9xME_args +@rem Slurp the command line arguments. 
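+@rem (note: every shell except 4NT falls through to the plain %* assignment
+@rem below; the 4NT label further down only covers JP Software's shell)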
+set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* +goto execute + +:4NT_args +@rem Get arguments from the 4NT Shell from JP Software +set CMD_LINE_ARGS=%$ + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/p4a/pythonforandroidold/bootstraps/common/build/jni/Android.mk b/p4a/pythonforandroidold/bootstraps/common/build/jni/Android.mk new file mode 100644 index 0000000..5053e7d --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/jni/Android.mk @@ -0,0 +1 @@ +include $(call all-subdir-makefiles) diff --git a/p4a/pythonforandroidold/bootstraps/common/build/jni/application/Android.mk b/p4a/pythonforandroidold/bootstraps/common/build/jni/application/Android.mk new file mode 100644 index 0000000..5053e7d --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/jni/application/Android.mk @@ -0,0 +1 @@ +include $(call all-subdir-makefiles) diff --git a/p4a/pythonforandroidold/bootstraps/common/build/jni/application/src/Android.mk b/p4a/pythonforandroidold/bootstraps/common/build/jni/application/src/Android.mk new file mode 100644 index 0000000..4a442ee --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/jni/application/src/Android.mk @@ -0,0 +1,27 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE := main + +SDL_PATH := ../../SDL + +LOCAL_C_INCLUDES := $(LOCAL_PATH)/$(SDL_PATH)/include + +# Add your application source files here... +LOCAL_SRC_FILES := $(SDL_PATH)/src/main/android/SDL_android_main.c \ + start.c + +LOCAL_CFLAGS += -I$(PYTHON_INCLUDE_ROOT) $(EXTRA_CFLAGS) + +LOCAL_SHARED_LIBRARIES := SDL2 python_shared + +LOCAL_LDLIBS := -lGLESv1_CM -lGLESv2 -llog $(EXTRA_LDLIBS) + +LOCAL_LDFLAGS += -L$(PYTHON_LINK_ROOT) $(APPLICATION_ADDITIONAL_LDFLAGS) + +include $(BUILD_SHARED_LIBRARY) + +ifdef CRYSTAX_PYTHON_VERSION + $(call import-module,python/$(CRYSTAX_PYTHON_VERSION)) +endif diff --git a/p4a/pythonforandroidold/bootstraps/common/build/jni/application/src/start.c b/p4a/pythonforandroidold/bootstraps/common/build/jni/application/src/start.c new file mode 100644 index 0000000..3429118 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/jni/application/src/start.c @@ -0,0 +1,501 @@ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H +#error Python headers needed to compile C extensions, please install development version of Python. 
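+/* (note) Py_PYTHON_H is defined by Python.h itself, so reaching the #error
+   above means the Python development headers were not on the include path. */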
+#else + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "bootstrap_name.h" +#ifndef BOOTSTRAP_USES_NO_SDL_HEADERS +#include "SDL.h" +#ifndef BOOTSTRAP_NAME_PYGAME +#include "SDL_opengles2.h" +#endif +#endif +#ifdef BOOTSTRAP_NAME_PYGAME +#include "jniwrapperstuff.h" +#endif +#include "android/log.h" + +#define ENTRYPOINT_MAXLEN 128 +#define LOG(n, x) __android_log_write(ANDROID_LOG_INFO, (n), (x)) +#define LOGP(x) LOG("python", (x)) + +static PyObject *androidembed_log(PyObject *self, PyObject *args) { + char *logstr = NULL; + if (!PyArg_ParseTuple(args, "s", &logstr)) { + return NULL; + } + LOG(getenv("PYTHON_NAME"), logstr); + Py_RETURN_NONE; +} + +static PyMethodDef AndroidEmbedMethods[] = { + {"log", androidembed_log, METH_VARARGS, "Log on android platform"}, + {NULL, NULL, 0, NULL}}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef androidembed = {PyModuleDef_HEAD_INIT, "androidembed", + "", -1, AndroidEmbedMethods}; + +PyMODINIT_FUNC initandroidembed(void) { + return PyModule_Create(&androidembed); +} +#else +PyMODINIT_FUNC initandroidembed(void) { + (void)Py_InitModule("androidembed", AndroidEmbedMethods); +} +#endif + +int dir_exists(char *filename) { + struct stat st; + if (stat(filename, &st) == 0) { + if (S_ISDIR(st.st_mode)) + return 1; + } + return 0; +} + +int file_exists(const char *filename) { + FILE *file; + if ((file = fopen(filename, "r"))) { + fclose(file); + return 1; + } + return 0; +} + +/* int main(int argc, char **argv) { */ +int main(int argc, char *argv[]) { + + char *env_argument = NULL; + char *env_entrypoint = NULL; + char *env_logname = NULL; + char entrypoint[ENTRYPOINT_MAXLEN]; + int ret = 0; + FILE *fd; + + LOGP("Initializing Python for Android"); + + // Set a couple of built-in environment vars: + setenv("P4A_BOOTSTRAP", bootstrap_name, 1); // env var to identify p4a to applications + env_argument = getenv("ANDROID_ARGUMENT"); + setenv("ANDROID_APP_PATH", env_argument, 1); + env_entrypoint = getenv("ANDROID_ENTRYPOINT"); + env_logname = getenv("PYTHON_NAME"); + if (!getenv("ANDROID_UNPACK")) { + /* ANDROID_UNPACK currently isn't set in services */ + setenv("ANDROID_UNPACK", env_argument, 1); + } + if (env_logname == NULL) { + env_logname = "python"; + setenv("PYTHON_NAME", "python", 1); + } + + // Set additional file-provided environment vars: + LOGP("Setting additional env vars from p4a_env_vars.txt"); + char env_file_path[256]; + snprintf(env_file_path, sizeof(env_file_path), + "%s/p4a_env_vars.txt", getenv("ANDROID_UNPACK")); + FILE *env_file_fd = fopen(env_file_path, "r"); + if (env_file_fd) { + char* line = NULL; + size_t len = 0; + while (getline(&line, &len, env_file_fd) != -1) { + if (strlen(line) > 0) { + char *eqsubstr = strstr(line, "="); + if (eqsubstr) { + size_t eq_pos = eqsubstr - line; + + // Extract name: + char env_name[256]; + strncpy(env_name, line, sizeof(env_name)); + env_name[eq_pos] = '\0'; + + // Extract value (with line break removed: + char env_value[256]; + strncpy(env_value, (char*)(line + eq_pos + 1), sizeof(env_value)); + if (strlen(env_value) > 0 && + env_value[strlen(env_value)-1] == '\n') { + env_value[strlen(env_value)-1] = '\0'; + if (strlen(env_value) > 0 && + env_value[strlen(env_value)-1] == '\r') { + // Also remove windows line breaks (\r\n) + env_value[strlen(env_value)-1] = '\0'; + } + } + + // Set value: + setenv(env_name, env_value, 1); + } + } + } + fclose(env_file_fd); + } else { + LOGP("Warning: no p4a_env_vars.txt found / failed to open!"); + } + + 
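+  /* At this point p4a_env_vars.txt (if present) has been applied: each line
+   * was split at the first '=', trailing \n and \r\n were stripped from the
+   * value, and the resulting pair was exported via setenv(). */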
LOGP("Changing directory to the one provided by ANDROID_ARGUMENT"); + LOGP(env_argument); + chdir(env_argument); + +#if PY_MAJOR_VERSION < 3 + Py_NoSiteFlag=1; +#endif + +#if PY_MAJOR_VERSION < 3 + Py_SetProgramName("android_python"); +#else + Py_SetProgramName(L"android_python"); +#endif + +#if PY_MAJOR_VERSION >= 3 + /* our logging module for android + */ + PyImport_AppendInittab("androidembed", initandroidembed); +#endif + + LOGP("Preparing to initialize python"); + + // Set up the python path + char paths[256]; + + char crystax_python_dir[256]; + snprintf(crystax_python_dir, 256, + "%s/crystax_python", getenv("ANDROID_UNPACK")); + char python_bundle_dir[256]; + snprintf(python_bundle_dir, 256, + "%s/_python_bundle", getenv("ANDROID_UNPACK")); + if (dir_exists(crystax_python_dir) || dir_exists(python_bundle_dir)) { + if (dir_exists(crystax_python_dir)) { + LOGP("crystax_python exists"); + snprintf(paths, 256, + "%s/stdlib.zip:%s/modules", + crystax_python_dir, crystax_python_dir); + } + + if (dir_exists(python_bundle_dir)) { + LOGP("_python_bundle dir exists"); + snprintf(paths, 256, + "%s/stdlib.zip:%s/modules", + python_bundle_dir, python_bundle_dir); + } + + LOGP("calculated paths to be..."); + LOGP(paths); + + #if PY_MAJOR_VERSION >= 3 + wchar_t *wchar_paths = Py_DecodeLocale(paths, NULL); + Py_SetPath(wchar_paths); + #endif + + LOGP("set wchar paths..."); + } else { + // We do not expect to see crystax_python any more, so no point + // reminding the user about it. If it does exist, we'll have + // logged it earlier. + LOGP("_python_bundle does not exist"); + } + + Py_Initialize(); + +#if PY_MAJOR_VERSION < 3 + // Can't Py_SetPath in python2 but we can set PySys_SetPath, which must + // be applied after Py_Initialize rather than before like Py_SetPath + #if PY_MICRO_VERSION >= 15 + // Only for python native-build + PySys_SetPath(paths); + #endif + PySys_SetArgv(argc, argv); +#endif + + LOGP("Initialized python"); + + /* ensure threads will work. + */ + LOGP("AND: Init threads"); + PyEval_InitThreads(); + +#if PY_MAJOR_VERSION < 3 + initandroidembed(); +#endif + + PyRun_SimpleString("import androidembed\nandroidembed.log('testing python " + "print redirection')"); + + /* inject our bootstrap code to redirect python stdin/stdout + * replace sys.path with our path + */ + PyRun_SimpleString("import sys, posix\n"); + if (dir_exists("lib")) { + /* If we built our own python, set up the paths correctly. 
+ * This is only the case if we are using the python2legacy recipe + */ + LOGP("Setting up python from ANDROID_APP_PATH"); + PyRun_SimpleString("private = posix.environ['ANDROID_APP_PATH']\n" + "argument = posix.environ['ANDROID_ARGUMENT']\n" + "sys.path[:] = [ \n" + " private + '/lib/python27.zip', \n" + " private + '/lib/python2.7/', \n" + " private + '/lib/python2.7/lib-dynload/', \n" + " private + '/lib/python2.7/site-packages/', \n" + " argument ]\n"); + } + + char add_site_packages_dir[256]; + if (dir_exists(crystax_python_dir)) { + snprintf(add_site_packages_dir, 256, + "sys.path.append('%s/site-packages')", + crystax_python_dir); + + PyRun_SimpleString("import sys\n" + "sys.argv = ['notaninterpreterreally']\n" + "from os.path import realpath, join, dirname"); + PyRun_SimpleString(add_site_packages_dir); + /* "sys.path.append(join(dirname(realpath(__file__)), 'site-packages'))") */ + PyRun_SimpleString("sys.path = ['.'] + sys.path"); + } + + if (dir_exists(python_bundle_dir)) { + snprintf(add_site_packages_dir, 256, + "sys.path.append('%s/site-packages')", + python_bundle_dir); + + PyRun_SimpleString("import sys\n" + "sys.argv = ['notaninterpreterreally']\n" + "from os.path import realpath, join, dirname"); + PyRun_SimpleString(add_site_packages_dir); + /* "sys.path.append(join(dirname(realpath(__file__)), 'site-packages'))") */ + PyRun_SimpleString("sys.path = ['.'] + sys.path"); + } + + PyRun_SimpleString( + "class LogFile(object):\n" + " def __init__(self):\n" + " self.buffer = ''\n" + " def write(self, s):\n" + " s = self.buffer + s\n" + " lines = s.split(\"\\n\")\n" + " for l in lines[:-1]:\n" + " androidembed.log(l)\n" + " self.buffer = lines[-1]\n" + " def flush(self):\n" + " return\n" + "sys.stdout = sys.stderr = LogFile()\n" + "print('Android path', sys.path)\n" + "import os\n" + "print('os.environ is', os.environ)\n" + "print('Android kivy bootstrap done. __name__ is', __name__)"); + +#if PY_MAJOR_VERSION < 3 + PyRun_SimpleString("import site; print site.getsitepackages()\n"); +#endif + + LOGP("AND: Ran string"); + + /* run it ! 
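+ * (the code below resolves the entrypoint, preferring a compiled
+ * .pyc/.pyo over the plain .py when one exists)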
+ */
+    LOGP("Run user program, change dir and execute entrypoint");
+
+    /* Get the entrypoint: look for the compiled (.pyc/.pyo) form first,
+     * then fall back on the plain .py
+     */
+    char *dot = strrchr(env_entrypoint, '.');
+#if PY_MAJOR_VERSION > 2
+    char *ext = ".pyc";
+#else
+    char *ext = ".pyo";
+#endif
+    if (dot == NULL) {
+        LOGP("Invalid entrypoint, abort.");
+        return -1;
+    }
+    if (strlen(env_entrypoint) > ENTRYPOINT_MAXLEN - 2) {
+        LOGP("Entrypoint path is too long, try increasing ENTRYPOINT_MAXLEN.");
+        return -1;
+    }
+    if (!strcmp(dot, ext)) {
+        if (!file_exists(env_entrypoint)) {
+            /* fallback on .py */
+            strcpy(entrypoint, env_entrypoint);
+            entrypoint[strlen(env_entrypoint) - 1] = '\0';
+            LOGP(entrypoint);
+            if (!file_exists(entrypoint)) {
+                LOGP("Entrypoint not found (.pyc/.pyo, fallback on .py), abort");
+                return -1;
+            }
+        } else {
+            strcpy(entrypoint, env_entrypoint);
+        }
+    } else if (!strcmp(dot, ".py")) {
+        /* if .py is passed, check the compiled version first */
+        strcpy(entrypoint, env_entrypoint);
+        entrypoint[strlen(env_entrypoint) + 1] = '\0';
+#if PY_MAJOR_VERSION > 2
+        entrypoint[strlen(env_entrypoint)] = 'c';
+#else
+        entrypoint[strlen(env_entrypoint)] = 'o';
+#endif
+        if (!file_exists(entrypoint)) {
+            /* fallback on the pure python version */
+            if (!file_exists(env_entrypoint)) {
+                LOGP("Entrypoint not found (.py), abort.");
+                return -1;
+            }
+            strcpy(entrypoint, env_entrypoint);
+        }
+    } else {
+        LOGP("Entrypoint has an invalid extension (must be .py or .pyc/.pyo), abort.");
+        return -1;
+    }
+    // LOGP("Entrypoint is:");
+    // LOGP(entrypoint);
+    fd = fopen(entrypoint, "r");
+    if (fd == NULL) {
+        LOGP("Opening the entrypoint failed");
+        LOGP(entrypoint);
+        return -1;
+    }
+
+    /* run python !
+     */
+    ret = PyRun_SimpleFile(fd, entrypoint);
+    fclose(fd);
+
+    if (PyErr_Occurred() != NULL) {
+        ret = 1;
+        PyErr_Print(); /* This exits with the right code if SystemExit. */
+        PyObject *f = PySys_GetObject("stdout");
+        if (PyFile_WriteString(
+                "\n", f)) /* python2 used Py_FlushLine, but this no longer exists */
+            PyErr_Clear();
+    }
+
+    LOGP("Python for android ended.");
+
+    /* Shut down: since regular shutdown causes issues sometimes
+       (it seems to be an incomplete shutdown breaking the next launch)
+       we use sys.exit(ret) to shut down, since that one works.
+
+       Reference discussion:
+
+       https://github.com/kivy/kivy/pull/6107#issue-246120816
+    */
+    char terminatecmd[256];
+    snprintf(
+        terminatecmd, sizeof(terminatecmd),
+        "import sys; sys.exit(%d)\n", ret
+    );
+    PyRun_SimpleString(terminatecmd);
+
+    /* This should never actually be reached, but we'll leave the clean-up
+     * here just to be safe.
+ */ +#if PY_MAJOR_VERSION < 3 + Py_Finalize(); + LOGP("Unexpectedly reached Py_FinalizeEx(), but was successful."); +#else + if (Py_FinalizeEx() != 0) // properly check success on Python 3 + LOGP("Unexpectedly reached Py_FinalizeEx(), and got error!"); + else + LOGP("Unexpectedly reached Py_FinalizeEx(), but was successful."); +#endif + + return ret; +} + +JNIEXPORT void JNICALL Java_org_kivy_android_PythonService_nativeStart( + JNIEnv *env, + jobject thiz, + jstring j_android_private, + jstring j_android_argument, + jstring j_service_entrypoint, + jstring j_python_name, + jstring j_python_home, + jstring j_python_path, + jstring j_arg) { + jboolean iscopy; + const char *android_private = + (*env)->GetStringUTFChars(env, j_android_private, &iscopy); + const char *android_argument = + (*env)->GetStringUTFChars(env, j_android_argument, &iscopy); + const char *service_entrypoint = + (*env)->GetStringUTFChars(env, j_service_entrypoint, &iscopy); + const char *python_name = + (*env)->GetStringUTFChars(env, j_python_name, &iscopy); + const char *python_home = + (*env)->GetStringUTFChars(env, j_python_home, &iscopy); + const char *python_path = + (*env)->GetStringUTFChars(env, j_python_path, &iscopy); + const char *arg = (*env)->GetStringUTFChars(env, j_arg, &iscopy); + + setenv("ANDROID_PRIVATE", android_private, 1); + setenv("ANDROID_ARGUMENT", android_argument, 1); + setenv("ANDROID_APP_PATH", android_argument, 1); + setenv("ANDROID_ENTRYPOINT", service_entrypoint, 1); + setenv("PYTHONOPTIMIZE", "2", 1); + setenv("PYTHON_NAME", python_name, 1); + setenv("PYTHONHOME", python_home, 1); + setenv("PYTHONPATH", python_path, 1); + setenv("PYTHON_SERVICE_ARGUMENT", arg, 1); + setenv("P4A_BOOTSTRAP", bootstrap_name, 1); + + char *argv[] = {"."}; + /* ANDROID_ARGUMENT points to service subdir, + * so main() will run main.py from this dir + */ + main(1, argv); +} + +#if defined(BOOTSTRAP_NAME_WEBVIEW) || defined(BOOTSTRAP_NAME_SERVICEONLY) +// Webview and service_only uses some more functions: + +void Java_org_kivy_android_PythonActivity_nativeSetenv( + JNIEnv* env, jclass cls, + jstring name, jstring value) +//JNIEXPORT void JNICALL SDL_JAVA_INTERFACE(nativeSetenv)( +// JNIEnv* env, jclass cls, +// jstring name, jstring value) +{ + const char *utfname = (*env)->GetStringUTFChars(env, name, NULL); + const char *utfvalue = (*env)->GetStringUTFChars(env, value, NULL); + + setenv(utfname, utfvalue, 1); + + (*env)->ReleaseStringUTFChars(env, name, utfname); + (*env)->ReleaseStringUTFChars(env, value, utfvalue); +} + + +void Java_org_kivy_android_PythonActivity_nativeInit(JNIEnv* env, jclass cls, jobject obj) +{ + /* This nativeInit follows SDL2 */ + + /* This interface could expand with ABI negotiation, calbacks, etc. */ + /* SDL_Android_Init(env, cls); */ + + /* SDL_SetMainReady(); */ + + /* Run the application code! 
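+ * (SDL_main stays commented out above; our own main() is invoked instead)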
*/ + int status; + char *argv[2]; + argv[0] = "Python_app"; + argv[1] = NULL; + /* status = SDL_main(1, argv); */ + + main(1, argv); + + /* Do not issue an exit or the whole application will terminate instead of just the SDL thread */ + /* exit(status); */ +} +#endif + +#endif diff --git a/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/kivy/android/PythonService.java b/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/kivy/android/PythonService.java new file mode 100644 index 0000000..4f20fb7 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/kivy/android/PythonService.java @@ -0,0 +1,164 @@ +package org.kivy.android; + +import android.os.Build; +import java.lang.reflect.Method; +import java.lang.reflect.InvocationTargetException; +import android.app.Service; +import android.os.IBinder; +import android.os.Bundle; +import android.content.Intent; +import android.content.Context; +import android.util.Log; +import android.app.Notification; +import android.app.PendingIntent; +import android.os.Process; +import java.io.File; + +import org.kivy.android.PythonUtil; + +import org.renpy.android.Hardware; + + +public class PythonService extends Service implements Runnable { + + // Thread for Python code + private Thread pythonThread = null; + + // Python environment variables + private String androidPrivate; + private String androidArgument; + private String pythonName; + private String pythonHome; + private String pythonPath; + private String serviceEntrypoint; + // Argument to pass to Python code, + private String pythonServiceArgument; + public static PythonService mService = null; + private Intent startIntent = null; + + private boolean autoRestartService = false; + + public void setAutoRestartService(boolean restart) { + autoRestartService = restart; + } + + public boolean canDisplayNotification() { + return true; + } + + public int startType() { + return START_NOT_STICKY; + } + + @Override + public IBinder onBind(Intent arg0) { + return null; + } + + @Override + public void onCreate() { + super.onCreate(); + } + + @Override + public int onStartCommand(Intent intent, int flags, int startId) { + if (pythonThread != null) { + Log.v("python service", "service exists, do not start again"); + return START_NOT_STICKY; + } + + startIntent = intent; + Bundle extras = intent.getExtras(); + androidPrivate = extras.getString("androidPrivate"); + androidArgument = extras.getString("androidArgument"); + serviceEntrypoint = extras.getString("serviceEntrypoint"); + pythonName = extras.getString("pythonName"); + pythonHome = extras.getString("pythonHome"); + pythonPath = extras.getString("pythonPath"); + pythonServiceArgument = extras.getString("pythonServiceArgument"); + + pythonThread = new Thread(this); + pythonThread.start(); + + if (canDisplayNotification()) { + doStartForeground(extras); + } + + return startType(); + } + + protected void doStartForeground(Bundle extras) { + String serviceTitle = extras.getString("serviceTitle"); + String serviceDescription = extras.getString("serviceDescription"); + + Notification notification; + Context context = getApplicationContext(); + Intent contextIntent = new Intent(context, PythonActivity.class); + PendingIntent pIntent = PendingIntent.getActivity(context, 0, contextIntent, + PendingIntent.FLAG_UPDATE_CURRENT); + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) { + notification = new Notification( + context.getApplicationInfo().icon, serviceTitle, System.currentTimeMillis()); + try { 
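+                // (setLatestEventInfo is resolved reflectively because it was
+                // removed from current SDKs; a direct call would not compile
+                // against a modern android.jar)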
+ // prevent using NotificationCompat, this saves 100kb on apk + Method func = notification.getClass().getMethod( + "setLatestEventInfo", Context.class, CharSequence.class, + CharSequence.class, PendingIntent.class); + func.invoke(notification, context, serviceTitle, serviceDescription, pIntent); + } catch (NoSuchMethodException | IllegalAccessException | + IllegalArgumentException | InvocationTargetException e) { + } + } else { + Notification.Builder builder = new Notification.Builder(context); + builder.setContentTitle(serviceTitle); + builder.setContentText(serviceDescription); + builder.setContentIntent(pIntent); + builder.setSmallIcon(context.getApplicationInfo().icon); + notification = builder.build(); + } + startForeground(1, notification); + } + + @Override + public void onDestroy() { + super.onDestroy(); + pythonThread = null; + if (autoRestartService && startIntent != null) { + Log.v("python service", "service restart requested"); + startService(startIntent); + } + Process.killProcess(Process.myPid()); + } + + /** + * Stops the task gracefully when killed. + * Calling stopSelf() will trigger a onDestroy() call from the system. + */ + @Override + public void onTaskRemoved(Intent rootIntent) { + super.onTaskRemoved(rootIntent); + stopSelf(); + } + + @Override + public void run(){ + String app_root = getFilesDir().getAbsolutePath() + "/app"; + File app_root_file = new File(app_root); + PythonUtil.loadLibraries(app_root_file, + new File(getApplicationInfo().nativeLibraryDir)); + this.mService = this; + nativeStart( + androidPrivate, androidArgument, + serviceEntrypoint, pythonName, + pythonHome, pythonPath, + pythonServiceArgument); + stopSelf(); + } + + // Native part + public static native void nativeStart( + String androidPrivate, String androidArgument, + String serviceEntrypoint, String pythonName, + String pythonHome, String pythonPath, + String pythonServiceArgument); +} diff --git a/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/kivy/android/PythonUtil.java b/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/kivy/android/PythonUtil.java new file mode 100644 index 0000000..1f26738 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/kivy/android/PythonUtil.java @@ -0,0 +1,77 @@ +package org.kivy.android; + +import java.io.File; + +import android.util.Log; +import java.util.ArrayList; +import java.io.FilenameFilter; +import java.util.regex.Pattern; + + +public class PythonUtil { + private static final String TAG = "pythonutil"; + + protected static void addLibraryIfExists(ArrayList libsList, String pattern, File libsDir) { + // pattern should be the name of the lib file, without the + // preceding "lib" or suffix ".so", for instance "ssl.*" will + // match files of the form "libssl.*.so". 
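+        // e.g. the pattern "ssl.*" becomes the regex "libssl.*\.so" and
+        // matches a file like "libssl1.1.so"; substring(3, length - 3) below
+        // then trims it back to "ssl1.1" for System.loadLibrary()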
+ File [] files = libsDir.listFiles(); + + pattern = "lib" + pattern + "\\.so"; + Pattern p = Pattern.compile(pattern); + for (int i = 0; i < files.length; ++i) { + File file = files[i]; + String name = file.getName(); + Log.v(TAG, "Checking pattern " + pattern + " against " + name); + if (p.matcher(name).matches()) { + Log.v(TAG, "Pattern " + pattern + " matched file " + name); + libsList.add(name.substring(3, name.length() - 3)); + } + } + } + + protected static ArrayList<String> getLibraries(File libsDir) { + ArrayList<String> libsList = new ArrayList<String>(); + addLibraryIfExists(libsList, "crystax", libsDir); + addLibraryIfExists(libsList, "sqlite3", libsDir); + addLibraryIfExists(libsList, "ffi", libsDir); + addLibraryIfExists(libsList, "ssl.*", libsDir); + addLibraryIfExists(libsList, "crypto.*", libsDir); + libsList.add("python2.7"); + libsList.add("python3.5m"); + libsList.add("python3.6m"); + libsList.add("python3.7m"); + libsList.add("main"); + return libsList; + } + + public static void loadLibraries(File filesDir, File libsDir) { + String filesDirPath = filesDir.getAbsolutePath(); + boolean foundPython = false; + + for (String lib : getLibraries(libsDir)) { + Log.v(TAG, "Loading library: " + lib); + try { + System.loadLibrary(lib); + if (lib.startsWith("python")) { + foundPython = true; + } + } catch(UnsatisfiedLinkError e) { + // If this is the last possible libpython + // load, and it has failed, give a more + // general error + Log.v(TAG, "Library loading error: " + e.getMessage()); + if (lib.startsWith("python3.7") && !foundPython) { + throw new java.lang.RuntimeException("Could not load any libpythonXXX.so"); + } else if (lib.startsWith("python")) { + continue; + } else { + Log.v(TAG, "An UnsatisfiedLinkError occurred loading " + lib); + throw e; + } + } + } + + Log.v(TAG, "Loaded everything!"); + } +} diff --git a/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/renpy/android/AssetExtract.java b/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/renpy/android/AssetExtract.java new file mode 100644 index 0000000..52d6424 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/renpy/android/AssetExtract.java @@ -0,0 +1,115 @@ +// This string is autogenerated by ChangeAppSettings.sh, do not change +// spaces amount +package org.renpy.android; + +import java.io.*; + +import android.app.Activity; +import android.util.Log; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.File; + +import java.util.zip.GZIPInputStream; + +import android.content.res.AssetManager; + +import org.kamranzafar.jtar.*; + +public class AssetExtract { + + private AssetManager mAssetManager = null; + private Activity mActivity = null; + + public AssetExtract(Activity act) { + mActivity = act; + mAssetManager = act.getAssets(); + } + + public boolean extractTar(String asset, String target) { + + byte buf[] = new byte[1024 * 1024]; + + InputStream assetStream = null; + TarInputStream tis = null; + + try { + assetStream = mAssetManager.open(asset, AssetManager.ACCESS_STREAMING); + tis = new TarInputStream(new BufferedInputStream(new GZIPInputStream(new BufferedInputStream(assetStream, 8192)), 8192)); + } catch (IOException e) { + Log.e("python", "opening up extract tar", e); + return false; + } + + while (true) { + TarEntry entry = null; + + try { + entry = tis.getNextEntry(); + } catch (
java.io.IOException e ) { + Log.e("python", "extracting tar", e); + return false; + } + + if ( entry == null ) { + break; + } + + Log.v("python", "extracting " + entry.getName()); + + if (entry.isDirectory()) { + + try { + new File(target +"/" + entry.getName()).mkdirs(); + } catch ( SecurityException e ) { }; + + continue; + } + + OutputStream out = null; + String path = target + "/" + entry.getName(); + + try { + out = new BufferedOutputStream(new FileOutputStream(path), 8192); + } catch ( FileNotFoundException e ) { + } catch ( SecurityException e ) { }; + + if ( out == null ) { + Log.e("python", "could not open " + path); + return false; + } + + try { + while (true) { + int len = tis.read(buf); + + if (len == -1) { + break; + } + + out.write(buf, 0, len); + } + + out.flush(); + out.close(); + } catch ( java.io.IOException e ) { + Log.e("python", "extracting zip", e); + return false; + } + } + + try { + tis.close(); + assetStream.close(); + } catch (IOException e) { + // pass + } + + return true; + } +} diff --git a/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/renpy/android/ResourceManager.java b/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/renpy/android/ResourceManager.java new file mode 100644 index 0000000..47455ab --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/src/main/java/org/renpy/android/ResourceManager.java @@ -0,0 +1,54 @@ +/** + * This class takes care of managing resources for us. In our code, we + * can't use R, since the name of the package containing R will + * change. (This same code is used in both org.renpy.android and + * org.renpy.pygame.) So this is the next best thing. + */ + +package org.renpy.android; + +import android.app.Activity; +import android.content.res.Resources; +import android.view.View; + +import android.util.Log; + +public class ResourceManager { + + private Activity act; + private Resources res; + + public ResourceManager(Activity activity) { + act = activity; + res = act.getResources(); + } + + public int getIdentifier(String name, String kind) { + Log.v("SDL", "getting identifier"); + Log.v("SDL", "kind is " + kind + " and name " + name); + Log.v("SDL", "result is " + res.getIdentifier(name, kind, act.getPackageName())); + return res.getIdentifier(name, kind, act.getPackageName()); + } + + public String getString(String name) { + + try { + Log.v("SDL", "asked to get string " + name); + return res.getString(getIdentifier(name, "string")); + } catch (Exception e) { + Log.v("SDL", "got exception looking for string!"); + return null; + } + } + + public View inflateView(String name) { + int id = getIdentifier(name, "layout"); + return act.getLayoutInflater().inflate(id, null); + } + + public View getViewById(View v, String name) { + int id = getIdentifier(name, "id"); + return v.findViewById(id); + } + +} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/Service.tmpl.java b/p4a/pythonforandroidold/bootstraps/common/build/templates/Service.tmpl.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/Service.tmpl.java rename to p4a/pythonforandroidold/bootstraps/common/build/templates/Service.tmpl.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/build.properties b/p4a/pythonforandroidold/bootstraps/common/build/templates/build.properties similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/build.properties rename to 
p4a/pythonforandroidold/bootstraps/common/build/templates/build.properties diff --git a/p4a/pythonforandroidold/bootstraps/common/build/templates/build.tmpl.gradle b/p4a/pythonforandroidold/bootstraps/common/build/templates/build.tmpl.gradle new file mode 100644 index 0000000..32bd091 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/common/build/templates/build.tmpl.gradle @@ -0,0 +1,80 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. +buildscript { + repositories { + google() + jcenter() + } + dependencies { + classpath 'com.android.tools.build:gradle:3.1.4' + } +} + +allprojects { + repositories { + google() + jcenter() + flatDir { + dirs 'libs' + } + } +} + +apply plugin: 'com.android.application' + +android { + compileSdkVersion {{ android_api }} + buildToolsVersion '{{ build_tools_version }}' + defaultConfig { + minSdkVersion {{ args.min_sdk_version }} + targetSdkVersion {{ android_api }} + versionCode {{ args.numeric_version }} + versionName '{{ args.version }}' + } + + {% if args.sign -%} + signingConfigs { + release { + storeFile file(System.getenv("P4A_RELEASE_KEYSTORE")) + keyAlias System.getenv("P4A_RELEASE_KEYALIAS") + storePassword System.getenv("P4A_RELEASE_KEYSTORE_PASSWD") + keyPassword System.getenv("P4A_RELEASE_KEYALIAS_PASSWD") + } + } + {%- endif %} + + buildTypes { + debug { + } + release { + {% if args.sign -%} + signingConfig signingConfigs.release + {%- endif %} + } + } + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_7 + targetCompatibility JavaVersion.VERSION_1_7 + } + + sourceSets { + main { + jniLibs.srcDir 'libs' + } + } + +} + +dependencies { + {%- for aar in aars %} + compile(name: '{{ aar }}', ext: 'aar') + {%- endfor -%} + {%- for jar in jars %} + compile files('src/main/libs/{{ jar }}') + {%- endfor -%} + {%- if args.depends -%} + {%- for depend in args.depends %} + compile '{{ depend }}' + {%- endfor %} + {%- endif %} +} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/build.tmpl.xml b/p4a/pythonforandroidold/bootstraps/common/build/templates/build.tmpl.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/build.tmpl.xml rename to p4a/pythonforandroidold/bootstraps/common/build/templates/build.tmpl.xml diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/custom_rules.tmpl.xml b/p4a/pythonforandroidold/bootstraps/common/build/templates/custom_rules.tmpl.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/custom_rules.tmpl.xml rename to p4a/pythonforandroidold/bootstraps/common/build/templates/custom_rules.tmpl.xml diff --git a/p4a/pythonforandroidold/bootstraps/common/build/templates/kivy-icon.png b/p4a/pythonforandroidold/bootstraps/common/build/templates/kivy-icon.png new file mode 100644 index 0000000..6ecb013 Binary files /dev/null and b/p4a/pythonforandroidold/bootstraps/common/build/templates/kivy-icon.png differ diff --git a/p4a/pythonforandroidold/bootstraps/common/build/templates/kivy-presplash.jpg b/p4a/pythonforandroidold/bootstraps/common/build/templates/kivy-presplash.jpg new file mode 100644 index 0000000..c61efa2 Binary files /dev/null and b/p4a/pythonforandroidold/bootstraps/common/build/templates/kivy-presplash.jpg differ diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/whitelist.txt b/p4a/pythonforandroidold/bootstraps/common/build/whitelist.txt similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/whitelist.txt rename to 
p4a/pythonforandroidold/bootstraps/common/build/whitelist.txt diff --git a/p4a/pythonforandroid/bootstraps/lbry/__init__.py b/p4a/pythonforandroidold/bootstraps/lbry/__init__.py similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/__init__.py rename to p4a/pythonforandroidold/bootstraps/lbry/__init__.py diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/.gitignore b/p4a/pythonforandroidold/bootstraps/lbry/build/.gitignore new file mode 100644 index 0000000..a1fc39c --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/.gitignore @@ -0,0 +1,14 @@ +.gradle +/build/ + +# Ignore Gradle GUI config +gradle-app.setting + +# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) +!gradle-wrapper.jar + +# Cache of project +.gradletasknamecache + +# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 +# gradle/wrapper/gradle-wrapper.properties diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/ant.properties b/p4a/pythonforandroidold/bootstraps/lbry/build/ant.properties new file mode 100644 index 0000000..0dee5c8 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/ant.properties @@ -0,0 +1,22 @@ +# This file is used to override default values used by the Ant build system. +# +# This file must be checked into Version Control Systems, as it is +# integral to the build system of your project. + +# This file is only used by the Ant script. + +# You can use this to override default values such as +# 'source.dir' for the location of your java source folder and +# 'out.dir' for the location of your output folder. + +# You can also use it define how the release builds are signed by declaring +# the following properties: +# 'key.store' for the location of your keystore and +# 'key.alias' for the name of the key to use. +# The password will be asked during the build when you use the 'release' target. 
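+# Illustrative signing setup only -- the path and alias below are
+# placeholders, not part of this template. With these set, the 'release'
+# target signs the package and prompts for the two passwords:
+#
+# key.store=/home/user/keystores/myapp.keystore
+# key.alias=myapp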
+ +source.absolute.dir = tmp-src + +resource.absolute.dir = src/main/res + +asset.absolute.dir = src/main/assets diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/blacklist.txt b/p4a/pythonforandroidold/bootstraps/lbry/build/blacklist.txt similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/blacklist.txt rename to p4a/pythonforandroidold/bootstraps/lbry/build/blacklist.txt diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/build.py b/p4a/pythonforandroidold/bootstraps/lbry/build/build.py similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/build.py rename to p4a/pythonforandroidold/bootstraps/lbry/build/build.py diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/build.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/build.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/build.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/build.xml diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.jar b/p4a/pythonforandroidold/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..3d0dee6 Binary files /dev/null and b/p4a/pythonforandroidold/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.jar differ diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.properties b/p4a/pythonforandroidold/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.properties similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.properties rename to p4a/pythonforandroidold/bootstraps/lbry/build/gradle/wrapper/gradle-wrapper.properties diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/gradlew b/p4a/pythonforandroidold/bootstraps/lbry/build/gradlew new file mode 100755 index 0000000..91a7e26 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/gradlew @@ -0,0 +1,164 @@ +#!/usr/bin/env bash + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn ( ) { + echo "$*" +} + +die ( ) { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; +esac + +# For Cygwin, ensure paths are in UNIX format before anything is touched. +if $cygwin ; then + [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` +fi + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >&- +APP_HOME="`pwd -P`" +cd "$SAVED" >&- + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. 
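+# Illustrative invocation, assuming a JDK installed under /usr/lib/jvm (the
+# exact path is an example only):
+#
+#   JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 ./gradlew assembleDebug
+#
+# If JAVA_HOME is unset, the fallback branch below uses whatever `java` is
+# found on PATH.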
+if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules +function splitJvmOpts() { + JVM_OPTS=("$@") +} +eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS +JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" + +exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/gradlew.bat b/p4a/pythonforandroidold/bootstraps/lbry/build/gradlew.bat similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/gradlew.bat 
rename to p4a/pythonforandroidold/bootstraps/lbry/build/gradlew.bat diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/jni/Android.mk b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/Android.mk new file mode 100644 index 0000000..5053e7d --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/Android.mk @@ -0,0 +1 @@ +include $(call all-subdir-makefiles) diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/jni/Application.mk b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/Application.mk new file mode 100644 index 0000000..e79e378 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/Application.mk @@ -0,0 +1,7 @@ + +# Uncomment this if you're using STL in your project +# See CPLUSPLUS-SUPPORT.html in the NDK documentation for more information +# APP_STL := stlport_static + +# APP_ABI := armeabi armeabi-v7a x86 +APP_ABI := $(ARCH) diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/jni/application/src/Android.mk b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/Android.mk similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/jni/application/src/Android.mk rename to p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/Android.mk diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/Android_static.mk b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/Android_static.mk new file mode 100644 index 0000000..2de278e --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/Android_static.mk @@ -0,0 +1,10 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE := main + +LOCAL_SRC_FILES := YourSourceHere.c + +include $(BUILD_SHARED_LIBRARY) +$(call import-module,SDL)LOCAL_PATH := $(call my-dir) diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/bootstrap_name.h b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/bootstrap_name.h new file mode 100644 index 0000000..b93a4ae --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/bootstrap_name.h @@ -0,0 +1,6 @@ + +#define BOOTSTRAP_NAME_SERVICEONLY +#define BOOTSTRAP_USES_NO_SDL_HEADERS + +const char bootstrap_name[] = "service_only"; + diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/pyjniusjni.c b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/pyjniusjni.c new file mode 100644 index 0000000..d67972a --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/jni/application/src/pyjniusjni.c @@ -0,0 +1,103 @@ + +#include <pthread.h> +#include <jni.h> + +#define LOGI(...) do {} while (0) +#define LOGE(...) do {} while (0) + +#include "android/log.h" + +/* These JNI management functions are taken from SDL2, but modified to refer to pyjnius */ + +/* #define LOG(n, x) __android_log_write(ANDROID_LOG_INFO, (n), (x)) */ +/* #define LOGP(x) LOG("python", (x)) */ +#define LOG_TAG "Python_android" +#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__) +#define LOGE(...)
__android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__) + + +/* Function headers */ +JNIEnv* Android_JNI_GetEnv(void); +static void Android_JNI_ThreadDestroyed(void*); + +static pthread_key_t mThreadKey; +static JavaVM* mJavaVM; + +int Android_JNI_SetupThread(void) +{ + Android_JNI_GetEnv(); + return 1; +} + +/* Library init */ +JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) +{ + JNIEnv *env; + mJavaVM = vm; + LOGI("JNI_OnLoad called"); + if ((*mJavaVM)->GetEnv(mJavaVM, (void**) &env, JNI_VERSION_1_4) != JNI_OK) { + LOGE("Failed to get the environment using GetEnv()"); + return -1; + } + /* + * Create mThreadKey so we can keep track of the JNIEnv assigned to each thread + * Refer to http://developer.android.com/guide/practices/design/jni.html for the rationale behind this + */ + if (pthread_key_create(&mThreadKey, Android_JNI_ThreadDestroyed) != 0) { + + __android_log_print(ANDROID_LOG_ERROR, "pyjniusjni", "Error initializing pthread key"); + } + Android_JNI_SetupThread(); + + return JNI_VERSION_1_4; +} + +JNIEnv* Android_JNI_GetEnv(void) +{ + /* From http://developer.android.com/guide/practices/jni.html + * All threads are Linux threads, scheduled by the kernel. + * They're usually started from managed code (using Thread.start), but they can also be created elsewhere and then + * attached to the JavaVM. For example, a thread started with pthread_create can be attached with the + * JNI AttachCurrentThread or AttachCurrentThreadAsDaemon functions. Until a thread is attached, it has no JNIEnv, + * and cannot make JNI calls. + * Attaching a natively-created thread causes a java.lang.Thread object to be constructed and added to the "main" + * ThreadGroup, making it visible to the debugger. Calling AttachCurrentThread on an already-attached thread + * is a no-op. + * Note: You can call this function any number of times for the same thread, there's no harm in it + */ + + JNIEnv *env; + int status = (*mJavaVM)->AttachCurrentThread(mJavaVM, &env, NULL); + if(status < 0) { + LOGE("failed to attach current thread"); + return 0; + } + + /* From http://developer.android.com/guide/practices/jni.html + * Threads attached through JNI must call DetachCurrentThread before they exit. If coding this directly is awkward, + * in Android 2.0 (Eclair) and higher you can use pthread_key_create to define a destructor function that will be + * called before the thread exits, and call DetachCurrentThread from there. (Use that key with pthread_setspecific + * to store the JNIEnv in thread-local-storage; that way it'll be passed into your destructor as the argument.) 
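+ *
+ * A minimal native worker sketch under these rules (the function name is
+ * illustrative, not part of this file):
+ *
+ *     static void *worker(void *arg) {
+ *         JNIEnv *env = Android_JNI_GetEnv();  // attaches this thread if needed
+ *         // ... make JNI calls through env ...
+ *         return NULL;  // detach happens via the pthread key destructor
+ *     }
+ *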
+ * Note: The destructor is not called unless the stored value is != NULL + * Note: You can call this function any number of times for the same thread, there's no harm in it + * (except for some lost CPU cycles) + */ + pthread_setspecific(mThreadKey, (void*) env); + + return env; +} + +static void Android_JNI_ThreadDestroyed(void* value) +{ + /* The thread is being destroyed, detach it from the Java VM and set the mThreadKey value to NULL as required */ + JNIEnv *env = (JNIEnv*) value; + if (env != NULL) { + (*mJavaVM)->DetachCurrentThread(mJavaVM); + pthread_setspecific(mThreadKey, NULL); + } +} + +void *WebView_AndroidGetJNIEnv() +{ + return Android_JNI_GetEnv(); +} diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/proguard-project.txt b/p4a/pythonforandroidold/bootstraps/lbry/build/proguard-project.txt new file mode 100644 index 0000000..f2fe155 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/proguard-project.txt @@ -0,0 +1,20 @@ +# To enable ProGuard in your project, edit project.properties +# to define the proguard.config property as described in that file. +# +# Add project specific ProGuard rules here. +# By default, the flags in this file are appended to flags specified +# in ${sdk.dir}/tools/proguard/proguard-android.txt +# You can edit the include path and order by changing the ProGuard +# include property in project.properties. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# Add any project specific keep options here: + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-hdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-hdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-hdpi/ic_launcher.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-hdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-mdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-mdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-mdpi/ic_launcher.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-mdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-xhdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-xhdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-xhdpi/ic_launcher.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-xhdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-xxhdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-xxhdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-xxhdpi/ic_launcher.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-xxhdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-xxxhdpi/lbry-icon.png b/p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-xxxhdpi/lbry-icon.png similarity index 100% rename from 
p4a/pythonforandroid/bootstraps/lbry/build/res/drawable-xxxhdpi/lbry-icon.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable-xxxhdpi/lbry-icon.png diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable/.gitkeep b/p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/res/drawable/icon.png b/p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable/icon.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/res/drawable/icon.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/drawable/icon.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/layout/chooser_item.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/chooser_item.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/layout/chooser_item.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/chooser_item.xml diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/main.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/main.xml new file mode 100644 index 0000000..123c4b6 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/main.xml @@ -0,0 +1,13 @@ + + + + + diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/layout/project_chooser.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/project_chooser.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/layout/project_chooser.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/project_chooser.xml diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/layout/project_empty.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/project_empty.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/layout/project_empty.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/res/layout/project_empty.xml diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/res/values/strings.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/res/values/strings.xml new file mode 100644 index 0000000..daebceb --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/res/values/strings.xml @@ -0,0 +1,5 @@ +<?xml version="1.0" encoding="utf-8"?> +<resources> +    <string name="app_name">SDL App</string> +    <string name="private_version">0.1</string> +</resources> diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/assets/.gitkeep b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/assets/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/.gitkeep b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/Octal.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/Octal.java new file mode 100755 index 0000000..dd10624 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/Octal.java @@ -0,0 +1,141 @@ +/** + * Copyright 2012 Kamran Zafar + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.kamranzafar.jtar; + +/** + * @author Kamran Zafar + * + */ +public class Octal { + + /** + * Parse an octal string from a header buffer. This is used for the file + * permission mode value. + * + * @param header + * The header buffer from which to parse. + * @param offset + * The offset into the buffer from which to parse. + * @param length + * The number of header bytes to parse. + * + * @return The long value of the octal string. + */ + public static long parseOctal(byte[] header, int offset, int length) { + long result = 0; + boolean stillPadding = true; + + int end = offset + length; + for (int i = offset; i < end; ++i) { + if (header[i] == 0) + break; + + if (header[i] == (byte) ' ' || header[i] == '0') { + if (stillPadding) + continue; + + if (header[i] == (byte) ' ') + break; + } + + stillPadding = false; + + result = ( result << 3 ) + ( header[i] - '0' ); + } + + return result; + } + + /** + * Parse an octal integer from a header buffer. + * + * @param value + * @param buf + * The header buffer from which to parse. + * @param offset + * The offset into the buffer from which to parse. + * @param length + * The number of header bytes to parse. + * + * @return The integer value of the octal bytes. + */ + public static int getOctalBytes(long value, byte[] buf, int offset, int length) { + int idx = length - 1; + + buf[offset + idx] = 0; + --idx; + buf[offset + idx] = (byte) ' '; + --idx; + + if (value == 0) { + buf[offset + idx] = (byte) '0'; + --idx; + } else { + for (long val = value; idx >= 0 && val > 0; --idx) { + buf[offset + idx] = (byte) ( (byte) '0' + (byte) ( val & 7 ) ); + val = val >> 3; + } + } + + for (; idx >= 0; --idx) { + buf[offset + idx] = (byte) ' '; + } + + return offset + length; + } + + /** + * Parse the checksum octal integer from a header buffer. + * + * @param value + * @param buf + * The header buffer from which to parse. + * @param offset + * The offset into the buffer from which to parse. + * @param length + * The number of header bytes to parse. + * @return The integer value of the entry's checksum. + */ + public static int getCheckSumOctalBytes(long value, byte[] buf, int offset, int length) { + getOctalBytes( value, buf, offset, length ); + buf[offset + length - 1] = (byte) ' '; + buf[offset + length - 2] = 0; + return offset + length; + } + + /** + * Parse an octal long integer from a header buffer. + * + * @param value + * @param buf + * The header buffer from which to parse. + * @param offset + * The offset into the buffer from which to parse. + * @param length + * The number of header bytes to parse. + * + * @return The long value of the octal bytes. 
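+     * 
+     * Worked example for the octal helpers in this class: a permission mode
+     * of 0755 is stored as the ASCII digits "755" plus padding, and
+     * parseOctal() folds the digits back as ((7 * 8) + 5) * 8 + 5 = 493,
+     * which is 0755 in octal.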
+ */ + public static int getLongOctalBytes(long value, byte[] buf, int offset, int length) { + byte[] temp = new byte[length + 1]; + getOctalBytes( value, temp, 0, length + 1 ); + System.arraycopy( temp, 0, buf, offset, length ); + return offset + length; + } + +} diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarConstants.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarConstants.java new file mode 100755 index 0000000..4611e20 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarConstants.java @@ -0,0 +1,28 @@ +/** + * Copyright 2012 Kamran Zafar + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.kamranzafar.jtar; + +/** + * @author Kamran Zafar + * + */ +public class TarConstants { + public static final int EOF_BLOCK = 1024; + public static final int DATA_BLOCK = 512; + public static final int HEADER_BLOCK = 512; +} diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarEntry.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarEntry.java new file mode 100755 index 0000000..fe01db4 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarEntry.java @@ -0,0 +1,284 @@ +/** + * Copyright 2012 Kamran Zafar + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.kamranzafar.jtar; + +import java.io.File; +import java.util.Date; + +/** + * @author Kamran Zafar + * + */ +public class TarEntry { + protected File file; + protected TarHeader header; + + private TarEntry() { + this.file = null; + header = new TarHeader(); + } + + public TarEntry(File file, String entryName) { + this(); + this.file = file; + this.extractTarHeader(entryName); + } + + public TarEntry(byte[] headerBuf) { + this(); + this.parseTarHeader(headerBuf); + } + + /** + * Constructor to create an entry from an existing TarHeader object. + * + * This method is useful to add new entries programmatically (e.g. for + * adding files or directories that do not exist in the file system). 
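+     * 
+     * Illustrative use, relying on the TarHeader.createHeader() factory that
+     * this patch also adds (the entry name and size are examples only):
+     * 
+     *     TarHeader h = TarHeader.createHeader("app/main.py", 1024L,
+     *             System.currentTimeMillis() / 1000, false);
+     *     TarEntry entry = new TarEntry(h);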
+ * + * @param header + * + */ + public TarEntry(TarHeader header) { + this.file = null; + this.header = header; + } + + public boolean equals(TarEntry it) { + return header.name.toString().equals(it.header.name.toString()); + } + + public boolean isDescendent(TarEntry desc) { + return desc.header.name.toString().startsWith(header.name.toString()); + } + + public TarHeader getHeader() { + return header; + } + + public String getName() { + String name = header.name.toString(); + if (header.namePrefix != null && !header.namePrefix.toString().equals("")) { + name = header.namePrefix.toString() + "/" + name; + } + + return name; + } + + public void setName(String name) { + header.name = new StringBuffer(name); + } + + public int getUserId() { + return header.userId; + } + + public void setUserId(int userId) { + header.userId = userId; + } + + public int getGroupId() { + return header.groupId; + } + + public void setGroupId(int groupId) { + header.groupId = groupId; + } + + public String getUserName() { + return header.userName.toString(); + } + + public void setUserName(String userName) { + header.userName = new StringBuffer(userName); + } + + public String getGroupName() { + return header.groupName.toString(); + } + + public void setGroupName(String groupName) { + header.groupName = new StringBuffer(groupName); + } + + public void setIds(int userId, int groupId) { + this.setUserId(userId); + this.setGroupId(groupId); + } + + public void setModTime(long time) { + header.modTime = time / 1000; + } + + public void setModTime(Date time) { + header.modTime = time.getTime() / 1000; + } + + public Date getModTime() { + return new Date(header.modTime * 1000); + } + + public File getFile() { + return this.file; + } + + public long getSize() { + return header.size; + } + + public void setSize(long size) { + header.size = size; + } + + /** + * Checks if the org.kamrazafar.jtar entry is a directory + * + * @return + */ + public boolean isDirectory() { + if (this.file != null) + return this.file.isDirectory(); + + if (header != null) { + if (header.linkFlag == TarHeader.LF_DIR) + return true; + + if (header.name.toString().endsWith("/")) + return true; + } + + return false; + } + + /** + * Extract header from File + * + * @param entryName + */ + public void extractTarHeader(String entryName) { + header = TarHeader.createHeader(entryName, file.length(), file.lastModified() / 1000, file.isDirectory()); + } + + /** + * Calculate checksum + * + * @param buf + * @return + */ + public long computeCheckSum(byte[] buf) { + long sum = 0; + + for (int i = 0; i < buf.length; ++i) { + sum += 255 & buf[i]; + } + + return sum; + } + + /** + * Writes the header to the byte buffer + * + * @param outbuf + */ + public void writeEntryHeader(byte[] outbuf) { + int offset = 0; + + offset = TarHeader.getNameBytes(header.name, outbuf, offset, TarHeader.NAMELEN); + offset = Octal.getOctalBytes(header.mode, outbuf, offset, TarHeader.MODELEN); + offset = Octal.getOctalBytes(header.userId, outbuf, offset, TarHeader.UIDLEN); + offset = Octal.getOctalBytes(header.groupId, outbuf, offset, TarHeader.GIDLEN); + + long size = header.size; + + offset = Octal.getLongOctalBytes(size, outbuf, offset, TarHeader.SIZELEN); + offset = Octal.getLongOctalBytes(header.modTime, outbuf, offset, TarHeader.MODTIMELEN); + + int csOffset = offset; + for (int c = 0; c < TarHeader.CHKSUMLEN; ++c) + outbuf[offset++] = (byte) ' '; + + outbuf[offset++] = header.linkFlag; + + offset = TarHeader.getNameBytes(header.linkName, outbuf, offset, TarHeader.NAMELEN); 
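+        // The fields below form the ustar extension block (magic, user/group
+        // names, device numbers, name prefix); see the offset table in the
+        // TarHeader javadoc. The checksum field was blanked to spaces above so
+        // computeCheckSum() can run over the complete 512-byte header; the
+        // result is patched in at csOffset by getCheckSumOctalBytes() below.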
+ offset = TarHeader.getNameBytes(header.magic, outbuf, offset, TarHeader.USTAR_MAGICLEN); + offset = TarHeader.getNameBytes(header.userName, outbuf, offset, TarHeader.USTAR_USER_NAMELEN); + offset = TarHeader.getNameBytes(header.groupName, outbuf, offset, TarHeader.USTAR_GROUP_NAMELEN); + offset = Octal.getOctalBytes(header.devMajor, outbuf, offset, TarHeader.USTAR_DEVLEN); + offset = Octal.getOctalBytes(header.devMinor, outbuf, offset, TarHeader.USTAR_DEVLEN); + offset = TarHeader.getNameBytes(header.namePrefix, outbuf, offset, TarHeader.USTAR_FILENAME_PREFIX); + + for (; offset < outbuf.length;) + outbuf[offset++] = 0; + + long checkSum = this.computeCheckSum(outbuf); + + Octal.getCheckSumOctalBytes(checkSum, outbuf, csOffset, TarHeader.CHKSUMLEN); + } + + /** + * Parses the tar header to the byte buffer + * + * @param header + * @param bh + */ + public void parseTarHeader(byte[] bh) { + int offset = 0; + + header.name = TarHeader.parseName(bh, offset, TarHeader.NAMELEN); + offset += TarHeader.NAMELEN; + + header.mode = (int) Octal.parseOctal(bh, offset, TarHeader.MODELEN); + offset += TarHeader.MODELEN; + + header.userId = (int) Octal.parseOctal(bh, offset, TarHeader.UIDLEN); + offset += TarHeader.UIDLEN; + + header.groupId = (int) Octal.parseOctal(bh, offset, TarHeader.GIDLEN); + offset += TarHeader.GIDLEN; + + header.size = Octal.parseOctal(bh, offset, TarHeader.SIZELEN); + offset += TarHeader.SIZELEN; + + header.modTime = Octal.parseOctal(bh, offset, TarHeader.MODTIMELEN); + offset += TarHeader.MODTIMELEN; + + header.checkSum = (int) Octal.parseOctal(bh, offset, TarHeader.CHKSUMLEN); + offset += TarHeader.CHKSUMLEN; + + header.linkFlag = bh[offset++]; + + header.linkName = TarHeader.parseName(bh, offset, TarHeader.NAMELEN); + offset += TarHeader.NAMELEN; + + header.magic = TarHeader.parseName(bh, offset, TarHeader.USTAR_MAGICLEN); + offset += TarHeader.USTAR_MAGICLEN; + + header.userName = TarHeader.parseName(bh, offset, TarHeader.USTAR_USER_NAMELEN); + offset += TarHeader.USTAR_USER_NAMELEN; + + header.groupName = TarHeader.parseName(bh, offset, TarHeader.USTAR_GROUP_NAMELEN); + offset += TarHeader.USTAR_GROUP_NAMELEN; + + header.devMajor = (int) Octal.parseOctal(bh, offset, TarHeader.USTAR_DEVLEN); + offset += TarHeader.USTAR_DEVLEN; + + header.devMinor = (int) Octal.parseOctal(bh, offset, TarHeader.USTAR_DEVLEN); + offset += TarHeader.USTAR_DEVLEN; + + header.namePrefix = TarHeader.parseName(bh, offset, TarHeader.USTAR_FILENAME_PREFIX); + } +} \ No newline at end of file diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarHeader.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarHeader.java new file mode 100755 index 0000000..b9d3a86 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarHeader.java @@ -0,0 +1,243 @@ +/** + * Copyright 2012 Kamran Zafar + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.kamranzafar.jtar; + +import java.io.File; + +/** + * Header + * + * <pre>
+ * Offset  Size     Field
+ * 0       100      File name
+ * 100     8        File mode
+ * 108     8        Owner's numeric user ID
+ * 116     8        Group's numeric user ID
+ * 124     12       File size in bytes
+ * 136     12       Last modification time in numeric Unix time format
+ * 148     8        Checksum for header block
+ * 156     1        Link indicator (file type)
+ * 157     100      Name of linked file
+ * </pre>
+ * + * + * File Types + * + * <pre>
+ * Value        Meaning
+ * '0'          Normal file
+ * (ASCII NUL)  Normal file (now obsolete)
+ * '1'          Hard link
+ * '2'          Symbolic link
+ * '3'          Character special
+ * '4'          Block special
+ * '5'          Directory
+ * '6'          FIFO
+ * '7'          Contiguous
+ * </pre>
+ * + * + * + * Ustar header + * + * <pre>
+ * Offset  Size    Field
+ * 257     6       UStar indicator "ustar"
+ * 263     2       UStar version "00"
+ * 265     32      Owner user name
+ * 297     32      Owner group name
+ * 329     8       Device major number
+ * 337     8       Device minor number
+ * 345     155     Filename prefix
+ * </pre>
+ */ + +public class TarHeader { + + /* + * Header + */ + public static final int NAMELEN = 100; + public static final int MODELEN = 8; + public static final int UIDLEN = 8; + public static final int GIDLEN = 8; + public static final int SIZELEN = 12; + public static final int MODTIMELEN = 12; + public static final int CHKSUMLEN = 8; + public static final byte LF_OLDNORM = 0; + + /* + * File Types + */ + public static final byte LF_NORMAL = (byte) '0'; + public static final byte LF_LINK = (byte) '1'; + public static final byte LF_SYMLINK = (byte) '2'; + public static final byte LF_CHR = (byte) '3'; + public static final byte LF_BLK = (byte) '4'; + public static final byte LF_DIR = (byte) '5'; + public static final byte LF_FIFO = (byte) '6'; + public static final byte LF_CONTIG = (byte) '7'; + + /* + * Ustar header + */ + + public static final String USTAR_MAGIC = "ustar"; // POSIX + + public static final int USTAR_MAGICLEN = 8; + public static final int USTAR_USER_NAMELEN = 32; + public static final int USTAR_GROUP_NAMELEN = 32; + public static final int USTAR_DEVLEN = 8; + public static final int USTAR_FILENAME_PREFIX = 155; + + // Header values + public StringBuffer name; + public int mode; + public int userId; + public int groupId; + public long size; + public long modTime; + public int checkSum; + public byte linkFlag; + public StringBuffer linkName; + public StringBuffer magic; // ustar indicator and version + public StringBuffer userName; + public StringBuffer groupName; + public int devMajor; + public int devMinor; + public StringBuffer namePrefix; + + public TarHeader() { + this.magic = new StringBuffer(TarHeader.USTAR_MAGIC); + + this.name = new StringBuffer(); + this.linkName = new StringBuffer(); + + String user = System.getProperty("user.name", ""); + + if (user.length() > 31) + user = user.substring(0, 31); + + this.userId = 0; + this.groupId = 0; + this.userName = new StringBuffer(user); + this.groupName = new StringBuffer(""); + this.namePrefix = new StringBuffer(); + } + + /** + * Parse an entry name from a header buffer. + * + * @param name + * @param header + * The header buffer from which to parse. + * @param offset + * The offset into the buffer from which to parse. + * @param length + * The number of header bytes to parse. + * @return The header's entry name. + */ + public static StringBuffer parseName(byte[] header, int offset, int length) { + StringBuffer result = new StringBuffer(length); + + int end = offset + length; + for (int i = offset; i < end; ++i) { + if (header[i] == 0) + break; + result.append((char) header[i]); + } + + return result; + } + + /** + * Determine the number of bytes in an entry name. + * + * @param name + * @param header + * The header buffer from which to parse. + * @param offset + * The offset into the buffer from which to parse. + * @param length + * The number of header bytes to parse. + * @return The number of bytes in a header's entry name. + */ + public static int getNameBytes(StringBuffer name, byte[] buf, int offset, int length) { + int i; + + for (i = 0; i < length && i < name.length(); ++i) { + buf[offset + i] = (byte) name.charAt(i); + } + + for (; i < length; ++i) { + buf[offset + i] = 0; + } + + return offset + length; + } + + /** + * Creates a new header for a file/directory entry. 
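+     * 
+     * Behaviour worth noting (grounded in the implementation below): regular
+     * files get mode 0100644 and LF_NORMAL; directories get mode 040755,
+     * LF_DIR and a trailing '/'; and a name longer than 100 characters is
+     * split at its last '/' into the 155-byte namePrefix and 100-byte name
+     * fields documented in the ustar table above.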
+ * + * + * @param name + * File name + * @param size + * File size in bytes + * @param modTime + * Last modification time in numeric Unix time format + * @param dir + * Is directory + * + * @return + */ + public static TarHeader createHeader(String entryName, long size, long modTime, boolean dir) { + String name = entryName; + name = TarUtils.trim(name.replace(File.separatorChar, '/'), '/'); + + TarHeader header = new TarHeader(); + header.linkName = new StringBuffer(""); + + if (name.length() > 100) { + header.namePrefix = new StringBuffer(name.substring(0, name.lastIndexOf('/'))); + header.name = new StringBuffer(name.substring(name.lastIndexOf('/') + 1)); + } else { + header.name = new StringBuffer(name); + } + + if (dir) { + header.mode = 040755; + header.linkFlag = TarHeader.LF_DIR; + if (header.name.charAt(header.name.length() - 1) != '/') { + header.name.append("/"); + } + header.size = 0; + } else { + header.mode = 0100644; + header.linkFlag = TarHeader.LF_NORMAL; + header.size = size; + } + + header.modTime = modTime; + header.checkSum = 0; + header.devMajor = 0; + header.devMinor = 0; + + return header; + } +} \ No newline at end of file diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarInputStream.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarInputStream.java new file mode 100755 index 0000000..ec50a1b --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarInputStream.java @@ -0,0 +1,249 @@ +/** + * Copyright 2012 Kamran Zafar + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.kamranzafar.jtar; + +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; + +/** + * @author Kamran Zafar + * + */ +public class TarInputStream extends FilterInputStream { + + private static final int SKIP_BUFFER_SIZE = 2048; + private TarEntry currentEntry; + private long currentFileSize; + private long bytesRead; + private boolean defaultSkip = false; + + public TarInputStream(InputStream in) { + super(in); + currentFileSize = 0; + bytesRead = 0; + } + + @Override + public boolean markSupported() { + return false; + } + + /** + * Not supported + * + */ + @Override + public synchronized void mark(int readlimit) { + } + + /** + * Not supported + * + */ + @Override + public synchronized void reset() throws IOException { + throw new IOException("mark/reset not supported"); + } + + /** + * Read a byte + * + * @see java.io.FilterInputStream#read() + */ + @Override + public int read() throws IOException { + byte[] buf = new byte[1]; + + int res = this.read(buf, 0, 1); + + if (res != -1) { + return 0xFF & buf[0]; + } + + return res; + } + + /** + * Checks if the bytes being read exceed the entry size and adjusts the byte + * array length. 
Updates the byte counters + * + * + * @see java.io.FilterInputStream#read(byte[], int, int) + */ + @Override + public int read(byte[] b, int off, int len) throws IOException { + if (currentEntry != null) { + if (currentFileSize == currentEntry.getSize()) { + return -1; + } else if ((currentEntry.getSize() - currentFileSize) < len) { + len = (int) (currentEntry.getSize() - currentFileSize); + } + } + + int br = super.read(b, off, len); + + if (br != -1) { + if (currentEntry != null) { + currentFileSize += br; + } + + bytesRead += br; + } + + return br; + } + + /** + * Returns the next entry in the tar file + * + * @return TarEntry + * @throws IOException + */ + public TarEntry getNextEntry() throws IOException { + closeCurrentEntry(); + + byte[] header = new byte[TarConstants.HEADER_BLOCK]; + byte[] theader = new byte[TarConstants.HEADER_BLOCK]; + int tr = 0; + + // Read full header + while (tr < TarConstants.HEADER_BLOCK) { + int res = read(theader, 0, TarConstants.HEADER_BLOCK - tr); + + if (res < 0) { + break; + } + + System.arraycopy(theader, 0, header, tr, res); + tr += res; + } + + // Check if record is null + boolean eof = true; + for (byte b : header) { + if (b != 0) { + eof = false; + break; + } + } + + if (!eof) { + currentEntry = new TarEntry(header); + } + + return currentEntry; + } + + /** + * Returns the current offset (in bytes) from the beginning of the stream. + * This can be used to find out at which point in a tar file an entry's content begins, for instance. + */ + public long getCurrentOffset() { + return bytesRead; + } + + /** + * Closes the current tar entry + * + * @throws IOException + */ + protected void closeCurrentEntry() throws IOException { + if (currentEntry != null) { + if (currentEntry.getSize() > currentFileSize) { + // Not fully read, skip rest of the bytes + long bs = 0; + while (bs < currentEntry.getSize() - currentFileSize) { + long res = skip(currentEntry.getSize() - currentFileSize - bs); + + if (res == 0 && currentEntry.getSize() - currentFileSize > 0) { + // I suspect file corruption + throw new IOException("Possible tar file corruption"); + } + + bs += res; + } + } + + currentEntry = null; + currentFileSize = 0L; + skipPad(); + } + } + + /** + * Skips the pad at the end of each tar entry file content + * + * @throws IOException + */ + protected void skipPad() throws IOException { + if (bytesRead > 0) { + int extra = (int) (bytesRead % TarConstants.DATA_BLOCK); + + if (extra > 0) { + long bs = 0; + while (bs < TarConstants.DATA_BLOCK - extra) { + long res = skip(TarConstants.DATA_BLOCK - extra - bs); + bs += res; + } + } + } + } + + /** + * Skips 'n' bytes on the InputStream
+ * Overrides default implementation of skip + * + */ + @Override + public long skip(long n) throws IOException { + if (defaultSkip) { + // use skip method of parent stream + // may not work if skip not implemented by parent + long bs = super.skip(n); + bytesRead += bs; + + return bs; + } + + if (n <= 0) { + return 0; + } + + long left = n; + byte[] sBuff = new byte[SKIP_BUFFER_SIZE]; + + while (left > 0) { + int res = read(sBuff, 0, (int) (left < SKIP_BUFFER_SIZE ? left : SKIP_BUFFER_SIZE)); + if (res < 0) { + break; + } + left -= res; + } + + return n - left; + } + + public boolean isDefaultSkip() { + return defaultSkip; + } + + public void setDefaultSkip(boolean defaultSkip) { + this.defaultSkip = defaultSkip; + } +} diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarOutputStream.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarOutputStream.java new file mode 100755 index 0000000..ffdfe87 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarOutputStream.java @@ -0,0 +1,163 @@ +/** + * Copyright 2012 Kamran Zafar + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.kamranzafar.jtar; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.io.RandomAccessFile; + +/** + * @author Kamran Zafar + * + */ +public class TarOutputStream extends OutputStream { + private final OutputStream out; + private long bytesWritten; + private long currentFileSize; + private TarEntry currentEntry; + + public TarOutputStream(OutputStream out) { + this.out = out; + bytesWritten = 0; + currentFileSize = 0; + } + + public TarOutputStream(final File fout) throws FileNotFoundException { + this.out = new BufferedOutputStream(new FileOutputStream(fout)); + bytesWritten = 0; + currentFileSize = 0; + } + + /** + * Opens a file for writing. 
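+     * 
+     * When append is true, the constructor seeks to just before the trailing
+     * end-of-archive marker (TarConstants.EOF_BLOCK: two zero-filled
+     * 512-byte records, 1024 bytes) so that new entries overwrite the old
+     * marker and close() writes a fresh one.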
+ */ + public TarOutputStream(final File fout, final boolean append) throws IOException { + @SuppressWarnings("resource") + RandomAccessFile raf = new RandomAccessFile(fout, "rw"); + final long fileSize = fout.length(); + if (append && fileSize > TarConstants.EOF_BLOCK) { + raf.seek(fileSize - TarConstants.EOF_BLOCK); + } + out = new BufferedOutputStream(new FileOutputStream(raf.getFD())); + } + + /** + * Appends the EOF record and closes the stream + * + * @see java.io.FilterOutputStream#close() + */ + @Override + public void close() throws IOException { + closeCurrentEntry(); + write( new byte[TarConstants.EOF_BLOCK] ); + out.close(); + } + /** + * Writes a byte to the stream and updates byte counters + * + * @see java.io.FilterOutputStream#write(int) + */ + @Override + public void write(int b) throws IOException { + out.write( b ); + bytesWritten += 1; + + if (currentEntry != null) { + currentFileSize += 1; + } + } + + /** + * Checks if the bytes being written exceed the current entry size. + * + * @see java.io.FilterOutputStream#write(byte[], int, int) + */ + @Override + public void write(byte[] b, int off, int len) throws IOException { + if (currentEntry != null && !currentEntry.isDirectory()) { + if (currentEntry.getSize() < currentFileSize + len) { + throw new IOException( "The current entry[" + currentEntry.getName() + "] size[" + + currentEntry.getSize() + "] is smaller than the bytes[" + ( currentFileSize + len ) + + "] being written." ); + } + } + + out.write( b, off, len ); + + bytesWritten += len; + + if (currentEntry != null) { + currentFileSize += len; + } + } + + /** + * Writes the next tar entry header on the stream + * + * @param entry + * @throws IOException + */ + public void putNextEntry(TarEntry entry) throws IOException { + closeCurrentEntry(); + + byte[] header = new byte[TarConstants.HEADER_BLOCK]; + entry.writeEntryHeader( header ); + + write( header ); + + currentEntry = entry; + } + + /** + * Closes the current tar entry + * + * @throws IOException + */ + protected void closeCurrentEntry() throws IOException { + if (currentEntry != null) { + if (currentEntry.getSize() > currentFileSize) { + throw new IOException( "The current entry[" + currentEntry.getName() + "] of size[" + + currentEntry.getSize() + "] has not been fully written." ); + } + + currentEntry = null; + currentFileSize = 0; + + pad(); + } + } + + /** + * Pads the last content block + * + * @throws IOException + */ + protected void pad() throws IOException { + if (bytesWritten > 0) { + int extra = (int) ( bytesWritten % TarConstants.DATA_BLOCK ); + + if (extra > 0) { + write( new byte[TarConstants.DATA_BLOCK - extra] ); + } + } + } +} diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarUtils.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarUtils.java new file mode 100755 index 0000000..5016576 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kamranzafar/jtar/TarUtils.java @@ -0,0 +1,96 @@ +/** + * Copyright 2012 Kamran Zafar + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.kamranzafar.jtar; + +import java.io.File; + +/** + * @author Kamran + * + */ +public class TarUtils { + /** + * Determines the tar file size of the given folder/file path + * + * @param path + * @return + */ + public static long calculateTarSize(File path) { + return tarSize(path) + TarConstants.EOF_BLOCK; + } + + private static long tarSize(File dir) { + long size = 0; + + if (dir.isFile()) { + return entrySize(dir.length()); + } else { + File[] subFiles = dir.listFiles(); + + if (subFiles != null && subFiles.length > 0) { + for (File file : subFiles) { + if (file.isFile()) { + size += entrySize(file.length()); + } else { + size += tarSize(file); + } + } + } else { + // Empty folder header + return TarConstants.HEADER_BLOCK; + } + } + + return size; + } + + private static long entrySize(long fileSize) { + long size = 0; + size += TarConstants.HEADER_BLOCK; // Header + size += fileSize; // File size + + long extra = size % TarConstants.DATA_BLOCK; + + if (extra > 0) { + size += (TarConstants.DATA_BLOCK - extra); // pad + } + + return size; + } + + public static String trim(String s, char c) { + StringBuffer tmp = new StringBuffer(s); + for (int i = 0; i < tmp.length(); i++) { + if (tmp.charAt(i) != c) { + break; + } else { + tmp.deleteCharAt(i); + } + } + + for (int i = tmp.length() - 1; i >= 0; i--) { + if (tmp.charAt(i) != c) { + break; + } else { + tmp.deleteCharAt(i); + } + } + + return tmp.toString(); + } +} diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java new file mode 100644 index 0000000..58a1c5e --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiver.java @@ -0,0 +1,19 @@ +package org.kivy.android; + +import android.content.BroadcastReceiver; +import android.content.Intent; +import android.content.Context; + +public class GenericBroadcastReceiver extends BroadcastReceiver { + + GenericBroadcastReceiverCallback listener; + + public GenericBroadcastReceiver(GenericBroadcastReceiverCallback listener) { + super(); + this.listener = listener; + } + + public void onReceive(Context context, Intent intent) { + this.listener.onReceive(context, intent); + } +} diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java new file mode 100644 index 0000000..1a87c98 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/GenericBroadcastReceiverCallback.java @@ -0,0 +1,8 @@ +package org.kivy.android; + +import android.content.Intent; +import android.content.Context; + +public interface GenericBroadcastReceiverCallback { + void onReceive(Context context, Intent intent); +}; diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonActivity.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonActivity.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonActivity.java rename to 
p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonActivity.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonService.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonService.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonService.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonService.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonUtil.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonUtil.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonUtil.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/PythonUtil.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/concurrency/PythonEvent.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/concurrency/PythonEvent.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/concurrency/PythonEvent.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/concurrency/PythonEvent.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/concurrency/PythonLock.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/concurrency/PythonLock.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/concurrency/PythonLock.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/concurrency/PythonLock.java diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/Project.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/Project.java new file mode 100644 index 0000000..9177b43 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/Project.java @@ -0,0 +1,99 @@
+package org.kivy.android.launcher;
+
+import java.io.UnsupportedEncodingException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.util.Properties;
+
+import android.util.Log;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+
+
+/**
+ * This represents a project we've scanned for.
+ */
+public class Project {
+
+ public String dir = null;
+ String title = null;
+ String author = null;
+ Bitmap icon = null;
+ public boolean landscape = false;
+
+ static String decode(String s) {
+ try {
+ return new String(s.getBytes("ISO-8859-1"), "UTF-8");
+ } catch (UnsupportedEncodingException e) {
+ return s;
+ }
+ }
+
+ /**
+ * Scans a directory for an android.txt file. If it finds one and it
+ * looks valid enough, it creates and returns a new Project;
+ * otherwise it returns null.
+ */
+ public static Project scanDirectory(File dir) {
+
+ // We might have a link file.
+ if (dir.getAbsolutePath().endsWith(".link")) {
+ try {
+
+ // Scan the android.txt file.
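+ // A .link directory's android.txt holds only a "directory" property
+ // pointing at the real project folder, so redirect there before scanning.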
+ File propfile = new File(dir, "android.txt"); + FileInputStream in = new FileInputStream(propfile); + Properties p = new Properties(); + p.load(in); + in.close(); + + String directory = p.getProperty("directory", null); + + if (directory == null) { + return null; + } + + dir = new File(directory); + + } catch (Exception e) { + Log.i("Project", "Couldn't open link file " + dir, e); + } + } + + // Make sure we're dealing with a directory. + if (! dir.isDirectory()) { + return null; + } + + try { + + // Scan the android.txt file. + File propfile = new File(dir, "android.txt"); + FileInputStream in = new FileInputStream(propfile); + Properties p = new Properties(); + p.load(in); + in.close(); + + // Get the various properties. + String title = decode(p.getProperty("title", "Untitled")); + String author = decode(p.getProperty("author", "")); + boolean landscape = p.getProperty("orientation", "portrait").equals("landscape"); + + // Create the project object. + Project rv = new Project(); + rv.title = title; + rv.author = author; + rv.icon = BitmapFactory.decodeFile(new File(dir, "icon.png").getAbsolutePath()); + rv.landscape = landscape; + rv.dir = dir.getAbsolutePath(); + + return rv; + + } catch (Exception e) { + Log.i("Project", "Couldn't open android.txt", e); + } + + return null; + + } +} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/ProjectAdapter.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/ProjectAdapter.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/ProjectAdapter.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/ProjectAdapter.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/ProjectChooser.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/ProjectChooser.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/ProjectChooser.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/kivy/android/launcher/ProjectChooser.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/libsdl/app/SDLActivity.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/libsdl/app/SDLActivity.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/libsdl/app/SDLActivity.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/libsdl/app/SDLActivity.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/AssetExtract.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/AssetExtract.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/AssetExtract.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/AssetExtract.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/Hardware.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/Hardware.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/Hardware.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/Hardware.java diff --git 
a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/PythonActivity.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/PythonActivity.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/PythonActivity.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/PythonActivity.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/PythonService.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/PythonService.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/PythonService.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/PythonService.java diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/ResourceManager.java b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/ResourceManager.java similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/java/org/renpy/android/ResourceManager.java rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/java/org/renpy/android/ResourceManager.java diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/jniLibs/.gitkeep b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/jniLibs/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/libs/.gitkeep b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/libs/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-hdpi/baseline_search_black_24.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-hdpi/baseline_search_black_24.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-hdpi/baseline_search_black_24.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-hdpi/baseline_search_black_24.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-hdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-hdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-hdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-hdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-hdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_launcher.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-hdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-mdpi/baseline_search_black_24.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-mdpi/baseline_search_black_24.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-mdpi/baseline_search_black_24.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-mdpi/baseline_search_black_24.png diff --git 
a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-mdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-mdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-mdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-mdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-mdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_launcher.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-mdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xhdpi/baseline_search_black_24.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xhdpi/baseline_search_black_24.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xhdpi/baseline_search_black_24.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xhdpi/baseline_search_black_24.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xhdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xhdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xhdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xhdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xhdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_launcher.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xhdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/baseline_search_black_24.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/baseline_search_black_24.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/baseline_search_black_24.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/baseline_search_black_24.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/ic_launcher.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_launcher.png rename to 
p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxhdpi/ic_launcher.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxxhdpi/baseline_search_black_24.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxxhdpi/baseline_search_black_24.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxxhdpi/baseline_search_black_24.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxxhdpi/baseline_search_black_24.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxxhdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxxhdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable-xxxhdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable-xxxhdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable/.gitkeep b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable/baseline_search_24.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable/baseline_search_24.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/src/main/res/drawable/baseline_search_24.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/drawable/baseline_search_24.xml diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/chooser_item.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/chooser_item.xml new file mode 100644 index 0000000..1823b13 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/chooser_item.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + + + diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/main.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/main.xml new file mode 100644 index 0000000..123c4b6 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/main.xml @@ -0,0 +1,13 @@ + + + + + diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/project_chooser.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/project_chooser.xml new file mode 100644 index 0000000..23828e6 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/project_chooser.xml @@ -0,0 +1,22 @@ + + + + + + + + + + diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/project_empty.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/project_empty.xml new file mode 100644 index 0000000..ee54814 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/src/main/res/layout/project_empty.xml @@ -0,0 +1,15 @@ + + + + + + + diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/AndroidManifest.tmpl.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/AndroidManifest.tmpl.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/AndroidManifest.tmpl.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/AndroidManifest.tmpl.xml diff --git 
a/p4a/pythonforandroidold/bootstraps/lbry/build/templates/Service.tmpl.java b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/Service.tmpl.java new file mode 100644 index 0000000..3ed10c2 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/Service.tmpl.java @@ -0,0 +1,77 @@ +package {{ args.package }}; + +import android.os.Build; +import java.lang.reflect.Method; +import java.lang.reflect.InvocationTargetException; +import android.content.Intent; +import android.content.Context; +import android.app.Notification; +import android.app.PendingIntent; +import android.os.Bundle; +import org.kivy.android.PythonService; +import org.kivy.android.PythonActivity; + + +public class Service{{ name|capitalize }} extends PythonService { + {% if sticky %} + @Override + public int startType() { + return START_STICKY; + } + {% endif %} + + {% if not foreground %} + @Override + public boolean canDisplayNotification() { + return false; + } + {% endif %} + + @Override + protected void doStartForeground(Bundle extras) { + Notification notification; + Context context = getApplicationContext(); + Intent contextIntent = new Intent(context, PythonActivity.class); + PendingIntent pIntent = PendingIntent.getActivity(context, 0, contextIntent, + PendingIntent.FLAG_UPDATE_CURRENT); + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) { + notification = new Notification( + context.getApplicationInfo().icon, "{{ args.name }}", System.currentTimeMillis()); + try { + // prevent using NotificationCompat, this saves 100kb on apk + Method func = notification.getClass().getMethod( + "setLatestEventInfo", Context.class, CharSequence.class, + CharSequence.class, PendingIntent.class); + func.invoke(notification, context, "{{ args.name }}", "{{ name| capitalize }}", pIntent); + } catch (NoSuchMethodException | IllegalAccessException | + IllegalArgumentException | InvocationTargetException e) { + } + } else { + Notification.Builder builder = new Notification.Builder(context); + builder.setContentTitle("{{ args.name }}"); + builder.setContentText("{{ name| capitalize }}"); + builder.setContentIntent(pIntent); + builder.setSmallIcon(context.getApplicationInfo().icon); + notification = builder.build(); + } + startForeground({{ service_id }}, notification); + } + + static public void start(Context ctx, String pythonServiceArgument) { + Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class); + String argument = ctx.getFilesDir().getAbsolutePath() + "/app"; + intent.putExtra("androidPrivate", ctx.getFilesDir().getAbsolutePath()); + intent.putExtra("androidArgument", argument); + intent.putExtra("serviceEntrypoint", "{{ entrypoint }}"); + intent.putExtra("pythonName", "{{ name }}"); + intent.putExtra("pythonHome", argument); + intent.putExtra("pythonPath", argument + ":" + argument + "/lib"); + intent.putExtra("pythonServiceArgument", pythonServiceArgument); + ctx.startService(intent); + } + + static public void stop(Context ctx) { + Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class); + ctx.stopService(intent); + } +} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/activity_service_control.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/activity_service_control.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/activity_service_control.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/activity_service_control.xml diff --git 
a/p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.properties b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.properties new file mode 100644 index 0000000..f12e258 --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.properties @@ -0,0 +1,21 @@ +# This file is used to override default values used by the Ant build system. +# +# This file must be checked in Version Control Systems, as it is +# integral to the build system of your project. + +# This file is only used by the Ant script. + +# You can use this to override default values such as +# 'source.dir' for the location of your java source folder and +# 'out.dir' for the location of your output folder. + +# You can also use it define how the release builds are signed by declaring +# the following properties: +# 'key.store' for the location of your keystore and +# 'key.alias' for the name of the key to use. +# The password will be asked during the build when you use the 'release' target. + +key.store=${env.P4A_RELEASE_KEYSTORE} +key.alias=${env.P4A_RELEASE_KEYALIAS} +key.store.password=${env.P4A_RELEASE_KEYSTORE_PASSWD} +key.alias.password=${env.P4A_RELEASE_KEYALIAS_PASSWD} diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/build.tmpl.gradle b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.tmpl.gradle similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/build.tmpl.gradle rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.tmpl.gradle diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/build.tmpl.gradle.arm b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.tmpl.gradle.arm similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/build.tmpl.gradle.arm rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.tmpl.gradle.arm diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.tmpl.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.tmpl.xml new file mode 100644 index 0000000..9ab301a --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/build.tmpl.xml @@ -0,0 +1,95 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/colors.tmpl.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/colors.tmpl.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/colors.tmpl.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/colors.tmpl.xml diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/templates/custom_rules.tmpl.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/custom_rules.tmpl.xml new file mode 100644 index 0000000..a6a7eba --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/custom_rules.tmpl.xml @@ -0,0 +1,21 @@ + + + + + {% if args.launcher %} + + {% else %} + + + + + {% endif %} + {% for dir, includes in args.extra_source_dirs %} + + {% endfor %} + + + + + + diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/google-services.json.secret b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/google-services.json.secret similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/google-services.json.secret rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/google-services.json.secret diff --git 
a/p4a/pythonforandroid/bootstraps/lbry/build/templates/gradle.properties b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/gradle.properties similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/gradle.properties rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/gradle.properties diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/lbry-icon.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/lbry-icon.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/lbry-icon.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/lbry-icon.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_launcher.png new file mode 100644 index 0000000..d50bdaa Binary files /dev/null and b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-hdpi/ic_launcher.png differ diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_launcher.png new file mode 100644 index 0000000..0a299eb Binary files /dev/null and b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-mdpi/ic_launcher.png differ diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_launcher.png new file mode 100644 index 0000000..a336ad5 Binary files /dev/null and b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xhdpi/ic_launcher.png differ diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_file_download_black_24dp.png similarity index 100% rename from 
p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_launcher.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_launcher.png new file mode 100644 index 0000000..d423dac Binary files /dev/null and b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xxhdpi/ic_launcher.png differ diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xxxhdpi/ic_file_download_black_24dp.png b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xxxhdpi/ic_file_download_black_24dp.png similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/res/drawable-xxxhdpi/ic_file_download_black_24dp.png rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/res/drawable-xxxhdpi/ic_file_download_black_24dp.png diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/strings.tmpl.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/strings.tmpl.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/strings.tmpl.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/strings.tmpl.xml diff --git a/p4a/pythonforandroid/bootstraps/lbry/build/templates/themes.tmpl.xml b/p4a/pythonforandroidold/bootstraps/lbry/build/templates/themes.tmpl.xml similarity index 100% rename from p4a/pythonforandroid/bootstraps/lbry/build/templates/themes.tmpl.xml rename to p4a/pythonforandroidold/bootstraps/lbry/build/templates/themes.tmpl.xml diff --git a/p4a/pythonforandroidold/bootstraps/lbry/build/whitelist.txt b/p4a/pythonforandroidold/bootstraps/lbry/build/whitelist.txt new file mode 100644 index 0000000..41b06ee --- /dev/null +++ b/p4a/pythonforandroidold/bootstraps/lbry/build/whitelist.txt @@ -0,0 +1 @@ +# put files here that you need to un-blacklist diff --git a/p4a/pythonforandroidold/build.py b/p4a/pythonforandroidold/build.py new file mode 100644 index 0000000..374929d --- /dev/null +++ b/p4a/pythonforandroidold/build.py @@ -0,0 +1,900 @@ +from __future__ import print_function + +from os.path import (join, realpath, dirname, expanduser, exists, + split, isdir) +from os import environ +import copy +import os +import glob +import sys +import re +import sh +import subprocess + +from pythonforandroid.util import ( + current_directory, ensure_dir, get_virtualenv_executable, + BuildInterruptingException +) +from pythonforandroid.logger import (info, warning, info_notify, info_main, shprint) +from pythonforandroid.archs import ArchARM, ArchARMv7_a, ArchAarch_64, Archx86, Archx86_64 +from pythonforandroid.recipe import CythonRecipe, Recipe +from pythonforandroid.recommendations import ( + check_ndk_version, check_target_api, check_ndk_api, + RECOMMENDED_NDK_API, RECOMMENDED_TARGET_API) + + +class Context(object): + '''A build context. 
If anything is to be built, an instance of this class
+ will be instantiated and used to hold all the build state.'''
+
+ env = environ.copy()
+ # the filepath of toolchain.py
+ root_dir = None
+ # the root dir where builds and dists will be stored
+ storage_dir = None
+
+ # the dir in which bootstraps are copied for building
+ # and recipes are built
+ build_dir = None
+ # the Android project folder where everything ends up
+ dist_dir = None
+ # where Android libs are cached after build
+ # but before being placed in dists
+ libs_dir = None
+ aars_dir = None
+
+ ccache = None # whether to use ccache
+ cython = None # the cython interpreter name
+
+ ndk_platform = None # the ndk platform directory
+
+ dist_name = None # should be deprecated in favour of self.dist.dist_name
+ bootstrap = None
+ bootstrap_build_dir = None
+
+ recipe_build_order = None # Will hold the list of all built recipes
+
+ symlink_java_src = False # If True, will symlink instead of copying during build
+
+ java_build_tool = 'auto'
+
+ @property
+ def packages_path(self):
+ '''Where packages are downloaded before being unpacked'''
+ return join(self.storage_dir, 'packages')
+
+ @property
+ def templates_dir(self):
+ return join(self.root_dir, 'templates')
+
+ @property
+ def libs_dir(self):
+ # Was previously hardcoded as self.build_dir/libs
+ dir = join(self.build_dir, 'libs_collections',
+ self.bootstrap.distribution.name)
+ ensure_dir(dir)
+ return dir
+
+ @property
+ def javaclass_dir(self):
+ # Was previously hardcoded as self.build_dir/java
+ dir = join(self.build_dir, 'javaclasses',
+ self.bootstrap.distribution.name)
+ ensure_dir(dir)
+ return dir
+
+ @property
+ def aars_dir(self):
+ dir = join(self.build_dir, 'aars', self.bootstrap.distribution.name)
+ ensure_dir(dir)
+ return dir
+
+ @property
+ def python_installs_dir(self):
+ dir = join(self.build_dir, 'python-installs')
+ ensure_dir(dir)
+ return dir
+
+ def get_python_install_dir(self):
+ dir = join(self.python_installs_dir, self.bootstrap.distribution.name)
+ return dir
+
+ def setup_dirs(self, storage_dir):
+ '''Calculates all the storage and build dirs, and makes sure
+ the directories exist where necessary.'''
+ self.storage_dir = expanduser(storage_dir)
+ if ' ' in self.storage_dir:
+ raise ValueError('storage dir path cannot contain spaces, please '
+ 'specify a path with --storage-dir')
+ self.build_dir = join(self.storage_dir, 'build')
+ self.dist_dir = join(self.storage_dir, 'dists')
+
+ def ensure_dirs(self):
+ ensure_dir(self.storage_dir)
+ ensure_dir(self.build_dir)
+ ensure_dir(self.dist_dir)
+ ensure_dir(join(self.build_dir, 'bootstrap_builds'))
+ ensure_dir(join(self.build_dir, 'other_builds'))
+
+ @property
+ def android_api(self):
+ '''The Android API being targeted.'''
+ if self._android_api is None:
+ raise ValueError('Tried to access android_api but it has not '
+ 'been set - this should not happen, something '
+ 'went wrong!')
+ return self._android_api
+
+ @android_api.setter
+ def android_api(self, value):
+ self._android_api = value
+
+ @property
+ def ndk_api(self):
+ '''The NDK API number to compile against'''
+ if self._ndk_api is None:
+ raise ValueError('Tried to access ndk_api but it has not '
+ 'been set - this should not happen, something '
+ 'went wrong!')
+ return self._ndk_api
+
+ @ndk_api.setter
+ def ndk_api(self, value):
+ self._ndk_api = value
+
+ @property
+ def sdk_dir(self):
+ '''The path to the Android SDK.'''
+ if self._sdk_dir is None:
+ raise ValueError('Tried to access sdk_dir but it has not '
+ 'been set - this should not
happen, something '
+ 'went wrong!')
+ return self._sdk_dir
+
+ @sdk_dir.setter
+ def sdk_dir(self, value):
+ self._sdk_dir = value
+
+ @property
+ def ndk_dir(self):
+ '''The path to the Android NDK.'''
+ if self._ndk_dir is None:
+ raise ValueError('Tried to access ndk_dir but it has not '
+ 'been set - this should not happen, something '
+ 'went wrong!')
+ return self._ndk_dir
+
+ @ndk_dir.setter
+ def ndk_dir(self, value):
+ self._ndk_dir = value
+
+ def prepare_build_environment(self,
+ user_sdk_dir,
+ user_ndk_dir,
+ user_android_api,
+ user_ndk_api):
+ '''Checks that build dependencies exist and sets internal variables
+ for the Android SDK etc.
+
+ .. warning:: This *must* be called before trying any build stuff
+
+ '''
+
+ self.ensure_dirs()
+
+ if self._build_env_prepared:
+ return
+
+ ok = True
+
+ # Work out where the Android SDK is
+ sdk_dir = None
+ if user_sdk_dir:
+ sdk_dir = user_sdk_dir
+ # This is the old P4A-specific var
+ if sdk_dir is None:
+ sdk_dir = environ.get('ANDROIDSDK', None)
+ # This seems used more conventionally
+ if sdk_dir is None:
+ sdk_dir = environ.get('ANDROID_HOME', None)
+ # Checks in the buildozer SDK dir, useful for debug tests of p4a
+ if sdk_dir is None:
+ possible_dirs = glob.glob(expanduser(join(
+ '~', '.buildozer', 'android', 'platform', 'android-sdk-*')))
+ possible_dirs = [d for d in possible_dirs if not
+ (d.endswith('.bz2') or d.endswith('.gz'))]
+ if possible_dirs:
+ info('Found possible SDK dirs in buildozer dir: {}'.format(
+ ', '.join([d.split(os.sep)[-1] for d in possible_dirs])))
+ info('Will attempt to use SDK at {}'.format(possible_dirs[0]))
+ warning('This SDK lookup is intended for debug only; if you '
+ 'use python-for-android much you should probably '
+ 'maintain your own SDK download.')
+ sdk_dir = possible_dirs[0]
+ if sdk_dir is None:
+ raise BuildInterruptingException('Android SDK dir was not specified, exiting.')
+ self.sdk_dir = realpath(sdk_dir)
+
+ # Check what Android API we're using
+ android_api = None
+ if user_android_api:
+ android_api = user_android_api
+ info('Getting Android API version from user argument: {}'.format(android_api))
+ elif 'ANDROIDAPI' in environ:
+ android_api = environ['ANDROIDAPI']
+ info('Found Android API target in $ANDROIDAPI: {}'.format(android_api))
+ else:
+ info('Android API target was not set manually, using '
+ 'the default of {}'.format(RECOMMENDED_TARGET_API))
+ android_api = RECOMMENDED_TARGET_API
+ android_api = int(android_api)
+ self.android_api = android_api
+
+ check_target_api(android_api, self.archs[0].arch)
+
+ if exists(join(sdk_dir, 'tools', 'bin', 'avdmanager')):
+ avdmanager = sh.Command(join(sdk_dir, 'tools', 'bin', 'avdmanager'))
+ targets = avdmanager('list', 'target').stdout.decode('utf-8').split('\n')
+ elif exists(join(sdk_dir, 'tools', 'android')):
+ android = sh.Command(join(sdk_dir, 'tools', 'android'))
+ targets = android('list').stdout.decode('utf-8').split('\n')
+ else:
+ raise BuildInterruptingException(
+ 'Could not find `avdmanager` or `android` binaries in the Android SDK',
+ instructions='Make sure the path to the Android SDK is correct')
+ apis = [s for s in targets if re.match(r'^ *API level: ', s)]
+ apis = [re.findall(r'[0-9]+', s) for s in apis]
+ apis = [int(s[0]) for s in apis if s]
+ info('Available Android APIs are ({})'.format(
+ ', '.join(map(str, apis))))
+ if android_api in apis:
+ info(('Requested API target {} is available, '
+ 'continuing.').format(android_api))
+ else:
+ raise BuildInterruptingException(
+ ('Requested API target {} is not
available, install '
+ 'it with the SDK android tool.').format(android_api))
+
+ # Find the Android NDK
+ # Could also use ANDROID_NDK, but doesn't look like many tools use this
+ ndk_dir = None
+ if user_ndk_dir:
+ ndk_dir = user_ndk_dir
+ info('Getting NDK dir from user argument')
+ if ndk_dir is None: # The old P4A-specific dir
+ ndk_dir = environ.get('ANDROIDNDK', None)
+ if ndk_dir is not None:
+ info('Found NDK dir in $ANDROIDNDK: {}'.format(ndk_dir))
+ if ndk_dir is None: # Apparently the most common convention
+ ndk_dir = environ.get('NDK_HOME', None)
+ if ndk_dir is not None:
+ info('Found NDK dir in $NDK_HOME: {}'.format(ndk_dir))
+ if ndk_dir is None: # Another convention (with maven?)
+ ndk_dir = environ.get('ANDROID_NDK_HOME', None)
+ if ndk_dir is not None:
+ info('Found NDK dir in $ANDROID_NDK_HOME: {}'.format(ndk_dir))
+ if ndk_dir is None: # Checks in the buildozer NDK dir, useful
+ # for debug tests of p4a
+ possible_dirs = glob.glob(expanduser(join(
+ '~', '.buildozer', 'android', 'platform', 'android-ndk-r*')))
+ if possible_dirs:
+ info('Found possible NDK dirs in buildozer dir: {}'.format(
+ ', '.join([d.split(os.sep)[-1] for d in possible_dirs])))
+ info('Will attempt to use NDK at {}'.format(possible_dirs[0]))
+ warning('This NDK lookup is intended for debug only; if you '
+ 'use python-for-android much you should probably '
+ 'maintain your own NDK download.')
+ ndk_dir = possible_dirs[0]
+ if ndk_dir is None:
+ raise BuildInterruptingException('Android NDK dir was not specified')
+ self.ndk_dir = realpath(ndk_dir)
+
+ check_ndk_version(ndk_dir)
+
+ self.ndk = 'crystax' # force crystax detection
+
+ ndk_api = None
+ if user_ndk_api:
+ ndk_api = user_ndk_api
+ info('Getting NDK API version (i.e. minimum supported API) from user argument')
+ elif 'NDKAPI' in environ:
+ ndk_api = environ.get('NDKAPI', None)
+ info('Found NDK API target in $NDKAPI')
+ else:
+ ndk_api = min(self.android_api, RECOMMENDED_NDK_API)
+ warning('NDK API target was not set manually, using '
+ 'the default of {} = min(android-api={}, default ndk-api={})'.format(
+ ndk_api, self.android_api, RECOMMENDED_NDK_API))
+ ndk_api = int(ndk_api)
+ self.ndk_api = ndk_api
+
+ check_ndk_api(ndk_api, self.android_api)
+
+ virtualenv = get_virtualenv_executable()
+ if virtualenv is None:
+ raise IOError('Couldn\'t find a virtualenv executable, '
+ 'you must install this to use p4a.')
+ self.virtualenv = virtualenv
+ info('Found virtualenv at {}'.format(virtualenv))
+
+ # path to some tools
+ self.ccache = sh.which("ccache")
+ if not self.ccache:
+ info('ccache is missing, so builds will not benefit from compiler '
+ 'caching.')
+ for cython_fn in ("cython", "cython3", "cython2", "cython-2.7"):
+ cython = sh.which(cython_fn)
+ if cython:
+ self.cython = cython
+ break
+ else:
+ raise BuildInterruptingException('No cython binary found.')
+ if not self.cython:
+ ok = False
+ warning("Missing requirement: cython is not installed")
+
+ # This would need to be changed if supporting multiarch APKs
+ arch = self.archs[0]
+ platform_dir = arch.platform_dir
+ toolchain_prefix = arch.toolchain_prefix
+ toolchain_version = None
+ self.ndk_platform = join(
+ self.ndk_dir,
+ 'platforms',
+ 'android-{}'.format(self.ndk_api),
+ platform_dir)
+ if not exists(self.ndk_platform):
+ warning('ndk_platform doesn\'t exist: {}'.format(
+ self.ndk_platform))
+ ok = False
+
+ py_platform = sys.platform
+ if py_platform in ['linux2', 'linux3']:
+ py_platform = 'linux'
+
+ toolchain_versions = []
+ toolchain_path =
join(self.ndk_dir, 'toolchains') + if isdir(toolchain_path): + toolchain_contents = glob.glob('{}/{}-*'.format(toolchain_path, + toolchain_prefix)) + toolchain_versions = [split(path)[-1][len(toolchain_prefix) + 1:] + for path in toolchain_contents] + else: + warning('Could not find toolchain subdirectory!') + ok = False + toolchain_versions.sort() + + toolchain_versions_gcc = [] + for toolchain_version in toolchain_versions: + if toolchain_version[0].isdigit(): + # GCC toolchains begin with a number + toolchain_versions_gcc.append(toolchain_version) + + if toolchain_versions: + info('Found the following toolchain versions: {}'.format( + toolchain_versions)) + info('Picking the latest gcc toolchain, here {}'.format( + toolchain_versions_gcc[-1])) + toolchain_version = toolchain_versions_gcc[-1] + else: + warning('Could not find any toolchain for {}!'.format( + toolchain_prefix)) + ok = False + + self.toolchain_prefix = toolchain_prefix + self.toolchain_version = toolchain_version + # Modify the path so that sh finds modules appropriately + environ['PATH'] = ( + '{ndk_dir}/toolchains/{toolchain_prefix}-{toolchain_version}/' + 'prebuilt/{py_platform}-x86/bin/:{ndk_dir}/toolchains/' + '{toolchain_prefix}-{toolchain_version}/prebuilt/' + '{py_platform}-x86_64/bin/:{ndk_dir}:{sdk_dir}/' + 'tools:{path}').format( + sdk_dir=self.sdk_dir, ndk_dir=self.ndk_dir, + toolchain_prefix=toolchain_prefix, + toolchain_version=toolchain_version, + py_platform=py_platform, path=environ.get('PATH')) + + for executable in ("pkg-config", "autoconf", "automake", "libtoolize", + "tar", "bzip2", "unzip", "make", "gcc", "g++"): + if not sh.which(executable): + warning("Missing executable: {} is not installed".format( + executable)) + + if not ok: + raise BuildInterruptingException( + 'python-for-android cannot continue due to the missing executables above') + + def __init__(self): + super(Context, self).__init__() + self.include_dirs = [] + + self._build_env_prepared = False + + self._sdk_dir = None + self._ndk_dir = None + self._android_api = None + self._ndk_api = None + self.ndk = None + + self.toolchain_prefix = None + self.toolchain_version = None + + self.local_recipes = None + self.copy_libs = False + + # this list should contain all Archs, it is pruned later + self.archs = ( + ArchARM(self), + ArchARMv7_a(self), + Archx86(self), + Archx86_64(self), + ArchAarch_64(self), + ) + + self.root_dir = realpath(dirname(__file__)) + + # remove the most obvious flags that can break the compilation + self.env.pop("LDFLAGS", None) + self.env.pop("ARCHFLAGS", None) + self.env.pop("CFLAGS", None) + + self.python_recipe = None # Set by TargetPythonRecipe + + def set_archs(self, arch_names): + all_archs = self.archs + new_archs = set() + for name in arch_names: + matching = [arch for arch in all_archs if arch.arch == name] + for match in matching: + new_archs.add(match) + self.archs = list(new_archs) + if not self.archs: + raise BuildInterruptingException('Asked to compile for no Archs, so failing.') + info('Will compile for the following archs: {}'.format( + ', '.join([arch.arch for arch in self.archs]))) + + def prepare_bootstrap(self, bs): + bs.ctx = self + self.bootstrap = bs + self.bootstrap.prepare_build_dir() + self.bootstrap_build_dir = self.bootstrap.build_dir + + def prepare_dist(self, name): + self.dist_name = name + self.bootstrap.prepare_dist_dir(self.dist_name) + + def get_site_packages_dir(self, arch=None): + '''Returns the location of site-packages in the python-install build + dir. 
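+ For python3 this is the python-install dir itself; only the
+ python2legacy recipe keeps a separate lib/python2.7/site-packages tree.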
+ ''' + if self.python_recipe.name == 'python2legacy': + return join(self.get_python_install_dir(), + 'lib', 'python2.7', 'site-packages') + return self.get_python_install_dir() + + def get_libs_dir(self, arch): + '''The libs dir for a given arch.''' + ensure_dir(join(self.libs_dir, arch)) + return join(self.libs_dir, arch) + + def has_lib(self, arch, lib): + return exists(join(self.get_libs_dir(arch), lib)) + + def has_package(self, name, arch=None): + # If this is a file path, it'll need special handling: + if (name.find("/") >= 0 or name.find("\\") >= 0) and \ + name.find("://") < 0: # (:// would indicate an url) + if not os.path.exists(name): + # Non-existing dir, cannot look this up. + return False + if os.path.exists(os.path.join(name, "setup.py")): + # Get name from setup.py: + name = subprocess.check_output([ + sys.executable, "setup.py", "--name"], + cwd=name) + try: + name = name.decode('utf-8', 'replace') + except AttributeError: + pass + name = name.strip() + if len(name) == 0: + # Failed to look up any meaningful name. + return False + else: + # A folder with whatever, cannot look this up. + return False + + # Try to look up recipe by name: + try: + recipe = Recipe.get_recipe(name, self) + except ValueError: + pass + else: + name = getattr(recipe, 'site_packages_name', None) or name + name = name.replace('.', '/') + site_packages_dir = self.get_site_packages_dir(arch) + return (exists(join(site_packages_dir, name)) or + exists(join(site_packages_dir, name + '.py')) or + exists(join(site_packages_dir, name + '.pyc')) or + exists(join(site_packages_dir, name + '.pyo')) or + exists(join(site_packages_dir, name + '.so')) or + glob.glob(join(site_packages_dir, name + '-*.egg'))) + + def not_has_package(self, name, arch=None): + return not self.has_package(name, arch) + + +def build_recipes(build_order, python_modules, ctx): + # Put recipes in correct build order + info_notify("Recipe build order is {}".format(build_order)) + if python_modules: + python_modules = sorted(set(python_modules)) + info_notify( + ('The requirements ({}) were not found as recipes, they will be ' + 'installed with pip.').format(', '.join(python_modules))) + + recipes = [Recipe.get_recipe(name, ctx) for name in build_order] + + # download is arch independent + info_main('# Downloading recipes ') + for recipe in recipes: + recipe.download_if_necessary() + + for arch in ctx.archs: + info_main('# Building all recipes for arch {}'.format(arch.arch)) + + info_main('# Unpacking recipes') + for recipe in recipes: + ensure_dir(recipe.get_build_container_dir(arch.arch)) + recipe.prepare_build_dir(arch.arch) + + info_main('# Prebuilding recipes') + # 2) prebuild packages + for recipe in recipes: + info_main('Prebuilding {} for {}'.format(recipe.name, arch.arch)) + recipe.prebuild_arch(arch) + recipe.apply_patches(arch) + + # 3) build packages + info_main('# Building recipes') + for recipe in recipes: + info_main('Building {} for {}'.format(recipe.name, arch.arch)) + if recipe.should_build(arch): + recipe.build_arch(arch) + else: + info('{} said it is already built, skipping' + .format(recipe.name)) + + # 4) biglink everything + info_main('# Biglinking object files') + if not ctx.python_recipe or not ctx.python_recipe.from_crystax: + biglink(ctx, arch) + else: + info('NDK is crystax, skipping biglink (will this work?)') + + # 5) postbuild packages + info_main('# Postbuilding recipes') + for recipe in recipes: + info_main('Postbuilding {} for {}'.format(recipe.name, arch.arch)) + recipe.postbuild_arch(arch) + + 
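+ # 6) install the remaining pure-Python requirements (those without a
+ # recipe) with pip; this runs once rather than per arch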
info_main('# Installing pure Python modules') + run_pymodules_install(ctx, python_modules) + + return + + +def run_pymodules_install(ctx, modules): + modules = list(filter(ctx.not_has_package, modules)) + + if not modules: + info('There are no Python modules to install, skipping') + return + + info('The requirements ({}) don\'t have recipes, attempting to install ' + 'them with pip'.format(', '.join(modules))) + info('If this fails, it may mean that the module has compiled ' + 'components and needs a recipe.') + + venv = sh.Command(ctx.virtualenv) + with current_directory(join(ctx.build_dir)): + shprint(venv, + '--python=python{}.{}'.format( + ctx.python_recipe.major_minor_version_string.partition(".")[0], + ctx.python_recipe.major_minor_version_string.partition(".")[2] + ), + 'venv' + ) + + info('Creating a requirements.txt file for the Python modules') + with open('requirements.txt', 'w') as fileh: + for module in modules: + key = 'VERSION_' + module + if key in environ: + line = '{}=={}\n'.format(module, environ[key]) + else: + line = '{}\n'.format(module) + fileh.write(line) + + # Prepare base environment and upgrade pip: + base_env = copy.copy(os.environ) + base_env["PYTHONPATH"] = ctx.get_site_packages_dir() + info('Upgrade pip to latest version') + shprint(sh.bash, '-c', ( + "source venv/bin/activate && pip install -U pip" + ), _env=copy.copy(base_env)) + + # Install Cython in case modules need it to build: + info('Install Cython in case one of the modules needs it to build') + shprint(sh.bash, '-c', ( + "venv/bin/pip install Cython" + ), _env=copy.copy(base_env)) + + # Get environment variables for build (with CC/compiler set): + standard_recipe = CythonRecipe() + standard_recipe.ctx = ctx + # (note: following line enables explicit -lpython... linker options) + standard_recipe.call_hostpython_via_targetpython = False + recipe_env = standard_recipe.get_recipe_env(ctx.archs[0]) + env = copy.copy(base_env) + env.update(recipe_env) + + info('Installing Python modules with pip') + info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. 
' + 'A reason for this is often modules compiling ' + 'native code that is unaware of Android cross-compilation ' + 'and does not work without additional ' + 'changes / workarounds.') + + # Make sure our build package dir is available, and the virtualenv + # site packages come FIRST (so the proper pip version is used): + env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir() + env["PYTHONPATH"] = os.path.abspath(join( + ctx.build_dir, "venv", "lib", + "python" + ctx.python_recipe.major_minor_version_string, + "site-packages")) + ":" + env["PYTHONPATH"] + + ''' + # Do actual install: + shprint(sh.bash, '-c', ( + "venv/bin/pip " + + "install -v --target '{0}' --no-deps -r requirements.txt" + ).format(ctx.get_site_packages_dir().replace("'", "'\"'\"'")), + _env=copy.copy(env)) + ''' + + # use old install script + shprint(sh.bash, '-c', ( + "source venv/bin/activate && env CC=/bin/false CXX=/bin/false " + "PYTHONPATH={0} pip install --target '{0}' --no-deps -r requirements.txt" + ).format(ctx.get_site_packages_dir())) + + # Strip object files after potential Cython or native code builds: + standard_recipe.strip_object_files(ctx.archs[0], env, + build_dir=ctx.build_dir) + + +def biglink(ctx, arch): + # First, collate object files from each recipe + info('Collating object files from each recipe') + obj_dir = join(ctx.bootstrap.build_dir, 'collated_objects') + ensure_dir(obj_dir) + recipes = [Recipe.get_recipe(name, ctx) for name in ctx.recipe_build_order] + for recipe in recipes: + recipe_obj_dir = join(recipe.get_build_container_dir(arch.arch), + 'objects_{}'.format(recipe.name)) + if not exists(recipe_obj_dir): + info('{} recipe has no biglinkable files dir, skipping' + .format(recipe.name)) + continue + files = glob.glob(join(recipe_obj_dir, '*')) + if not len(files): + info('{} recipe has no biglinkable files, skipping' + .format(recipe.name)) + continue + info('{} recipe has object files, copying'.format(recipe.name)) + files.append(obj_dir) + shprint(sh.cp, '-r', *files) + + env = arch.get_env() + env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format( + join(ctx.bootstrap.build_dir, 'obj', 'local', arch.arch)) + + if not len(glob.glob(join(obj_dir, '*'))): + info('There seem to be no libraries to biglink, skipping.') + return + info('Biglinking') + info('target {}'.format(join(ctx.get_libs_dir(arch.arch), + 'libpymodules.so'))) + do_biglink = copylibs_function if ctx.copy_libs else biglink_function + + # Move to the directory containing crtstart_so.o and crtend_so.o + # This is necessary with newer NDKs? A gcc bug? + with current_directory(join(ctx.ndk_platform, 'usr', 'lib')): + do_biglink( + join(ctx.get_libs_dir(arch.arch), 'libpymodules.so'), + obj_dir.split(' '), + extra_link_dirs=[join(ctx.bootstrap.build_dir, + 'obj', 'local', arch.arch), + os.path.abspath('.')], + env=env) + + +def biglink_function(soname, objs_paths, extra_link_dirs=[], env=None): + print('objs_paths are', objs_paths) + sofiles = [] + + for directory in objs_paths: + for fn in os.listdir(directory): + fn = os.path.join(directory, fn) + + if not fn.endswith(".so.o"): + continue + if not os.path.exists(fn[:-2] + ".libs"): + continue + + sofiles.append(fn[:-2]) + + # The raw argument list. 
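+    # Each stem in sofiles has two companion files: "<stem>.o" (the compiled
+    # object) and "<stem>.libs" (a space-separated list of linker arguments
+    # recorded when the object was built). Everything is collected into one
+    # flat argument list, which is then de-duplicated below while keeping the
+    # *last* occurrence of each repeated argument, since order matters to
+    # the linker.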
+ args = [] + + for fn in sofiles: + afn = fn + ".o" + libsfn = fn + ".libs" + + args.append(afn) + with open(libsfn) as fd: + data = fd.read() + args.extend(data.split(" ")) + + unique_args = [] + while args: + a = args.pop() + if a in ('-L', ): + continue + if a not in unique_args: + unique_args.insert(0, a) + + for dir in extra_link_dirs: + link = '-L{}'.format(dir) + if link not in unique_args: + unique_args.append(link) + + cc_name = env['CC'] + cc = sh.Command(cc_name.split()[0]) + cc = cc.bake(*cc_name.split()[1:]) + + shprint(cc, '-shared', '-O3', '-o', soname, *unique_args, _env=env) + + +def copylibs_function(soname, objs_paths, extra_link_dirs=[], env=None): + print('objs_paths are', objs_paths) + + re_needso = re.compile(r'^.*\(NEEDED\)\s+Shared library: \[lib(.*)\.so\]\s*$') + blacklist_libs = ( + 'c', + 'stdc++', + 'dl', + 'python2.7', + 'sdl', + 'sdl_image', + 'sdl_ttf', + 'z', + 'm', + 'GLESv2', + 'jpeg', + 'png', + 'log', + + # bootstrap takes care of sdl2 libs (if applicable) + 'SDL2', + 'SDL2_ttf', + 'SDL2_image', + 'SDL2_mixer', + ) + found_libs = [] + sofiles = [] + if env and 'READELF' in env: + readelf = env['READELF'] + elif 'READELF' in os.environ: + readelf = os.environ['READELF'] + else: + readelf = sh.which('readelf').strip() + readelf = sh.Command(readelf).bake('-d') + + dest = dirname(soname) + + for directory in objs_paths: + for fn in os.listdir(directory): + fn = join(directory, fn) + + if not fn.endswith('.libs'): + continue + + dirfn = fn[:-1] + 'dirs' + if not exists(dirfn): + continue + + with open(fn) as f: + libs = f.read().strip().split(' ') + needed_libs = [lib for lib in libs + if lib and + lib not in blacklist_libs and + lib not in found_libs] + + while needed_libs: + print('need libs:\n\t' + '\n\t'.join(needed_libs)) + + start_needed_libs = needed_libs[:] + found_sofiles = [] + + with open(dirfn) as f: + libdirs = f.read().split() + for libdir in libdirs: + if not needed_libs: + break + + if libdir == dest: + # don't need to copy from dest to dest! 
+                            continue
+
+                        libdir = libdir.strip()
+                        print('scanning', libdir)
+                        for lib in needed_libs[:]:
+                            if lib in found_libs:
+                                continue
+
+                            if lib.endswith('.a'):
+                                needed_libs.remove(lib)
+                                found_libs.append(lib)
+                                continue
+
+                            lib_a = 'lib' + lib + '.a'
+                            libpath_a = join(libdir, lib_a)
+                            lib_so = 'lib' + lib + '.so'
+                            libpath_so = join(libdir, lib_so)
+                            plain_so = lib + '.so'
+                            plainpath_so = join(libdir, plain_so)
+
+                            sopath = None
+                            if exists(libpath_so):
+                                sopath = libpath_so
+                            elif exists(plainpath_so):
+                                sopath = plainpath_so
+
+                            if sopath:
+                                print('found', lib, 'in', libdir)
+                                found_sofiles.append(sopath)
+                                needed_libs.remove(lib)
+                                found_libs.append(lib)
+                                continue
+
+                            if exists(libpath_a):
+                                print('found', lib, '(static) in', libdir)
+                                needed_libs.remove(lib)
+                                found_libs.append(lib)
+                                continue
+
+                for sofile in found_sofiles:
+                    print('scanning dependencies for', sofile)
+                    out = readelf(sofile)
+                    for line in out.splitlines():
+                        needso = re_needso.match(line)
+                        if needso:
+                            lib = needso.group(1)
+                            if (lib not in needed_libs
+                                    and lib not in found_libs
+                                    and lib not in blacklist_libs):
+                                needed_libs.append(needso.group(1))
+
+                sofiles += found_sofiles
+
+                if needed_libs == start_needed_libs:
+                    raise RuntimeError(
+                        'Failed to locate needed libraries!\n\t' +
+                        '\n\t'.join(needed_libs))
+
+    print('Copying libraries')
+    for lib in sofiles:
+        shprint(sh.cp, lib, dest)
diff --git a/p4a/pythonforandroidold/distribution.py b/p4a/pythonforandroidold/distribution.py
new file mode 100644
index 0000000..9fa7b4c
--- /dev/null
+++ b/p4a/pythonforandroidold/distribution.py
@@ -0,0 +1,237 @@
+from os.path import exists, join
+import glob
+import json
+
+from pythonforandroid.logger import (info, info_notify, warning, Err_Style, Err_Fore)
+from pythonforandroid.util import current_directory, BuildInterruptingException
+from shutil import rmtree
+
+
+class Distribution(object):
+    '''State container for information about a distribution (i.e. an
+    Android project).
+
+    This is separate from a Bootstrap because the Bootstrap is
+    concerned with building and populating the dist directory, whereas
+    the dist itself could also come from e.g. a binary download.
+    '''
+    ctx = None
+
+    name = None  # A name identifying the dist. May not be None.
+    needs_build = False  # Whether the dist needs compiling
+    url = None
+    dist_dir = None  # Where the dist dir ultimately is. Should not be None.
+    ndk_api = None
+
+    archs = []
+    '''The arch targets that the dist is built for.'''
+
+    recipes = []
+
+    description = ''  # A long description
+
+    def __init__(self, ctx):
+        self.ctx = ctx
+
+    def __str__(self):
+        return '<Distribution: name {} with recipes ({})>'.format(
+            # self.name, ', '.join([recipe.name for recipe in self.recipes]))
+            self.name, ', '.join(self.recipes))
+
+    def __repr__(self):
+        return str(self)
+
+    @classmethod
+    def get_distribution(cls, ctx, name=None, recipes=[],
+                         ndk_api=None,
+                         force_build=False,
+                         extra_dist_dirs=[],
+                         require_perfect_match=False,
+                         allow_replace_dist=True):
+        '''Takes information about the distribution, and decides what kind of
+        distribution it will be.
+
+        If parameters conflict (e.g. a dist with that name already
+        exists, but doesn't have the right set of recipes),
+        an error is raised.
+
+        Parameters
+        ----------
+        name : str
+            The name of the distribution. If a dist with this name already
+            exists, it will be used.
+        recipes : list
+            The recipes that the distribution must contain.
+ force_build : bool + If True, the dist is forced to be built locally. + extra_dist_dirs : list + Any extra directories in which to search for dists. + require_perfect_match : bool + If True, will only match distributions with precisely the + correct set of recipes. + allow_replace_dist : bool + If True, will allow an existing dist with the specified + name but incompatible requirements to be overwritten by + a new one with the current requirements. + ''' + + existing_dists = Distribution.get_distributions(ctx) + + possible_dists = existing_dists + + name_match_dist = None + + # 0) Check if a dist with that name already exists + if name is not None and name: + possible_dists = [d for d in possible_dists if d.name == name] + if possible_dists: + name_match_dist = possible_dists[0] + + # 1) Check if any existing dists meet the requirements + _possible_dists = [] + for dist in possible_dists: + if ( + ndk_api is not None and dist.ndk_api != ndk_api + ) or dist.ndk_api is None: + continue + for recipe in recipes: + if recipe not in dist.recipes: + break + else: + _possible_dists.append(dist) + possible_dists = _possible_dists + + if possible_dists: + info('Of the existing distributions, the following meet ' + 'the given requirements:') + pretty_log_dists(possible_dists) + else: + info('No existing dists meet the given requirements!') + + # If any dist has perfect recipes and ndk API, return it + for dist in possible_dists: + if force_build: + continue + if ndk_api is not None and dist.ndk_api != ndk_api: + continue + if (set(dist.recipes) == set(recipes) or + (set(recipes).issubset(set(dist.recipes)) and + not require_perfect_match)): + info_notify('{} has compatible recipes, using this one' + .format(dist.name)) + return dist + + assert len(possible_dists) < 2 + + # If there was a name match but we didn't already choose it, + # then the existing dist is incompatible with the requested + # configuration and the build cannot continue + if name_match_dist is not None and not allow_replace_dist: + raise BuildInterruptingException( + 'Asked for dist with name {name} with recipes ({req_recipes}) and ' + 'NDK API {req_ndk_api}, but a dist ' + 'with this name already exists and has either incompatible recipes ' + '({dist_recipes}) or NDK API {dist_ndk_api}'.format( + name=name, + req_ndk_api=ndk_api, + dist_ndk_api=name_match_dist.ndk_api, + req_recipes=', '.join(recipes), + dist_recipes=', '.join(name_match_dist.recipes))) + + # If we got this far, we need to build a new dist + dist = Distribution(ctx) + dist.needs_build = True + + if not name: + filen = 'unnamed_dist_{}' + i = 1 + while exists(join(ctx.dist_dir, filen.format(i))): + i += 1 + name = filen.format(i) + + dist.name = name + dist.dist_dir = join(ctx.dist_dir, dist.name) + dist.recipes = recipes + dist.ndk_api = ctx.ndk_api + + return dist + + def folder_exists(self): + return exists(self.dist_dir) + + def delete(self): + rmtree(self.dist_dir) + + @classmethod + def get_distributions(cls, ctx, extra_dist_dirs=[]): + '''Returns all the distributions found locally.''' + if extra_dist_dirs: + raise BuildInterruptingException( + 'extra_dist_dirs argument to get_distributions ' + 'is not yet implemented') + dist_dir = ctx.dist_dir + folders = glob.glob(join(dist_dir, '*')) + for dir in extra_dist_dirs: + folders.extend(glob.glob(join(dir, '*'))) + + dists = [] + for folder in folders: + if exists(join(folder, 'dist_info.json')): + with open(join(folder, 'dist_info.json')) as fileh: + dist_info = json.load(fileh) + dist = cls(ctx) + dist.name 
= folder.split('/')[-1] + dist.dist_dir = folder + dist.needs_build = False + dist.recipes = dist_info['recipes'] + if 'archs' in dist_info: + dist.archs = dist_info['archs'] + if 'ndk_api' in dist_info: + dist.ndk_api = dist_info['ndk_api'] + else: + dist.ndk_api = None + warning( + "Distribution {distname}: ({distdir}) has been " + "built with an unknown api target, ignoring it, " + "you might want to delete it".format( + distname=dist.name, + distdir=dist.dist_dir + ) + ) + dists.append(dist) + return dists + + def save_info(self, dirn): + ''' + Save information about the distribution in its dist_dir. + ''' + with current_directory(dirn): + info('Saving distribution info') + with open('dist_info.json', 'w') as fileh: + json.dump({'dist_name': self.ctx.dist_name, + 'bootstrap': self.ctx.bootstrap.name, + 'archs': [arch.arch for arch in self.ctx.archs], + 'ndk_api': self.ctx.ndk_api, + 'recipes': self.ctx.recipe_build_order + self.ctx.python_modules, + 'hostpython': self.ctx.hostpython, + 'python_version': self.ctx.python_recipe.major_minor_version_string}, + fileh) + + +def pretty_log_dists(dists, log_func=info): + infos = [] + for dist in dists: + ndk_api = 'unknown' if dist.ndk_api is None else dist.ndk_api + infos.append('{Fore.GREEN}{Style.BRIGHT}{name}{Style.RESET_ALL}: min API {ndk_api}, ' + 'includes recipes ({Fore.GREEN}{recipes}' + '{Style.RESET_ALL}), built for archs ({Fore.BLUE}' + '{archs}{Style.RESET_ALL})'.format( + ndk_api=ndk_api, + name=dist.name, recipes=', '.join(dist.recipes), + archs=', '.join(dist.archs) if dist.archs else 'UNKNOWN', + Fore=Err_Fore, Style=Err_Style)) + + for line in infos: + log_func('\t' + line) diff --git a/p4a/pythonforandroidold/graph.py b/p4a/pythonforandroidold/graph.py new file mode 100644 index 0000000..646a66e --- /dev/null +++ b/p4a/pythonforandroidold/graph.py @@ -0,0 +1,340 @@ +from copy import deepcopy +from itertools import product + +from pythonforandroid.logger import info +from pythonforandroid.recipe import Recipe +from pythonforandroid.bootstrap import Bootstrap +from pythonforandroid.util import BuildInterruptingException + + +def fix_deplist(deps): + """ Turn a dependency list into lowercase, and make sure all entries + that are just a string become a tuple of strings + """ + deps = [ + ((dep.lower(),) + if not isinstance(dep, (list, tuple)) + else tuple([dep_entry.lower() + for dep_entry in dep + ])) + for dep in deps + ] + return deps + + +class RecipeOrder(dict): + def __init__(self, ctx): + self.ctx = ctx + + def conflicts(self): + for name in self.keys(): + try: + recipe = Recipe.get_recipe(name, self.ctx) + conflicts = [dep.lower() for dep in recipe.conflicts] + except ValueError: + conflicts = [] + + if any([c in self for c in conflicts]): + return True + return False + + +def get_dependency_tuple_list_for_recipe(recipe, blacklist=None): + """ Get the dependencies of a recipe with filtered out blacklist, and + turned into tuples with fix_deplist() + """ + if blacklist is None: + blacklist = set() + assert(type(blacklist) == set) + if recipe.depends is None: + dependencies = [] + else: + # Turn all dependencies into tuples so that product will work + dependencies = fix_deplist(recipe.depends) + + # Filter out blacklisted items and turn lowercase: + dependencies = [ + tuple(set(deptuple) - blacklist) + for deptuple in dependencies + if tuple(set(deptuple) - blacklist) + ] + return dependencies + + +def recursively_collect_orders( + name, ctx, all_inputs, orders=None, blacklist=None + ): + '''For each possible recipe 
ordering, try to add the new recipe name + to that order. Recursively do the same thing with all the + dependencies of each recipe. + + ''' + name = name.lower() + if orders is None: + orders = [] + if blacklist is None: + blacklist = set() + try: + recipe = Recipe.get_recipe(name, ctx) + dependencies = get_dependency_tuple_list_for_recipe( + recipe, blacklist=blacklist + ) + + # handle opt_depends: these impose requirements on the build + # order only if already present in the list of recipes to build + dependencies.extend(fix_deplist( + [[d] for d in recipe.get_opt_depends_in_list(all_inputs) + if d.lower() not in blacklist] + )) + + if recipe.conflicts is None: + conflicts = [] + else: + conflicts = [dep.lower() for dep in recipe.conflicts] + except ValueError: + # The recipe does not exist, so we assume it can be installed + # via pip with no extra dependencies + dependencies = [] + conflicts = [] + + new_orders = [] + # for each existing recipe order, see if we can add the new recipe name + for order in orders: + if name in order: + new_orders.append(deepcopy(order)) + continue + if order.conflicts(): + continue + if any([conflict in order for conflict in conflicts]): + continue + + for dependency_set in product(*dependencies): + new_order = deepcopy(order) + new_order[name] = set(dependency_set) + + dependency_new_orders = [new_order] + for dependency in dependency_set: + dependency_new_orders = recursively_collect_orders( + dependency, ctx, all_inputs, dependency_new_orders, + blacklist=blacklist + ) + + new_orders.extend(dependency_new_orders) + + return new_orders + + +def find_order(graph): + ''' + Do a topological sort on the dependency graph dict. + ''' + while graph: + # Find all items without a parent + leftmost = [l for l, s in graph.items() if not s] + if not leftmost: + raise ValueError('Dependency cycle detected! %s' % graph) + # If there is more than one, sort them for predictable order + leftmost.sort() + for result in leftmost: + # Yield and remove them from the graph + yield result + graph.pop(result) + for bset in graph.values(): + bset.discard(result) + + +def obvious_conflict_checker(ctx, name_tuples, blacklist=None): + """ This is a pre-flight check function that will completely ignore + recipe order or choosing an actual value in any of the multiple + choice tuples/dependencies, and just do a very basic obvious + conflict check. 
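+
+    For example, if two requested recipes directly list each other in
+    their ``conflicts``, this check raises immediately, without having
+    to explore any candidate build orders.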
+ """ + deps_were_added_by = dict() + deps = set() + if blacklist is None: + blacklist = set() + + # Add dependencies for all recipes: + to_be_added = [(name_tuple, None) for name_tuple in name_tuples] + while len(to_be_added) > 0: + current_to_be_added = list(to_be_added) + to_be_added = [] + for (added_tuple, adding_recipe) in current_to_be_added: + assert(type(added_tuple) == tuple) + if len(added_tuple) > 1: + # No obvious commitment in what to add, don't check it itself + # but throw it into deps for later comparing against + # (Remember this function only catches obvious issues) + deps.add(added_tuple) + continue + + name = added_tuple[0] + recipe_conflicts = set() + recipe_dependencies = [] + try: + # Get recipe to add and who's ultimately adding it: + recipe = Recipe.get_recipe(name, ctx) + recipe_conflicts = {c.lower() for c in recipe.conflicts} + recipe_dependencies = get_dependency_tuple_list_for_recipe( + recipe, blacklist=blacklist + ) + except ValueError: + pass + adder_first_recipe_name = adding_recipe or name + + # Collect the conflicts: + triggered_conflicts = [] + for dep_tuple_list in deps: + # See if the new deps conflict with things added before: + if set(dep_tuple_list).intersection( + recipe_conflicts) == set(dep_tuple_list): + triggered_conflicts.append(dep_tuple_list) + continue + + # See if what was added before conflicts with the new deps: + if len(dep_tuple_list) > 1: + # Not an obvious commitment to a specific recipe/dep + # to be added, so we won't check. + # (remember this function only catches obvious issues) + continue + try: + dep_recipe = Recipe.get_recipe(dep_tuple_list[0], ctx) + except ValueError: + continue + conflicts = [c.lower() for c in dep_recipe.conflicts] + if name in conflicts: + triggered_conflicts.append(dep_tuple_list) + + # Throw error on conflict: + if triggered_conflicts: + # Get first conflict and see who added that one: + adder_second_recipe_name = "'||'".join(triggered_conflicts[0]) + second_recipe_original_adder = deps_were_added_by.get( + (adder_second_recipe_name,), None + ) + if second_recipe_original_adder: + adder_second_recipe_name = second_recipe_original_adder + + # Prompt error: + raise BuildInterruptingException( + "Conflict detected: '{}'" + " inducing dependencies {}, and '{}'" + " inducing conflicting dependencies {}".format( + adder_first_recipe_name, + (recipe.name,), + adder_second_recipe_name, + triggered_conflicts[0] + )) + + # Actually add it to our list: + deps.add(added_tuple) + deps_were_added_by[added_tuple] = adding_recipe + + # Schedule dependencies to be added + to_be_added += [ + (dep, adder_first_recipe_name or name) + for dep in recipe_dependencies + if dep not in deps + ] + # If we came here, then there were no obvious conflicts. 
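+    # (Conflicts that only arise for a specific choice within a
+    # multiple-choice dependency tuple are still caught later, when the
+    # candidate orders are built via recursively_collect_orders().)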
+    return None
+
+
+def get_recipe_order_and_bootstrap(ctx, names, bs=None, blacklist=None):
+    # Get set of recipe/dependency names, clean up and add bootstrap deps:
+    names = set(names)
+    if bs is not None and bs.recipe_depends:
+        names = names.union(set(bs.recipe_depends))
+    names = fix_deplist([
+        ([name] if not isinstance(name, (list, tuple)) else name)
+        for name in names
+    ])
+    if blacklist is None:
+        blacklist = set()
+    blacklist = {bitem.lower() for bitem in blacklist}
+
+    # Remove all values that are in the blacklist:
+    names_before_blacklist = list(names)
+    names = []
+    for name in names_before_blacklist:
+        cleaned_up_tuple = tuple([
+            item for item in name if item not in blacklist
+        ])
+        if cleaned_up_tuple:
+            names.append(cleaned_up_tuple)
+
+    # Do check for obvious conflicts (that would trigger in any order, and
+    # without committing to any specific choice in a multi-choice tuple of
+    # dependencies):
+    obvious_conflict_checker(ctx, names, blacklist=blacklist)
+    # If we get here, no obvious conflicts!
+
+    # get all possible order graphs, as names may include tuples/lists
+    # of alternative dependencies
+    possible_orders = []
+    for name_set in product(*names):
+        new_possible_orders = [RecipeOrder(ctx)]
+        for name in name_set:
+            new_possible_orders = recursively_collect_orders(
+                name, ctx, name_set, orders=new_possible_orders,
+                blacklist=blacklist
+            )
+        possible_orders.extend(new_possible_orders)
+
+    # turn each order graph into a linear list if possible
+    orders = []
+    for possible_order in possible_orders:
+        try:
+            order = find_order(possible_order)
+        except ValueError:  # a circular dependency was found
+            info('Circular dependency found in graph {}, skipping it.'.format(
+                possible_order))
+            continue
+        orders.append(list(order))
+
+    # prefer python3 and SDL2 if available
+    orders.sort(key=lambda order: -('python3' in order) - ('sdl2' in order))
+
+    if not orders:
+        raise BuildInterruptingException(
+            'Didn\'t find any valid dependency graphs. '
+            'This means that some of your '
+            'requirements pull in conflicting dependencies.')
+
+    # It would be better to check against possible orders other
+    # than the first one, but in practice clashes will be rare,
+    # and can be resolved by specifying more parameters
+    chosen_order = orders[0]
+    if len(orders) > 1:
+        info('Found multiple valid dependency orders:')
+        for order in orders:
+            info('    {}'.format(order))
+        info('Using the first of these: {}'.format(chosen_order))
+    else:
+        info('Found a single valid recipe set: {}'.format(chosen_order))
+
+    if bs is None:
+        bs = Bootstrap.get_bootstrap_from_recipes(chosen_order, ctx)
+        if bs is None:
+            # Note: don't remove this without thought, causes infinite loop
+            raise BuildInterruptingException(
+                "Could not find any compatible bootstrap!"
+ ) + recipes, python_modules, bs = get_recipe_order_and_bootstrap( + ctx, chosen_order, bs=bs, blacklist=blacklist + ) + else: + # check if each requirement has a recipe + recipes = [] + python_modules = [] + for name in chosen_order: + try: + recipe = Recipe.get_recipe(name, ctx) + python_modules += recipe.python_depends + except ValueError: + python_modules.append(name) + else: + recipes.append(name) + + python_modules = list(set(python_modules)) + return recipes, python_modules, bs diff --git a/p4a/pythonforandroidold/includes/arm64-v8a/machine/cpu-features.h b/p4a/pythonforandroidold/includes/arm64-v8a/machine/cpu-features.h new file mode 100644 index 0000000..ca50906 --- /dev/null +++ b/p4a/pythonforandroidold/includes/arm64-v8a/machine/cpu-features.h @@ -0,0 +1,7 @@ +#ifndef _ARM64_CPU_FEATURES +#define _ARM64_CPU_FEATURES + +#define __ARM_ARCH__ 8 +#define __ARM_HAVE_HALFWORD_MULTIPLY 1 + +#endif // _ARM64_CPU_FEATURES diff --git a/p4a/pythonforandroidold/logger.py b/p4a/pythonforandroidold/logger.py new file mode 100644 index 0000000..b25b94c --- /dev/null +++ b/p4a/pythonforandroidold/logger.py @@ -0,0 +1,244 @@ +import logging +import os +import re +import sh +from sys import stdout, stderr +from math import log10 +from collections import defaultdict +from colorama import Style as Colo_Style, Fore as Colo_Fore +import six + +# This codecs change fixes a bug with log output, but crashes under python3 +if not six.PY3: + import codecs + stdout = codecs.getwriter('utf8')(stdout) + stderr = codecs.getwriter('utf8')(stderr) + +if six.PY2: + unistr = unicode # noqa F821 +else: + unistr = str + +# monkey patch to show full output +sh.ErrorReturnCode.truncate_cap = 999999 + + +class LevelDifferentiatingFormatter(logging.Formatter): + def format(self, record): + if record.levelno > 30: + record.msg = '{}{}[ERROR]{}{}: '.format( + Err_Style.BRIGHT, Err_Fore.RED, Err_Fore.RESET, + Err_Style.RESET_ALL) + record.msg + elif record.levelno > 20: + record.msg = '{}{}[WARNING]{}{}: '.format( + Err_Style.BRIGHT, Err_Fore.RED, Err_Fore.RESET, + Err_Style.RESET_ALL) + record.msg + elif record.levelno > 10: + record.msg = '{}[INFO]{}: '.format( + Err_Style.BRIGHT, Err_Style.RESET_ALL) + record.msg + else: + record.msg = '{}{}[DEBUG]{}{}: '.format( + Err_Style.BRIGHT, Err_Fore.LIGHTBLACK_EX, Err_Fore.RESET, + Err_Style.RESET_ALL) + record.msg + return super(LevelDifferentiatingFormatter, self).format(record) + + +logger = logging.getLogger('p4a') +# Necessary as importlib reloads this, +# which would add a second handler and reset the level +if not hasattr(logger, 'touched'): + logger.setLevel(logging.INFO) + logger.touched = True + ch = logging.StreamHandler(stderr) + formatter = LevelDifferentiatingFormatter('%(message)s') + ch.setFormatter(formatter) + logger.addHandler(ch) +info = logger.info +debug = logger.debug +warning = logger.warning +error = logger.error + + +class colorama_shim(object): + + def __init__(self, real): + self._dict = defaultdict(str) + self._real = real + self._enabled = False + + def __getattr__(self, key): + return getattr(self._real, key) if self._enabled else self._dict[key] + + def enable(self, enable): + self._enabled = enable + + +Out_Style = colorama_shim(Colo_Style) +Out_Fore = colorama_shim(Colo_Fore) +Err_Style = colorama_shim(Colo_Style) +Err_Fore = colorama_shim(Colo_Fore) + + +def setup_color(color): + enable_out = (False if color == 'never' else + True if color == 'always' else + stdout.isatty()) + Out_Style.enable(enable_out) + Out_Fore.enable(enable_out) + + 
enable_err = (False if color == 'never' else + True if color == 'always' else + stderr.isatty()) + Err_Style.enable(enable_err) + Err_Fore.enable(enable_err) + + +def info_main(*args): + logger.info(''.join([Err_Style.BRIGHT, Err_Fore.GREEN] + list(args) + + [Err_Style.RESET_ALL, Err_Fore.RESET])) + + +def info_notify(s): + info('{}{}{}{}'.format(Err_Style.BRIGHT, Err_Fore.LIGHTBLUE_EX, s, + Err_Style.RESET_ALL)) + + +def shorten_string(string, max_width): + ''' make limited length string in form: + "the string is very lo...(and 15 more)" + ''' + string_len = len(string) + if string_len <= max_width: + return string + visible = max_width - 16 - int(log10(string_len)) + # expected suffix len "...(and XXXXX more)" + if not isinstance(string, unistr): + visstring = unistr(string[:visible], errors='ignore') + else: + visstring = string[:visible] + return u''.join((visstring, u'...(and ', + unistr(string_len - visible), u' more)')) + + +def get_console_width(): + try: + cols = int(os.environ['COLUMNS']) + except (KeyError, ValueError): + pass + else: + if cols >= 25: + return cols + + try: + cols = max(25, int(os.popen('stty size', 'r').read().split()[1])) + except Exception: + pass + else: + return cols + + return 100 + + +def shprint(command, *args, **kwargs): + '''Runs the command (which should be an sh.Command instance), while + logging the output.''' + kwargs["_iter"] = True + kwargs["_out_bufsize"] = 1 + kwargs["_err_to_out"] = True + kwargs["_bg"] = True + is_critical = kwargs.pop('_critical', False) + tail_n = kwargs.pop('_tail', None) + full_debug = False + if "P4A_FULL_DEBUG" in os.environ: + tail_n = 0 + full_debug = True + filter_in = kwargs.pop('_filter', None) + filter_out = kwargs.pop('_filterout', None) + if len(logger.handlers) > 1: + logger.removeHandler(logger.handlers[1]) + columns = get_console_width() + command_path = str(command).split('/') + command_string = command_path[-1] + + string = ' '.join(['{}->{} running'.format(Out_Fore.LIGHTBLACK_EX, + Out_Style.RESET_ALL), + command_string] + list(args)) + + # If logging is not in DEBUG mode, trim the command if necessary + if logger.level > logging.DEBUG: + logger.info('{}{}'.format(shorten_string(string, columns - 12), + Err_Style.RESET_ALL)) + else: + logger.debug('{}{}'.format(string, Err_Style.RESET_ALL)) + + need_closing_newline = False + try: + msg_hdr = ' working: ' + msg_width = columns - len(msg_hdr) - 1 + output = command(*args, **kwargs) + for line in output: + if isinstance(line, bytes): + line = line.decode('utf-8', errors='replace') + if logger.level > logging.DEBUG: + if full_debug: + stdout.write(line) + stdout.flush() + continue + msg = line.replace( + '\n', ' ').replace( + '\t', ' ').replace( + '\b', ' ').rstrip() + if msg: + if "CI" not in os.environ: + stdout.write(u'{}\r{}{:<{width}}'.format( + Err_Style.RESET_ALL, msg_hdr, + shorten_string(msg, msg_width), width=msg_width)) + stdout.flush() + need_closing_newline = True + else: + logger.debug(''.join(['\t', line.rstrip()])) + if need_closing_newline: + stdout.write('{}\r{:>{width}}\r'.format( + Err_Style.RESET_ALL, ' ', width=(columns - 1))) + stdout.flush() + except sh.ErrorReturnCode as err: + if need_closing_newline: + stdout.write('{}\r{:>{width}}\r'.format( + Err_Style.RESET_ALL, ' ', width=(columns - 1))) + stdout.flush() + if tail_n is not None or filter_in or filter_out: + def printtail(out, name, forecolor, tail_n=0, + re_filter_in=None, re_filter_out=None): + lines = out.splitlines() + if re_filter_in is not None: + lines = [l for l in lines 
if re_filter_in.search(l)] + if re_filter_out is not None: + lines = [l for l in lines if not re_filter_out.search(l)] + if tail_n == 0 or len(lines) <= tail_n: + info('{}:\n{}\t{}{}'.format( + name, forecolor, '\t\n'.join(lines), Out_Fore.RESET)) + else: + info('{} (last {} lines of {}):\n{}\t{}{}'.format( + name, tail_n, len(lines), + forecolor, '\t\n'.join([s for s in lines[-tail_n:]]), + Out_Fore.RESET)) + printtail(err.stdout.decode('utf-8'), 'STDOUT', Out_Fore.YELLOW, tail_n, + re.compile(filter_in) if filter_in else None, + re.compile(filter_out) if filter_out else None) + printtail(err.stderr.decode('utf-8'), 'STDERR', Err_Fore.RED) + if is_critical: + env = kwargs.get("env") + if env is not None: + info("{}ENV:{}\n{}\n".format( + Err_Fore.YELLOW, Err_Fore.RESET, "\n".join( + "set {}={}".format(n, v) for n, v in env.items()))) + info("{}COMMAND:{}\ncd {} && {} {}\n".format( + Err_Fore.YELLOW, Err_Fore.RESET, os.getcwd(), command, + ' '.join(args))) + warning("{}ERROR: {} failed!{}".format( + Err_Fore.RED, command, Err_Fore.RESET)) + exit(1) + else: + raise + + return output diff --git a/p4a/pythonforandroidold/patching.py b/p4a/pythonforandroidold/patching.py new file mode 100644 index 0000000..2a47733 --- /dev/null +++ b/p4a/pythonforandroidold/patching.py @@ -0,0 +1,71 @@ +from os import uname + + +def check_all(*callables): + def check(**kwargs): + return all(c(**kwargs) for c in callables) + return check + + +def check_any(*callables): + def check(**kwargs): + return any(c(**kwargs) for c in callables) + return check + + +def is_platform(platform): + def is_x(**kwargs): + return uname()[0] == platform + return is_x + + +is_linux = is_platform('Linux') +is_darwin = is_platform('Darwin') + + +def is_arch(xarch): + def is_x(arch, **kwargs): + return arch.arch == xarch + return is_x + + +def is_api_gt(apiver): + def is_x(recipe, **kwargs): + return recipe.ctx.android_api > apiver + return is_x + + +def is_api_gte(apiver): + def is_x(recipe, **kwargs): + return recipe.ctx.android_api >= apiver + return is_x + + +def is_api_lt(apiver): + def is_x(recipe, **kwargs): + return recipe.ctx.android_api < apiver + return is_x + + +def is_api_lte(apiver): + def is_x(recipe, **kwargs): + return recipe.ctx.android_api <= apiver + return is_x + + +def is_api(apiver): + def is_x(recipe, **kwargs): + return recipe.ctx.android_api == apiver + return is_x + + +def will_build(recipe_name): + def will(recipe, **kwargs): + return recipe_name in recipe.ctx.recipe_build_order + return will + + +def is_ndk(ndk): + def is_x(recipe, **kwargs): + return recipe.ctx.ndk == ndk + return is_x diff --git a/p4a/pythonforandroid/python.py b/p4a/pythonforandroidold/python.py similarity index 100% rename from p4a/pythonforandroid/python.py rename to p4a/pythonforandroidold/python.py diff --git a/p4a/pythonforandroidold/recipe.py b/p4a/pythonforandroidold/recipe.py new file mode 100644 index 0000000..071aa22 --- /dev/null +++ b/p4a/pythonforandroidold/recipe.py @@ -0,0 +1,1169 @@ +from os.path import basename, dirname, exists, isdir, isfile, join, realpath, split +import importlib +import glob +from shutil import rmtree +from six import PY2, with_metaclass + +import hashlib +from re import match + +import sh +import shutil +import fnmatch +from os import listdir, unlink, environ, mkdir, curdir, walk +from sys import stdout +import time +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse +from pythonforandroid.logger import (logger, info, warning, debug, shprint, info_main) 
+from pythonforandroid.util import (urlretrieve, current_directory, ensure_dir, + BuildInterruptingException) + +# this import is necessary to keep imp.load_source from complaining :) +if PY2: + import imp + import_recipe = imp.load_source +else: + import importlib.util + if hasattr(importlib.util, 'module_from_spec'): + def import_recipe(module, filename): + spec = importlib.util.spec_from_file_location(module, filename) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + return mod + else: + from importlib.machinery import SourceFileLoader + + def import_recipe(module, filename): + return SourceFileLoader(module, filename).load_module() + + +class RecipeMeta(type): + def __new__(cls, name, bases, dct): + if name != 'Recipe': + if 'url' in dct: + dct['_url'] = dct.pop('url') + if 'version' in dct: + dct['_version'] = dct.pop('version') + + return super(RecipeMeta, cls).__new__(cls, name, bases, dct) + + +class Recipe(with_metaclass(RecipeMeta)): + _url = None + '''The address from which the recipe may be downloaded. This is not + essential, it may be omitted if the source is available some other + way, such as via the :class:`IncludedFilesBehaviour` mixin. + + If the url includes the version, you may (and probably should) + replace this with ``{version}``, which will automatically be + replaced by the :attr:`version` string during download. + + .. note:: Methods marked (internal) are used internally and you + probably don't need to call them, but they are available + if you want. + ''' + + _version = None + '''A string giving the version of the software the recipe describes, + e.g. ``2.0.3`` or ``master``.''' + + md5sum = None + '''The md5sum of the source from the :attr:`url`. Non-essential, but + you should try to include this, it is used to check that the download + finished correctly. + ''' + + depends = [] + '''A list containing the names of any recipes that this recipe depends on. + ''' + + conflicts = [] + '''A list containing the names of any recipes that are known to be + incompatible with this one.''' + + opt_depends = [] + '''A list of optional dependencies, that must be built before this + recipe if they are built at all, but whose presence is not essential.''' + + patches = [] + '''A list of patches to apply to the source. Values can be either a string + referring to the patch file relative to the recipe dir, or a tuple of the + string patch file and a callable, which will receive the kwargs `arch` and + `recipe`, which should return True if the patch should be applied.''' + + python_depends = [] + '''A list of pure-Python packages that this package requires. These + packages will NOT be available at build time, but will be added to the + list of pure-Python packages to install via pip. If you need these packages + at build time, you must create a recipe.''' + + archs = ['armeabi'] # Not currently implemented properly + + @property + def version(self): + key = 'VERSION_' + self.name + return environ.get(key, self._version) + + @property + def url(self): + key = 'URL_' + self.name + return environ.get(key, self._url) + + @property + def versioned_url(self): + '''A property returning the url of the recipe with ``{version}`` + replaced by the :attr:`url`. If accessing the url, you should use this + property, *not* access the url directly.''' + if self.url is None: + return None + return self.url.format(version=self.version) + + def download_file(self, url, target, cwd=None): + """ + (internal) Download an ``url`` to a ``target``. 
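+
+        Plain ``http``/``https`` URLs are fetched (with retries on bad
+        connections), while ``git``/``git+*`` URLs are cloned, or updated
+        in place if the target already exists.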
+        """
+        if not url:
+            return
+        info('Downloading {} from {}'.format(self.name, url))
+
+        if cwd:
+            target = join(cwd, target)
+
+        parsed_url = urlparse(url)
+        if parsed_url.scheme in ('http', 'https'):
+            def report_hook(index, blksize, size):
+                if size <= 0:
+                    progression = '{0} bytes'.format(index * blksize)
+                else:
+                    progression = '{0:.2f}%'.format(
+                        index * blksize * 100. / float(size))
+                if "CI" not in environ:
+                    stdout.write('- Download {}\r'.format(progression))
+                    stdout.flush()
+
+            if exists(target):
+                unlink(target)
+
+            # Download item with multiple attempts (for bad connections):
+            attempts = 0
+            while True:
+                try:
+                    urlretrieve(url, target, report_hook)
+                except OSError as e:
+                    attempts += 1
+                    if attempts >= 5:
+                        raise e
+                    stdout.write('Download failed, retrying in a second...')
+                    time.sleep(1)
+                    continue
+                break
+            return target
+        elif parsed_url.scheme in ('git', 'git+file', 'git+ssh', 'git+http', 'git+https'):
+            if isdir(target):
+                with current_directory(target):
+                    shprint(sh.git, 'fetch', '--tags')
+                    if self.version:
+                        shprint(sh.git, 'checkout', self.version)
+                    shprint(sh.git, 'pull')
+                    shprint(sh.git, 'pull', '--recurse-submodules')
+                    shprint(sh.git, 'submodule', 'update', '--recursive')
+            else:
+                if url.startswith('git+'):
+                    url = url[4:]
+                shprint(sh.git, 'clone', '--recursive', url, target)
+                if self.version:
+                    with current_directory(target):
+                        shprint(sh.git, 'checkout', self.version)
+                        shprint(sh.git, 'submodule', 'update', '--recursive')
+            return target
+
+    def apply_patch(self, filename, arch, build_dir=None):
+        """
+        Apply a patch from the current recipe directory into the current
+        build directory.
+
+        .. versionchanged:: 0.6.0
+            Add ability to apply patch from any dir via kwarg `build_dir`
+        """
+        info("Applying patch {}".format(filename))
+        build_dir = build_dir if build_dir else self.get_build_dir(arch)
+        filename = join(self.get_recipe_dir(), filename)
+        shprint(sh.patch, "-t", "-d", build_dir, "-p1",
+                "-i", filename, _tail=10)
+
+    def copy_file(self, filename, dest):
+        info("Copy {} to {}".format(filename, dest))
+        filename = join(self.get_recipe_dir(), filename)
+        dest = join(self.build_dir, dest)
+        shutil.copy(filename, dest)
+
+    def append_file(self, filename, dest):
+        info("Append {} to {}".format(filename, dest))
+        filename = join(self.get_recipe_dir(), filename)
+        dest = join(self.build_dir, dest)
+        with open(filename, "rb") as fd:
+            data = fd.read()
+        with open(dest, "ab") as fd:
+            fd.write(data)
+
+    @property
+    def name(self):
+        '''The name of the recipe, the same as the folder containing it.'''
+        modname = self.__class__.__module__
+        return modname.split(".", 2)[-1]
+
+    @property
+    def filtered_archs(self):
+        '''Return archs of self.ctx that are valid build archs
+        for the Recipe.'''
+        result = []
+        for arch in self.ctx.archs:
+            if not self.archs or (arch.arch in self.archs):
+                result.append(arch)
+        return result
+
+    def check_recipe_choices(self):
+        '''Checks what recipes are being built to see which of the alternative
+        and optional dependencies are being used,
+        and returns a list of these.'''
+        recipes = []
+        built_recipes = self.ctx.recipe_build_order
+        for recipe in self.depends:
+            if isinstance(recipe, (tuple, list)):
+                for alternative in recipe:
+                    if alternative in built_recipes:
+                        recipes.append(alternative)
+                        break
+        for recipe in self.opt_depends:
+            if recipe in built_recipes:
+                recipes.append(recipe)
+        return sorted(recipes)
+
+    def get_opt_depends_in_list(self, recipes):
+        '''Given a list of recipe names, returns those that are also in
+ self.opt_depends. + ''' + return [recipe for recipe in recipes if recipe in self.opt_depends] + + def get_build_container_dir(self, arch): + '''Given the arch name, returns the directory where it will be + built. + + This returns a different directory depending on what + alternative or optional dependencies are being built. + ''' + dir_name = self.get_dir_name() + return join(self.ctx.build_dir, 'other_builds', + dir_name, '{}__ndk_target_{}'.format(arch, self.ctx.ndk_api)) + + def get_dir_name(self): + choices = self.check_recipe_choices() + dir_name = '-'.join([self.name] + choices) + return dir_name + + def get_build_dir(self, arch): + '''Given the arch name, returns the directory where the + downloaded/copied package will be built.''' + + return join(self.get_build_container_dir(arch), self.name) + + def get_recipe_dir(self): + """ + Returns the local recipe directory or defaults to the core recipe + directory. + """ + if self.ctx.local_recipes is not None: + local_recipe_dir = join(self.ctx.local_recipes, self.name) + if exists(local_recipe_dir): + return local_recipe_dir + return join(self.ctx.root_dir, 'recipes', self.name) + + # Public Recipe API to be subclassed if needed + + def download_if_necessary(self): + info_main('Downloading {}'.format(self.name)) + user_dir = environ.get('P4A_{}_DIR'.format(self.name.lower())) + if user_dir is not None: + info('P4A_{}_DIR is set, skipping download for {}'.format( + self.name, self.name)) + return + self.download() + + def download(self): + if self.url is None: + info('Skipping {} download as no URL is set'.format(self.name)) + return + + url = self.versioned_url + ma = match(u'^(.+)#md5=([0-9a-f]{32})$', url) + if ma: # fragmented URL? + if self.md5sum: + raise ValueError( + ('Received md5sum from both the {} recipe ' + 'and its url').format(self.name)) + url = ma.group(1) + expected_md5 = ma.group(2) + else: + expected_md5 = self.md5sum + + shprint(sh.mkdir, '-p', join(self.ctx.packages_path, self.name)) + + with current_directory(join(self.ctx.packages_path, self.name)): + filename = shprint(sh.basename, url).stdout[:-1].decode('utf-8') + + do_download = True + marker_filename = '.mark-{}'.format(filename) + if exists(filename) and isfile(filename): + if not exists(marker_filename): + shprint(sh.rm, filename) + elif expected_md5: + current_md5 = md5sum(filename) + if current_md5 != expected_md5: + debug('* Generated md5sum: {}'.format(current_md5)) + debug('* Expected md5sum: {}'.format(expected_md5)) + raise ValueError( + ('Generated md5sum does not match expected md5sum ' + 'for {} recipe').format(self.name)) + do_download = False + else: + do_download = False + + # If we got this far, we will download + if do_download: + debug('Downloading {} from {}'.format(self.name, url)) + + shprint(sh.rm, '-f', marker_filename) + self.download_file(self.versioned_url, filename) + shprint(sh.touch, marker_filename) + + if exists(filename) and isfile(filename) and expected_md5: + current_md5 = md5sum(filename) + if expected_md5 is not None: + if current_md5 != expected_md5: + debug('* Generated md5sum: {}'.format(current_md5)) + debug('* Expected md5sum: {}'.format(expected_md5)) + raise ValueError( + ('Generated md5sum does not match expected md5sum ' + 'for {} recipe').format(self.name)) + else: + info('{} download already cached, skipping'.format(self.name)) + + def unpack(self, arch): + info_main('Unpacking {} for {}'.format(self.name, arch)) + + build_dir = self.get_build_container_dir(arch) + + user_dir = 
environ.get('P4A_{}_DIR'.format(self.name.lower())) + if user_dir is not None: + info('P4A_{}_DIR exists, symlinking instead'.format( + self.name.lower())) + if exists(self.get_build_dir(arch)): + return + shprint(sh.rm, '-rf', build_dir) + shprint(sh.mkdir, '-p', build_dir) + shprint(sh.rmdir, build_dir) + ensure_dir(build_dir) + shprint(sh.cp, '-a', user_dir, self.get_build_dir(arch)) + return + + if self.url is None: + info('Skipping {} unpack as no URL is set'.format(self.name)) + return + + filename = shprint( + sh.basename, self.versioned_url).stdout[:-1].decode('utf-8') + ma = match(u'^(.+)#md5=([0-9a-f]{32})$', filename) + if ma: # fragmented URL? + filename = ma.group(1) + + with current_directory(build_dir): + directory_name = self.get_build_dir(arch) + + if not exists(directory_name) or not isdir(directory_name): + extraction_filename = join( + self.ctx.packages_path, self.name, filename) + if isfile(extraction_filename): + if extraction_filename.endswith('.zip'): + try: + sh.unzip(extraction_filename) + except (sh.ErrorReturnCode_1, sh.ErrorReturnCode_2): + # return code 1 means unzipping had + # warnings but did complete, + # apparently happens sometimes with + # github zips + pass + import zipfile + fileh = zipfile.ZipFile(extraction_filename, 'r') + root_directory = fileh.filelist[0].filename.split('/')[0] + if root_directory != basename(directory_name): + shprint(sh.mv, root_directory, directory_name) + elif extraction_filename.endswith( + ('.tar.gz', '.tgz', '.tar.bz2', '.tbz2', '.tar.xz', '.txz')): + sh.tar('xf', extraction_filename) + root_directory = sh.tar('tf', extraction_filename).stdout.decode( + 'utf-8').split('\n')[0].split('/')[0] + if root_directory != directory_name: + shprint(sh.mv, root_directory, directory_name) + else: + raise Exception( + 'Could not extract {} download, it must be .zip, ' + '.tar.gz or .tar.bz2 or .tar.xz'.format(extraction_filename)) + elif isdir(extraction_filename): + mkdir(directory_name) + for entry in listdir(extraction_filename): + if entry not in ('.git',): + shprint(sh.cp, '-Rv', + join(extraction_filename, entry), + directory_name) + else: + raise Exception( + 'Given path is neither a file nor a directory: {}' + .format(extraction_filename)) + + else: + info('{} is already unpacked, skipping'.format(self.name)) + + def get_recipe_env(self, arch=None, with_flags_in_cc=True, clang=False): + """Return the env specialized for the recipe + """ + if arch is None: + arch = self.filtered_archs[0] + return arch.get_env(with_flags_in_cc=with_flags_in_cc, clang=clang) + + def prebuild_arch(self, arch): + '''Run any pre-build tasks for the Recipe. By default, this checks if + any prebuild_archname methods exist for the archname of the current + architecture, and runs them if so.''' + prebuild = "prebuild_{}".format(arch.arch.replace('-', '_')) + if hasattr(self, prebuild): + getattr(self, prebuild)() + else: + info('{} has no {}, skipping'.format(self.name, prebuild)) + + def is_patched(self, arch): + build_dir = self.get_build_dir(arch.arch) + return exists(join(build_dir, '.patched')) + + def apply_patches(self, arch, build_dir=None): + '''Apply any patches for the Recipe. + + .. 
versionchanged:: 0.6.0 + Add ability to apply patches from any dir via kwarg `build_dir`''' + if self.patches: + info_main('Applying patches for {}[{}]' + .format(self.name, arch.arch)) + + if self.is_patched(arch): + info_main('{} already patched, skipping'.format(self.name)) + return + + build_dir = build_dir if build_dir else self.get_build_dir(arch.arch) + for patch in self.patches: + if isinstance(patch, (tuple, list)): + patch, patch_check = patch + if not patch_check(arch=arch, recipe=self): + continue + + self.apply_patch( + patch.format(version=self.version, arch=arch.arch), + arch.arch, build_dir=build_dir) + + shprint(sh.touch, join(build_dir, '.patched')) + + def should_build(self, arch): + '''Should perform any necessary test and return True only if it needs + building again. + + ''' + return True + + def build_arch(self, arch): + '''Run any build tasks for the Recipe. By default, this checks if + any build_archname methods exist for the archname of the current + architecture, and runs them if so.''' + build = "build_{}".format(arch.arch) + if hasattr(self, build): + getattr(self, build)() + + def postbuild_arch(self, arch): + '''Run any post-build tasks for the Recipe. By default, this checks if + any postbuild_archname methods exist for the archname of the + current architecture, and runs them if so. + ''' + postbuild = "postbuild_{}".format(arch.arch) + if hasattr(self, postbuild): + getattr(self, postbuild)() + + def prepare_build_dir(self, arch): + '''Copies the recipe data into a build dir for the given arch. By + default, this unpacks a downloaded recipe. You should override + it (or use a Recipe subclass with different behaviour) if you + want to do something else. + ''' + self.unpack(arch) + + def clean_build(self, arch=None): + '''Deletes all the build information of the recipe. + + If arch is not None, only this arch dir is deleted. Otherwise + (the default) all builds for all archs are deleted. + + By default, this just deletes the main build dir. If the + recipe has e.g. object files biglinked, or .so files stored + elsewhere, you should override this method. + + This method is intended for testing purposes, it may have + strange results. Rebuild everything if this seems to happen. 
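+
+        Note that this also deletes ``ctx.python_installs_dir`` entirely,
+        so no previously installed copy of the recipe can linger in a
+        target's site-packages.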
+
+        '''
+        if arch is None:
+            base_dir = join(self.ctx.build_dir, 'other_builds', self.name)
+        else:
+            base_dir = self.get_build_container_dir(arch)
+        dirs = glob.glob(base_dir + '-*')
+        if exists(base_dir):
+            dirs.append(base_dir)
+        if not dirs:
+            warning('Attempted to clean build for {} but found no existing '
+                    'build dirs'.format(self.name))
+
+        for directory in dirs:
+            if exists(directory):
+                info('Deleting {}'.format(directory))
+                shutil.rmtree(directory)
+
+        # Delete any Python distributions to ensure the recipe build
+        # doesn't persist in site-packages
+        shutil.rmtree(self.ctx.python_installs_dir)
+
+    def install_libs(self, arch, *libs):
+        libs_dir = self.ctx.get_libs_dir(arch.arch)
+        if not libs:
+            warning('install_libs called with no libraries to install!')
+            return
+        args = libs + (libs_dir,)
+        shprint(sh.cp, *args)
+
+    def has_libs(self, arch, *libs):
+        return all(map(lambda l: self.ctx.has_lib(arch.arch, l), libs))
+
+    @classmethod
+    def recipe_dirs(cls, ctx):
+        recipe_dirs = []
+        if ctx.local_recipes is not None:
+            recipe_dirs.append(realpath(ctx.local_recipes))
+        if ctx.storage_dir:
+            recipe_dirs.append(join(ctx.storage_dir, 'recipes'))
+        recipe_dirs.append(join(ctx.root_dir, "recipes"))
+        return recipe_dirs
+
+    @classmethod
+    def list_recipes(cls, ctx):
+        forbidden_dirs = ('__pycache__', )
+        for recipes_dir in cls.recipe_dirs(ctx):
+            if recipes_dir and exists(recipes_dir):
+                for name in listdir(recipes_dir):
+                    if name in forbidden_dirs:
+                        continue
+                    fn = join(recipes_dir, name)
+                    if isdir(fn):
+                        yield name
+
+    @classmethod
+    def get_recipe(cls, name, ctx):
+        '''Returns the Recipe with the given name, if it exists.'''
+        name = name.lower()
+        if not hasattr(cls, "recipes"):
+            cls.recipes = {}
+        if name in cls.recipes:
+            return cls.recipes[name]
+
+        recipe_file = None
+        for recipes_dir in cls.recipe_dirs(ctx):
+            if not exists(recipes_dir):
+                continue
+            # Find matching folder (may differ in case):
+            for subfolder in listdir(recipes_dir):
+                if subfolder.lower() == name:
+                    recipe_file = join(recipes_dir, subfolder, '__init__.py')
+                    if exists(recipe_file):
+                        name = subfolder  # adapt to actual spelling
+                        break
+                    recipe_file = None
+            if recipe_file is not None:
+                break
+
+        if not recipe_file:
+            raise ValueError('Recipe does not exist: {}'.format(name))
+
+        mod = import_recipe('pythonforandroid.recipes.{}'.format(name), recipe_file)
+        if len(logger.handlers) > 1:
+            logger.removeHandler(logger.handlers[1])
+        recipe = mod.recipe
+        recipe.ctx = ctx
+        cls.recipes[name.lower()] = recipe
+        return recipe
+
+
+class IncludedFilesBehaviour(object):
+    '''Recipe mixin class that will automatically unpack files included in
+    the recipe directory.'''
+    src_filename = None
+
+    def prepare_build_dir(self, arch):
+        if self.src_filename is None:
+            raise BuildInterruptingException(
+                'IncludedFilesBehaviour failed: no src_filename specified')
+        shprint(sh.rm, '-rf', self.get_build_dir(arch))
+        shprint(sh.cp, '-a', join(self.get_recipe_dir(), self.src_filename),
+                self.get_build_dir(arch))
+
+
+class BootstrapNDKRecipe(Recipe):
+    '''A recipe class for recipes built in an Android project jni dir with
+    an Android.mk. These are not cached separately, but built in the
+    bootstrap's own build directory.
+
+    To build an NDK project which is not part of the bootstrap, see
+    :class:`~pythonforandroid.recipe.NDKRecipe`.
+
+    To link with python, call the method :meth:`get_recipe_env`
+    with the kwarg *with_python=True*.
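+    (e.g. ``env = self.get_recipe_env(arch, with_python=True)``); this
+    additionally sets PYTHON_INCLUDE_ROOT, PYTHON_LINK_ROOT and an
+    ``-lpython...`` entry in EXTRA_LDLIBS in the returned environment.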
+ ''' + + dir_name = None # The name of the recipe build folder in the jni dir + + def get_build_container_dir(self, arch): + return self.get_jni_dir() + + def get_build_dir(self, arch): + if self.dir_name is None: + raise ValueError('{} recipe doesn\'t define a dir_name, but ' + 'this is necessary'.format(self.name)) + return join(self.get_build_container_dir(arch), self.dir_name) + + def get_jni_dir(self): + return join(self.ctx.bootstrap.build_dir, 'jni') + + def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=False): + env = super(BootstrapNDKRecipe, self).get_recipe_env( + arch, with_flags_in_cc) + if not with_python: + return env + + env['PYTHON_INCLUDE_ROOT'] = self.ctx.python_recipe.include_root(arch.arch) + env['PYTHON_LINK_ROOT'] = self.ctx.python_recipe.link_root(arch.arch) + env['EXTRA_LDLIBS'] = ' -lpython{}'.format( + self.ctx.python_recipe.major_minor_version_string) + #if 'python3' in self.ctx.python_recipe.name: + # env['EXTRA_LDLIBS'] += 'm' + return env + + +class NDKRecipe(Recipe): + '''A recipe class for any NDK project not included in the bootstrap.''' + + generated_libraries = [] + + def should_build(self, arch): + lib_dir = self.get_lib_dir(arch) + + for lib in self.generated_libraries: + if not exists(join(lib_dir, lib)): + return True + + return False + + def get_lib_dir(self, arch): + return join(self.get_build_dir(arch.arch), 'obj', 'local', arch.arch) + + def get_jni_dir(self, arch): + return join(self.get_build_dir(arch.arch), 'jni') + + def build_arch(self, arch, *extra_args): + super(NDKRecipe, self).build_arch(arch) + + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + shprint( + sh.ndk_build, + 'V=1', + 'APP_PLATFORM=android-' + str(self.ctx.ndk_api), + 'APP_ABI=' + arch.arch, + *extra_args, _env=env + ) + + +class PythonRecipe(Recipe): + site_packages_name = None + '''The name of the module's folder when installed in the Python + site-packages (e.g. for pyjnius it is 'jnius')''' + + call_hostpython_via_targetpython = True + '''If True, tries to install the module using the hostpython binary + copied to the target (normally arm) python build dir. However, this + will fail if the module tries to import e.g. _io.so. Set this to False + to call hostpython from its own build dir, installing the module in + the right place via arguments to setup.py. However, this may not set + the environment correctly and so False is not the default.''' + + install_in_hostpython = False + '''If True, additionally installs the module in the hostpython build + dir. This will make it available to other recipes if + call_hostpython_via_targetpython is False. + ''' + + install_in_targetpython = True + '''If True, installs the module in the targetpython installation dir. 
This is almost always what you want to do.'''
+
+    setup_extra_args = []
+    '''List of extra arguments to pass to setup.py'''
+
+    def __init__(self, *args, **kwargs):
+        super(PythonRecipe, self).__init__(*args, **kwargs)
+        depends = self.depends
+        depends.append(('python2', 'python2legacy', 'python3', 'python3crystax'))
+        depends = list(set(depends))
+        self.depends = depends
+
+    def clean_build(self, arch=None):
+        super(PythonRecipe, self).clean_build(arch=arch)
+        name = self.folder_name
+        python_install_dirs = glob.glob(join(self.ctx.python_installs_dir, '*'))
+        for python_install in python_install_dirs:
+            site_packages_dir = glob.glob(join(python_install, 'lib', 'python*',
+                                               'site-packages'))
+            if site_packages_dir:
+                build_dir = join(site_packages_dir[0], name)
+                if exists(build_dir):
+                    info('Deleted {}'.format(build_dir))
+                    rmtree(build_dir)
+
+    @property
+    def real_hostpython_location(self):
+        host_name = 'host{}'.format(self.ctx.python_recipe.name)
+        host_build = Recipe.get_recipe(host_name, self.ctx).get_build_dir()
+        if host_name in ['hostpython2', 'hostpython3']:
+            return join(host_build, 'native-build', 'python')
+        elif host_name in ['hostpython3crystax', 'hostpython2legacy']:
+            return join(host_build, 'hostpython')
+        else:
+            python_recipe = self.ctx.python_recipe
+            return 'python{}'.format(python_recipe.version)
+
+    @property
+    def hostpython_location(self):
+        if not self.call_hostpython_via_targetpython:
+            return self.real_hostpython_location
+        return self.ctx.hostpython
+
+    @property
+    def folder_name(self):
+        '''The name of the build folders containing this recipe.'''
+        name = self.site_packages_name
+        if name is None:
+            name = self.name
+        return name
+
+    def get_recipe_env(self, arch=None, with_flags_in_cc=True):
+        env = super(PythonRecipe, self).get_recipe_env(arch, with_flags_in_cc)
+
+        env['PYTHONNOUSERSITE'] = '1'
+
+        # Set the LANG, this isn't usually important but is a better default
+        # as it occasionally matters how Python e.g. 
+        env['LANG'] = "en_GB.UTF-8"
+
+        if not self.call_hostpython_via_targetpython:
+            # set python headers/linkage flags depending on the python recipe
+            python_name = self.ctx.python_recipe.name
+            python_version = self.ctx.python_recipe.version
+            python_short_version = '.'.join(python_version.split('.')[:2])
+            if not self.ctx.python_recipe.from_crystax:
+                env['CFLAGS'] += ' -I{}'.format(
+                    self.ctx.python_recipe.include_root(arch.arch))
+                env['LDFLAGS'] += ' -L{} -lpython{}'.format(
+                    self.ctx.python_recipe.link_root(arch.arch),
+                    self.ctx.python_recipe.major_minor_version_string)
+                if python_name == 'python3':
+                    # appends to the '-lpython3.x' just added above, giving
+                    # the ABI-suffixed '-lpython3.xm' library name
+                    env['LDFLAGS'] += 'm'
+            elif python_name == 'python2legacy':
+                env['PYTHON_ROOT'] = join(
+                    self.ctx.python_recipe.get_build_dir(
+                        arch.arch), 'python-install')
+            else:
+                ndk_dir_python = join(self.ctx.ndk_dir, 'sources',
+                                      'python', python_version)
+                env['CFLAGS'] += ' -I{} '.format(
+                    join(ndk_dir_python, 'include',
+                         'python'))
+                env['LDFLAGS'] += ' -L{}'.format(
+                    join(ndk_dir_python, 'libs', arch.arch))
+                env['LDFLAGS'] += ' -lpython{}'.format(python_short_version)
+
+        hppath = []
+        hppath.append(join(dirname(self.hostpython_location), 'Lib'))
+        hppath.append(join(hppath[0], 'site-packages'))
+        builddir = join(dirname(self.hostpython_location), 'build')
+        if exists(builddir):
+            hppath += [join(builddir, d) for d in listdir(builddir)
+                       if isdir(join(builddir, d))]
+        if len(hppath) > 0:
+            if 'PYTHONPATH' in env:
+                env['PYTHONPATH'] = ':'.join(hppath + [env['PYTHONPATH']])
+            else:
+                env['PYTHONPATH'] = ':'.join(hppath)
+        return env
+
+    def should_build(self, arch):
+        name = self.folder_name
+        if self.ctx.has_package(name):
+            info('Python package already exists in site-packages')
+            return False
+        info('{} apparently isn\'t already in site-packages'.format(name))
+        return True
+
+    def build_arch(self, arch):
+        '''Install the Python module by calling setup.py install with
+        the target Python dir.'''
+        super(PythonRecipe, self).build_arch(arch)
+        self.install_python_package(arch)
+
+    def install_python_package(self, arch, name=None, env=None, is_dir=True):
+        '''Automate the installation of a Python package (or a cython
+        package where the cython components are pre-built).'''
+        # arch = self.filtered_archs[0]  # old kivy-ios way
+        if name is None:
+            name = self.name
+        if env is None:
+            env = self.get_recipe_env(arch)
+
+        info('Installing {} into site-packages'.format(self.name))
+
+        with current_directory(self.get_build_dir(arch.arch)):
+            hostpython = sh.Command(self.hostpython_location)
+
+            if self.ctx.python_recipe.name != 'python2legacy':
+                hpenv = env.copy()
+                shprint(hostpython, 'setup.py', 'install', '-O2',
+                        '--root={}'.format(self.ctx.get_python_install_dir()),
+                        '--install-lib=.',
+                        _env=hpenv, *self.setup_extra_args)
+            elif self.call_hostpython_via_targetpython:
+                shprint(hostpython, 'setup.py', 'install', '-O2', _env=env,
+                        *self.setup_extra_args)
+            else:  # python2legacy
+                hppath = join(dirname(self.hostpython_location), 'Lib', 'site-packages')
+                hpenv = env.copy()
+                if 'PYTHONPATH' in hpenv:
+                    hpenv['PYTHONPATH'] = ':'.join([hppath] + hpenv['PYTHONPATH'].split(':'))
+                else:
+                    hpenv['PYTHONPATH'] = hppath
+                shprint(hostpython, 'setup.py', 'install', '-O2',
+                        '--root={}'.format(self.ctx.get_python_install_dir()),
+                        '--install-lib=lib/python2.7/site-packages',
+                        _env=hpenv, *self.setup_extra_args)
+
+        # If asked, also install in the hostpython build dir
+        if self.install_in_hostpython:
+            self.install_hostpython_package(arch)
+
+    def get_hostrecipe_env(self, arch):
+        env = environ.copy()
+        env['PYTHONPATH'] = join(dirname(self.real_hostpython_location), 'Lib', 'site-packages')
+        return env
+
+    def install_hostpython_package(self, arch):
+        env = self.get_hostrecipe_env(arch)
+        real_hostpython = sh.Command(self.real_hostpython_location)
+        shprint(real_hostpython, 'setup.py', 'install', '-O2',
+                '--root={}'.format(dirname(self.real_hostpython_location)),
+                '--install-lib=Lib/site-packages',
+                _env=env, *self.setup_extra_args)
+
+
+class CompiledComponentsPythonRecipe(PythonRecipe):
+    pre_build_ext = False
+
+    build_cmd = 'build_ext'
+
+    def build_arch(self, arch):
+        '''Build any cython components, then install the Python module by
+        calling setup.py install with the target Python dir.
+        '''
+        Recipe.build_arch(self, arch)
+        self.build_compiled_components(arch)
+        self.install_python_package(arch)
+
+    def build_compiled_components(self, arch):
+        info('Building compiled components in {}'.format(self.name))
+
+        env = self.get_recipe_env(arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+            hostpython = sh.Command(self.hostpython_location)
+            if self.install_in_hostpython:
+                shprint(hostpython, 'setup.py', 'clean', '--all', _env=env)
+            shprint(hostpython, 'setup.py', self.build_cmd, '-v',
+                    _env=env, *self.setup_extra_args)
+            build_dir = glob.glob('build/lib.*')[0]
+            shprint(sh.find, build_dir, '-name', '*.o', '-exec',
+                    env['STRIP'], '{}', ';', _env=env)
+
+    def install_hostpython_package(self, arch):
+        env = self.get_hostrecipe_env(arch)
+        self.rebuild_compiled_components(arch, env)
+        super(CompiledComponentsPythonRecipe, self).install_hostpython_package(arch)
+
+    def rebuild_compiled_components(self, arch, env):
+        info('Rebuilding compiled components in {}'.format(self.name))
+
+        hostpython = sh.Command(self.real_hostpython_location)
+        shprint(hostpython, 'setup.py', 'clean', '--all', _env=env)
+        shprint(hostpython, 'setup.py', self.build_cmd, '-v', _env=env,
+                *self.setup_extra_args)
+
+
+class CppCompiledComponentsPythonRecipe(CompiledComponentsPythonRecipe):
+    """ Extensions that require the cxx-stl """
+    call_hostpython_via_targetpython = False
+
+    def get_recipe_env(self, arch):
+        env = super(CppCompiledComponentsPythonRecipe, self).get_recipe_env(arch)
+        keys = dict(
+            ctx=self.ctx,
+            arch=arch,
+            arch_noeabi=arch.arch.replace('eabi', '')
+        )
+        env['LDSHARED'] = env['CC'] + ' -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions'
+        env['CFLAGS'] += (
+            " -I{ctx.ndk_dir}/platforms/android-{ctx.android_api}/arch-{arch_noeabi}/usr/include" +
+            " -I{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/include" +
+            " -I{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/include").format(**keys)
+        env['CXXFLAGS'] = env['CFLAGS'] + ' -frtti -fexceptions'
+        env['LDFLAGS'] += (
+            " -L{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}" +
+            " -lgnustl_shared").format(**keys)
+
+        return env
+
+    def build_compiled_components(self, arch):
+        super(CppCompiledComponentsPythonRecipe, self).build_compiled_components(arch)
+
+        # Copy libgnustl_shared.so
+        with current_directory(self.get_build_dir(arch.arch)):
+            sh.cp(
+                "{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/libgnustl_shared.so".format(ctx=self.ctx, arch=arch),
+                self.ctx.get_libs_dir(arch.arch)
+            )
+
+
+class CythonRecipe(PythonRecipe):
+    pre_build_ext = False
+    cythonize = True
+    cython_args = []
+    call_hostpython_via_targetpython = False
+
+    def __init__(self, *args, **kwargs):
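+        # A tuple inside `depends` declares alternatives: the dependency
+        # graph resolver picks exactly one of the listed python variants.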
+        super(CythonRecipe, self).__init__(*args, **kwargs)
+        depends = self.depends
+        depends.append(('python2', 'python2legacy', 'python3', 'python3crystax'))
+        depends = list(set(depends))
+        self.depends = depends
+
+    def build_arch(self, arch):
+        '''Build any cython components, then install the Python module by
+        calling setup.py install with the target Python dir.
+        '''
+        Recipe.build_arch(self, arch)
+        self.build_cython_components(arch)
+        self.install_python_package(arch)
+
+    def build_cython_components(self, arch):
+        info('Cythonizing anything necessary in {}'.format(self.name))
+
+        env = self.get_recipe_env(arch)
+
+        with current_directory(self.get_build_dir(arch.arch)):
+            hostpython = sh.Command(self.ctx.hostpython)
+            shprint(hostpython, '-c', 'import sys; print(sys.path)', _env=env)
+            debug('cwd is {}'.format(realpath(curdir)))
+            info('Trying first build of {} to get cython files: this is '
+                 'expected to fail'.format(self.name))
+
+            manually_cythonise = False
+            try:
+                shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
+                        *self.setup_extra_args)
+            except sh.ErrorReturnCode_1:
+                print()
+                info('{} first build failed (as expected)'.format(self.name))
+                manually_cythonise = True
+
+            if manually_cythonise:
+                self.cythonize_build(env=env)
+                shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
+                        _tail=20, _critical=True, *self.setup_extra_args)
+            else:
+                info('First build appeared to complete correctly, skipping '
+                     'manual cythonising.')
+
+        self.strip_object_files(arch, env)
+
+    def strip_object_files(self, arch, env, build_dir=None):
+        if build_dir is None:
+            build_dir = self.get_build_dir(arch.arch)
+        with current_directory(build_dir):
+            info('Stripping object files')
+            if self.ctx.python_recipe.name == 'python2legacy':
+                build_lib = glob.glob('./build/lib*')
+                shprint(sh.find, build_lib[0], '-name', '*.o', '-exec',
+                        env['STRIP'], '{}', ';', _env=env)
+            else:
+                shprint(sh.find, '.', '-iname', '*.so', '-exec',
+                        '/usr/bin/echo', '{}', ';', _env=env)
+                shprint(sh.find, '.', '-iname', '*.so', '-exec',
+                        env['STRIP'].split(' ')[0], '--strip-unneeded',
+                        # '/usr/bin/strip', '--strip-unneeded',
+                        '{}', ';', _env=env)
+
+    def cythonize_file(self, env, build_dir, filename):
+        short_filename = filename
+        if filename.startswith(build_dir):
+            short_filename = filename[len(build_dir) + 1:]
+        info(u"Cythonize {}".format(short_filename))
+        cyenv = env.copy()
+        if 'CYTHONPATH' in cyenv:
+            cyenv['PYTHONPATH'] = cyenv['CYTHONPATH']
+        elif 'PYTHONPATH' in cyenv:
+            del cyenv['PYTHONPATH']
+        if 'PYTHONNOUSERSITE' in cyenv:
+            cyenv.pop('PYTHONNOUSERSITE')
+        cython = 'cython' if self.ctx.python_recipe.from_crystax else self.ctx.cython
+        cython_command = sh.Command(cython)
+        shprint(cython_command, filename, *self.cython_args, _env=cyenv)
+
+    def cythonize_build(self, env, build_dir="."):
+        if not self.cythonize:
+            info('Running cython cancelled per recipe setting')
+            return
+        info('Running cython where appropriate')
+        for root, dirnames, filenames in walk("."):
+            for filename in fnmatch.filter(filenames, "*.pyx"):
+                self.cythonize_file(env, build_dir, join(root, filename))
+
+    def get_recipe_env(self, arch, with_flags_in_cc=True):
+        env = super(CythonRecipe, self).get_recipe_env(arch, with_flags_in_cc)
+        env['LDFLAGS'] = env['LDFLAGS'] + ' -L{} -L{} -L{}'.format(
+            self.ctx.get_libs_dir(arch.arch),
+            self.ctx.libs_dir,
+            join(self.ctx.bootstrap.build_dir, 'obj', 'local',
+                 arch.arch))
+        if self.ctx.python_recipe.from_crystax:
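+            # Assumption: the CrystaX build copies its libpython into the
+            # bootstrap's libs dir, so add that to the linker search path.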
+            env['LDFLAGS'] = (env['LDFLAGS'] +
+                ' -L{}'.format(join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)))
+
+        if self.ctx.python_recipe.name == 'python2legacy':
+            env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink.sh')
+        else:
+            env['LDSHARED'] = env['CC'] + ' -shared'
+        # shprint(sh.whereis, env['LDSHARED'], _env=env)
+        env['LIBLINK'] = 'NOTNONE'
+        env['NDKPLATFORM'] = self.ctx.ndk_platform
+        if self.ctx.copy_libs:
+            env['COPYLIBS'] = '1'
+
+        # Every recipe uses its own liblink path, object files are
+        # collected and biglinked later
+        liblink_path = join(self.get_build_container_dir(arch.arch),
+                            'objects_{}'.format(self.name))
+        env['LIBLINK_PATH'] = liblink_path
+        ensure_dir(liblink_path)
+
+        # Add crystax-specific site packages:
+        if self.ctx.python_recipe.from_crystax:
+            command = sh.Command('python{}'.format(self.ctx.python_recipe.version))
+            site_packages_dirs = command(
+                '-c', 'import site; print("\\n".join(site.getsitepackages()))')
+            site_packages_dirs = site_packages_dirs.stdout.decode('utf-8').split('\n')
+            if 'PYTHONPATH' in env:
+                env['PYTHONPATH'] = env['PYTHONPATH'] +\
+                    ':{}'.format(':'.join(site_packages_dirs))
+            else:
+                env['PYTHONPATH'] = ':'.join(site_packages_dirs)
+            while env['PYTHONPATH'].find("::") > 0:
+                env['PYTHONPATH'] = env['PYTHONPATH'].replace("::", ":")
+            if env['PYTHONPATH'].endswith(":"):
+                env['PYTHONPATH'] = env['PYTHONPATH'][:-1]
+            if env['PYTHONPATH'].startswith(":"):
+                env['PYTHONPATH'] = env['PYTHONPATH'][1:]
+
+        return env
+
+
+class TargetPythonRecipe(Recipe):
+    '''Class for target python recipes. Sets ctx.python_recipe to point to
+    itself, so as to know later what kind of Python was built or used.'''
+
+    from_crystax = False
+    '''True if the python is used from CrystaX, False otherwise (i.e. if
+    it is built by p4a).'''
+
+    def __init__(self, *args, **kwargs):
+        self._ctx = None
+        super(TargetPythonRecipe, self).__init__(*args, **kwargs)
+
+    def prebuild_arch(self, arch):
+        super(TargetPythonRecipe, self).prebuild_arch(arch)
+        if self.from_crystax and self.ctx.ndk != 'crystax':
+            raise BuildInterruptingException(
+                'The {} recipe can only be built when '
+                'using the CrystaX NDK. Exiting.'.format(self.name))
+        self.ctx.python_recipe = self
+
+    def include_root(self, arch):
+        '''The root directory from which to include headers.'''
+        raise NotImplementedError('Not implemented in TargetPythonRecipe')
+
+    def link_root(self, arch):
+        '''The root directory containing the library to link against.'''
+        raise NotImplementedError('Not implemented in TargetPythonRecipe')
+
+    @property
+    def major_minor_version_string(self):
+        from distutils.version import LooseVersion
+        return '.'.join([str(v) for v in LooseVersion(self.version).version[:2]])
+
+    def create_python_bundle(self, dirn, arch):
+        """
+        Create a packaged python bundle in the target directory, by
+        copying all the modules and standard library to the right
+        place.
+        """
+        raise NotImplementedError('{} does not implement create_python_bundle'.format(self))
+
+    def reduce_object_file_names(self, dirn):
+        """Recursively renames all files named "XXX.cpython-...-linux-gnu.so"
+        to "XXX.so", i.e. removing the erroneous architecture name
+        coming from the local system.
+        """
+        py_so_files = shprint(sh.find, dirn, '-iname', '*.so')
+        filens = py_so_files.stdout.decode('utf-8').split('\n')[:-1]
+        for filen in filens:
+            file_dirname, file_basename = split(filen)
+            parts = file_basename.split('.')
+            if len(parts) <= 2:
+                continue
+            shprint(sh.mv, filen, join(file_dirname, parts[0] + '.so'))
+
+
+def md5sum(filen):
+    '''Calculate the md5sum of a file.
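+
+    Example (hypothetical archive name and digest)::
+
+        >>> md5sum('packages/Pillow-5.2.0.tar.gz')  # doctest: +SKIP
+        '5cc1a4ba1b6771b2c0ee79038b78e09a'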
+ ''' + with open(filen, 'rb') as fileh: + md5 = hashlib.md5(fileh.read()) + + return md5.hexdigest() diff --git a/p4a/pythonforandroidold/recipes/Pillow/__init__.py b/p4a/pythonforandroidold/recipes/Pillow/__init__.py new file mode 100644 index 0000000..14c9d2b --- /dev/null +++ b/p4a/pythonforandroidold/recipes/Pillow/__init__.py @@ -0,0 +1,59 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from os.path import join + + +class PillowRecipe(CompiledComponentsPythonRecipe): + + version = '5.2.0' + url = 'https://github.com/python-pillow/Pillow/archive/{version}.tar.gz' + site_packages_name = 'Pillow' + depends = ['png', 'jpeg', 'freetype', 'setuptools'] + patches = [join('patches', 'fix-docstring.patch'), + join('patches', 'fix-setup.patch')] + + call_hostpython_via_targetpython = False + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super(PillowRecipe, self).get_recipe_env(arch, with_flags_in_cc) + + env['ANDROID_ROOT'] = join(self.ctx.ndk_platform, 'usr') + ndk_lib_dir = join(self.ctx.ndk_platform, 'usr', 'lib') + ndk_include_dir = join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include') + + png = self.get_recipe('png', self.ctx) + png_lib_dir = png.get_lib_dir(arch) + png_jni_dir = png.get_jni_dir(arch) + + jpeg = self.get_recipe('jpeg', self.ctx) + jpeg_inc_dir = jpeg_lib_dir = jpeg.get_build_dir(arch.arch) + + freetype = self.get_recipe('freetype', self.ctx) + free_lib_dir = join(freetype.get_build_dir(arch.arch), 'objs', '.libs') + free_inc_dir = join(freetype.get_build_dir(arch.arch), 'include') + + # harfbuzz is a direct dependency of freetype and we need the proper + # flags to successfully build the Pillow recipe, so we add them here. + harfbuzz = self.get_recipe('harfbuzz', self.ctx) + harf_lib_dir = join(harfbuzz.get_build_dir(arch.arch), 'src', '.libs') + harf_inc_dir = harfbuzz.get_build_dir(arch.arch) + + env['JPEG_ROOT'] = '{}|{}'.format(jpeg_lib_dir, jpeg_inc_dir) + env['FREETYPE_ROOT'] = '{}|{}'.format(free_lib_dir, free_inc_dir) + env['ZLIB_ROOT'] = '{}|{}'.format(ndk_lib_dir, ndk_include_dir) + + cflags = ' -I{}'.format(png_jni_dir) + cflags += ' -I{} -I{}'.format(harf_inc_dir, join(harf_inc_dir, 'src')) + cflags += ' -I{}'.format(free_inc_dir) + cflags += ' -I{}'.format(jpeg_inc_dir) + cflags += ' -I{}'.format(ndk_include_dir) + + env['LIBS'] = ' -lpng -lfreetype -lharfbuzz -ljpeg -lturbojpeg' + + env['LDFLAGS'] += ' -L{} -L{} -L{} -L{}'.format( + png_lib_dir, harf_lib_dir, jpeg_lib_dir, ndk_lib_dir) + if cflags not in env['CFLAGS']: + env['CFLAGS'] += cflags + return env + + +recipe = PillowRecipe() diff --git a/p4a/pythonforandroid/recipes/Pillow/patches/fix-docstring.patch b/p4a/pythonforandroidold/recipes/Pillow/patches/fix-docstring.patch similarity index 100% rename from p4a/pythonforandroid/recipes/Pillow/patches/fix-docstring.patch rename to p4a/pythonforandroidold/recipes/Pillow/patches/fix-docstring.patch diff --git a/p4a/pythonforandroidold/recipes/Pillow/patches/fix-setup.patch b/p4a/pythonforandroidold/recipes/Pillow/patches/fix-setup.patch new file mode 100644 index 0000000..3b0ccef --- /dev/null +++ b/p4a/pythonforandroidold/recipes/Pillow/patches/fix-setup.patch @@ -0,0 +1,148 @@ +diff --git a/setup.py b/setup.py +index 761d552..4ddc598 100755 +--- a/setup.py ++++ b/setup.py +@@ -136,12 +136,12 @@ except (ImportError, OSError): + + NAME = 'Pillow' + PILLOW_VERSION = get_version() +-JPEG_ROOT = None ++JPEG_ROOT = tuple(os.environ['JPEG_ROOT'].split('|')) if 'JPEG_ROOT' in os.environ else None + JPEG2K_ROOT = None 
+-ZLIB_ROOT = None ++ZLIB_ROOT = tuple(os.environ['ZLIB_ROOT'].split('|')) if 'ZLIB_ROOT' in os.environ else None + IMAGEQUANT_ROOT = None + TIFF_ROOT = None +-FREETYPE_ROOT = None ++FREETYPE_ROOT = tuple(os.environ['FREETYPE_ROOT'].split('|')) if 'FREETYPE_ROOT' in os.environ else None + LCMS_ROOT = None + + +@@ -194,7 +194,7 @@ class pil_build_ext(build_ext): + ] + + def initialize_options(self): +- self.disable_platform_guessing = None ++ self.disable_platform_guessing = True + build_ext.initialize_options(self) + for x in self.feature: + setattr(self, 'disable_%s' % x, None) +@@ -466,61 +466,6 @@ class pil_build_ext(build_ext): + feature.jpeg = "libjpeg" # alternative name + + feature.openjpeg_version = None +- if feature.want('jpeg2000'): +- _dbg('Looking for jpeg2000') +- best_version = None +- best_path = None +- +- # Find the best version +- for directory in self.compiler.include_dirs: +- _dbg('Checking for openjpeg-#.# in %s', directory) +- try: +- listdir = os.listdir(directory) +- except Exception: +- # WindowsError, FileNotFoundError +- continue +- for name in listdir: +- if name.startswith('openjpeg-') and \ +- os.path.isfile(os.path.join(directory, name, +- 'openjpeg.h')): +- _dbg('Found openjpeg.h in %s/%s', (directory, name)) +- version = tuple(int(x) for x in name[9:].split('.')) +- if best_version is None or version > best_version: +- best_version = version +- best_path = os.path.join(directory, name) +- _dbg('Best openjpeg version %s so far in %s', +- (best_version, best_path)) +- +- if best_version and _find_library_file(self, 'openjp2'): +- # Add the directory to the include path so we can include +- # rather than having to cope with the versioned +- # include path +- # FIXME (melvyn-sopacua): +- # At this point it's possible that best_path is already in +- # self.compiler.include_dirs. Should investigate how that is +- # possible. +- _add_directory(self.compiler.include_dirs, best_path, 0) +- feature.jpeg2000 = 'openjp2' +- feature.openjpeg_version = '.'.join(str(x) for x in best_version) +- +- if feature.want('imagequant'): +- _dbg('Looking for imagequant') +- if _find_include_file(self, 'libimagequant.h'): +- if _find_library_file(self, "imagequant"): +- feature.imagequant = "imagequant" +- elif _find_library_file(self, "libimagequant"): +- feature.imagequant = "libimagequant" +- +- if feature.want('tiff'): +- _dbg('Looking for tiff') +- if _find_include_file(self, 'tiff.h'): +- if _find_library_file(self, "tiff"): +- feature.tiff = "tiff" +- if sys.platform == "win32" and _find_library_file(self, "libtiff"): +- feature.tiff = "libtiff" +- if (sys.platform == "darwin" and +- _find_library_file(self, "libtiff")): +- feature.tiff = "libtiff" + + if feature.want('freetype'): + _dbg('Looking for freetype') +@@ -546,36 +491,6 @@ class pil_build_ext(build_ext): + if subdir: + _add_directory(self.compiler.include_dirs, subdir, 0) + +- if feature.want('lcms'): +- _dbg('Looking for lcms') +- if _find_include_file(self, "lcms2.h"): +- if _find_library_file(self, "lcms2"): +- feature.lcms = "lcms2" +- elif _find_library_file(self, "lcms2_static"): +- # alternate Windows name. 
+- feature.lcms = "lcms2_static" +- +- if feature.want('webp'): +- _dbg('Looking for webp') +- if (_find_include_file(self, "webp/encode.h") and +- _find_include_file(self, "webp/decode.h")): +- # In Google's precompiled zip it is call "libwebp": +- if _find_library_file(self, "webp"): +- feature.webp = "webp" +- elif _find_library_file(self, "libwebp"): +- feature.webp = "libwebp" +- +- if feature.want('webpmux'): +- _dbg('Looking for webpmux') +- if (_find_include_file(self, "webp/mux.h") and +- _find_include_file(self, "webp/demux.h")): +- if (_find_library_file(self, "webpmux") and +- _find_library_file(self, "webpdemux")): +- feature.webpmux = "webpmux" +- if (_find_library_file(self, "libwebpmux") and +- _find_library_file(self, "libwebpdemux")): +- feature.webpmux = "libwebpmux" +- + for f in feature: + if not getattr(feature, f) and feature.require(f): + if f in ('jpeg', 'zlib'): +@@ -612,8 +527,6 @@ class pil_build_ext(build_ext): + defs.append(("HAVE_LIBTIFF", None)) + if sys.platform == "win32": + libs.extend(["kernel32", "user32", "gdi32"]) +- if struct.unpack("h", "\0\1".encode('ascii'))[0] == 1: +- defs.append(("WORDS_BIGENDIAN", None)) + + if sys.platform == "win32" and not (PLATFORM_PYPY or PLATFORM_MINGW): + defs.append(("PILLOW_VERSION", '"\\"%s\\""' % PILLOW_VERSION)) +@@ -658,10 +571,6 @@ class pil_build_ext(build_ext): + define_macros=defs)) + + tk_libs = ['psapi'] if sys.platform == 'win32' else [] +- exts.append(Extension("PIL._imagingtk", +- ["src/_imagingtk.c", "src/Tk/tkImaging.c"], +- include_dirs=['src/Tk'], +- libraries=tk_libs)) + + exts.append(Extension("PIL._imagingmath", ["src/_imagingmath.c"])) + exts.append(Extension("PIL._imagingmorph", ["src/_imagingmorph.c"])) diff --git a/p4a/pythonforandroidold/recipes/__init__.py b/p4a/pythonforandroidold/recipes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/p4a/pythonforandroidold/recipes/android/__init__.py b/p4a/pythonforandroidold/recipes/android/__init__.py new file mode 100644 index 0000000..4a06ca8 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/__init__.py @@ -0,0 +1,95 @@ +from __future__ import unicode_literals +from pythonforandroid.recipe import CythonRecipe, IncludedFilesBehaviour +from pythonforandroid.util import current_directory +from pythonforandroid.patching import will_build +from pythonforandroid import logger + +from os.path import join + + +class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe): + # name = 'android' + version = None + url = None + + src_filename = 'src' + + depends = [('pygame', 'sdl2', 'genericndkbuild'), + 'pyjnius'] + + config_env = {} + + def get_recipe_env(self, arch): + env = super(AndroidRecipe, self).get_recipe_env(arch) + env.update(self.config_env) + return env + + def prebuild_arch(self, arch): + super(AndroidRecipe, self).prebuild_arch(arch) + ctx_bootstrap = self.ctx.bootstrap.name + + # define macros for Cython, C, Python + tpxi = 'DEF {} = {}\n' + th = '#define {} {}\n' + tpy = '{} = {}\n' + + # make sure bootstrap name is in unicode + if isinstance(ctx_bootstrap, bytes): + ctx_bootstrap = ctx_bootstrap.decode('utf-8') + bootstrap = bootstrap_name = ctx_bootstrap + + is_sdl2 = bootstrap_name in ('sdl2', 'sdl2python3', 'sdl2_gradle') + is_pygame = bootstrap_name in ('pygame',) + is_webview = bootstrap_name in ('webview',) + + if is_sdl2 or is_webview: + if is_sdl2: + bootstrap = 'sdl2' + java_ns = u'org.kivy.android' + jni_ns = u'org/kivy/android' + elif is_pygame: + java_ns = u'org.renpy.android' + jni_ns = 
u'org/renpy/android' + else: + logger.error(( + 'unsupported bootstrap for android recipe: {}' + ''.format(bootstrap_name) + )) + exit(1) + + config = { + 'BOOTSTRAP': bootstrap, + 'IS_SDL2': int(is_sdl2), + 'IS_PYGAME': int(is_pygame), + 'PY2': int(will_build('python2')(self)), + 'JAVA_NAMESPACE': java_ns, + 'JNI_NAMESPACE': jni_ns, + } + + # create config files for Cython, C and Python + with ( + current_directory(self.get_build_dir(arch.arch))), ( + open(join('android', 'config.pxi'), 'w')) as fpxi, ( + open(join('android', 'config.h'), 'w')) as fh, ( + open(join('android', 'config.py'), 'w')) as fpy: + + for key, value in config.items(): + fpxi.write(tpxi.format(key, repr(value))) + fpy.write(tpy.format(key, repr(value))) + + fh.write(th.format( + key, + value if isinstance(value, int) else '"{}"'.format(value) + )) + self.config_env[key] = str(value) + + if is_sdl2: + fh.write('JNIEnv *SDL_AndroidGetJNIEnv(void);\n') + fh.write( + '#define SDL_ANDROID_GetJNIEnv SDL_AndroidGetJNIEnv\n' + ) + elif is_pygame: + fh.write('JNIEnv *SDL_ANDROID_GetJNIEnv(void);\n') + + +recipe = AndroidRecipe() diff --git a/p4a/pythonforandroidold/recipes/android/src/android/__init__.py b/p4a/pythonforandroidold/recipes/android/src/android/__init__.py new file mode 100644 index 0000000..cb95734 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/src/android/__init__.py @@ -0,0 +1,8 @@ +''' +Android module +============== + +''' + +# legacy import +from android._android import * # noqa: F401, F403 diff --git a/p4a/pythonforandroidold/recipes/android/src/android/_android.pyx b/p4a/pythonforandroidold/recipes/android/src/android/_android.pyx new file mode 100644 index 0000000..d332eed --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/src/android/_android.pyx @@ -0,0 +1,385 @@ +# Android-specific python services. + +include "config.pxi" + +IF BOOTSTRAP == 'pygame': + cdef extern int SDL_ANDROID_CheckPause() + cdef extern void SDL_ANDROID_WaitForResume() nogil + cdef extern void SDL_ANDROID_MapKey(int scancode, int keysym) + + def check_pause(): + return SDL_ANDROID_CheckPause() + + def wait_for_resume(): + android_accelerometer_enable(False) + SDL_ANDROID_WaitForResume() + android_accelerometer_enable(accelerometer_enabled) + + def map_key(scancode, keysym): + SDL_ANDROID_MapKey(scancode, keysym) + +# Android keycodes. 
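+# These values mirror the KEYCODE_* constants of android.view.KeyEvent,
+# e.g. for use with map_key() in the pygame bootstrap above.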
+KEYCODE_UNKNOWN = 0
+KEYCODE_SOFT_LEFT = 1
+KEYCODE_SOFT_RIGHT = 2
+KEYCODE_HOME = 3
+KEYCODE_BACK = 4
+KEYCODE_CALL = 5
+KEYCODE_ENDCALL = 6
+KEYCODE_0 = 7
+KEYCODE_1 = 8
+KEYCODE_2 = 9
+KEYCODE_3 = 10
+KEYCODE_4 = 11
+KEYCODE_5 = 12
+KEYCODE_6 = 13
+KEYCODE_7 = 14
+KEYCODE_8 = 15
+KEYCODE_9 = 16
+KEYCODE_STAR = 17
+KEYCODE_POUND = 18
+KEYCODE_DPAD_UP = 19
+KEYCODE_DPAD_DOWN = 20
+KEYCODE_DPAD_LEFT = 21
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_DPAD_CENTER = 23
+KEYCODE_VOLUME_UP = 24
+KEYCODE_VOLUME_DOWN = 25
+KEYCODE_POWER = 26
+KEYCODE_CAMERA = 27
+KEYCODE_CLEAR = 28
+KEYCODE_A = 29
+KEYCODE_B = 30
+KEYCODE_C = 31
+KEYCODE_D = 32
+KEYCODE_E = 33
+KEYCODE_F = 34
+KEYCODE_G = 35
+KEYCODE_H = 36
+KEYCODE_I = 37
+KEYCODE_J = 38
+KEYCODE_K = 39
+KEYCODE_L = 40
+KEYCODE_M = 41
+KEYCODE_N = 42
+KEYCODE_O = 43
+KEYCODE_P = 44
+KEYCODE_Q = 45
+KEYCODE_R = 46
+KEYCODE_S = 47
+KEYCODE_T = 48
+KEYCODE_U = 49
+KEYCODE_V = 50
+KEYCODE_W = 51
+KEYCODE_X = 52
+KEYCODE_Y = 53
+KEYCODE_Z = 54
+KEYCODE_COMMA = 55
+KEYCODE_PERIOD = 56
+KEYCODE_ALT_LEFT = 57
+KEYCODE_ALT_RIGHT = 58
+KEYCODE_SHIFT_LEFT = 59
+KEYCODE_SHIFT_RIGHT = 60
+KEYCODE_TAB = 61
+KEYCODE_SPACE = 62
+KEYCODE_SYM = 63
+KEYCODE_EXPLORER = 64
+KEYCODE_ENVELOPE = 65
+KEYCODE_ENTER = 66
+KEYCODE_DEL = 67
+KEYCODE_GRAVE = 68
+KEYCODE_MINUS = 69
+KEYCODE_EQUALS = 70
+KEYCODE_LEFT_BRACKET = 71
+KEYCODE_RIGHT_BRACKET = 72
+KEYCODE_BACKSLASH = 73
+KEYCODE_SEMICOLON = 74
+KEYCODE_APOSTROPHE = 75
+KEYCODE_SLASH = 76
+KEYCODE_AT = 77
+KEYCODE_NUM = 78
+KEYCODE_HEADSETHOOK = 79
+KEYCODE_FOCUS = 80
+KEYCODE_PLUS = 81
+KEYCODE_MENU = 82
+KEYCODE_NOTIFICATION = 83
+KEYCODE_SEARCH = 84
+KEYCODE_MEDIA_PLAY_PAUSE = 85
+KEYCODE_MEDIA_STOP = 86
+KEYCODE_MEDIA_NEXT = 87
+KEYCODE_MEDIA_PREVIOUS = 88
+KEYCODE_MEDIA_REWIND = 89
+KEYCODE_MEDIA_FAST_FORWARD = 90
+KEYCODE_MUTE = 91
+
+# Vibration support.
+cdef extern void android_vibrate(double)
+
+def vibrate(s):
+    android_vibrate(s)
+
+# Accelerometer support.
+cdef extern void android_accelerometer_enable(int)
+cdef extern void android_accelerometer_reading(float *)
+
+accelerometer_enabled = False
+
+def accelerometer_enable(p):
+    global accelerometer_enabled
+
+    android_accelerometer_enable(p)
+
+    accelerometer_enabled = p
+
+def accelerometer_reading():
+    cdef float rv[3]
+    android_accelerometer_reading(rv)
+
+    return (rv[0], rv[1], rv[2])
+
+# Wifi reading support
+cdef extern void android_wifi_scanner_enable()
+cdef extern char * android_wifi_scan()
+
+def wifi_scanner_enable():
+    android_wifi_scanner_enable()
+
+def wifi_scan():
+    cdef char * reading
+    reading = android_wifi_scan()
+
+    reading_list = []
+
+    for line in filter(lambda l: l, reading.split('\n')):
+        [ssid, mac, level] = line.split('\t')
+        reading_list.append((ssid.strip(), mac.upper().strip(), int(level)))
+
+    return reading_list
+
+# DisplayMetrics information.
+cdef extern int android_get_dpi()
+
+def get_dpi():
+    return android_get_dpi()
+
+
+# Soft keyboard.
+cdef extern void android_show_keyboard(int)
+cdef extern void android_hide_keyboard()
+
+
+from jnius import autoclass, PythonJavaClass, java_method, cast
+
+# API versions
+api_version = autoclass('android.os.Build$VERSION').SDK_INT
+version_codes = autoclass('android.os.Build$VERSION_CODES')
+
+
+python_act = autoclass(JAVA_NAMESPACE + u'.PythonActivity')
+Rect = autoclass(u'android.graphics.Rect')
+mActivity = python_act.mActivity
+if mActivity:
+    # PyGame backend already has the listener so adding
+    # one here leads to a crash/too much cpu usage.
+    # SDL2 now does not need the listener so there is
+    # no point adding a processor intensive layout listener here.
+    height = 0
+
+    def get_keyboard_height():
+        rctx = Rect()
+        mActivity.getWindow().getDecorView().getWindowVisibleDisplayFrame(rctx)
+        # NOTE top should always be zero
+        rctx.top = 0
+        height = mActivity.getWindowManager().getDefaultDisplay().getHeight() - (rctx.bottom - rctx.top)
+        return height
+else:
+    def get_keyboard_height():
+        return 0
+
+# Flags for input_type, for requesting a particular type of keyboard
+# android FLAGS
+TYPE_CLASS_DATETIME = 4
+TYPE_CLASS_NUMBER = 2
+TYPE_NUMBER_VARIATION_NORMAL = 0
+TYPE_NUMBER_VARIATION_PASSWORD = 16
+TYPE_CLASS_TEXT = 1
+TYPE_TEXT_FLAG_AUTO_COMPLETE = 65536
+TYPE_TEXT_FLAG_AUTO_CORRECT = 32768
+TYPE_TEXT_FLAG_NO_SUGGESTIONS = 524288
+TYPE_TEXT_VARIATION_EMAIL_ADDRESS = 32
+TYPE_TEXT_VARIATION_NORMAL = 0
+TYPE_TEXT_VARIATION_PASSWORD = 128
+TYPE_TEXT_VARIATION_POSTAL_ADDRESS = 112
+TYPE_TEXT_VARIATION_URI = 16
+TYPE_CLASS_PHONE = 3
+
+IF BOOTSTRAP == 'sdl2':
+    def remove_presplash():
+        # Remove android presplash in SDL2 bootstrap.
+        mActivity.removeLoadingScreen()
+
+def show_keyboard(target, input_type):
+    if input_type == 'text':
+        _input_type = TYPE_CLASS_TEXT
+    elif input_type == 'number':
+        _input_type = TYPE_CLASS_NUMBER
+    elif input_type == 'url':
+        _input_type = \
+            TYPE_CLASS_TEXT | TYPE_TEXT_VARIATION_URI
+    elif input_type == 'mail':
+        _input_type = \
+            TYPE_CLASS_TEXT | TYPE_TEXT_VARIATION_EMAIL_ADDRESS
+    elif input_type == 'datetime':
+        _input_type = TYPE_CLASS_DATETIME
+    elif input_type == 'tel':
+        _input_type = TYPE_CLASS_PHONE
+    elif input_type == 'address':
+        _input_type = TYPE_TEXT_VARIATION_POSTAL_ADDRESS
+    else:
+        # fall back to a plain text keyboard for unknown input types
+        _input_type = TYPE_CLASS_TEXT
+
+    if hasattr(target, 'password') and target.password:
+        if _input_type == TYPE_CLASS_TEXT:
+            _input_type |= TYPE_TEXT_VARIATION_PASSWORD
+        elif _input_type == TYPE_CLASS_NUMBER:
+            _input_type |= TYPE_NUMBER_VARIATION_PASSWORD
+
+    if hasattr(target, 'keyboard_suggestions') and not target.keyboard_suggestions:
+        if _input_type == TYPE_CLASS_TEXT:
+            _input_type = TYPE_CLASS_TEXT | \
+                TYPE_TEXT_FLAG_NO_SUGGESTIONS
+
+    android_show_keyboard(_input_type)
+
+def hide_keyboard():
+    android_hide_keyboard()
+
+# Build info.
+cdef extern char* BUILD_MANUFACTURER
+cdef extern char* BUILD_MODEL
+cdef extern char* BUILD_PRODUCT
+cdef extern char* BUILD_VERSION_RELEASE
+
+cdef extern void android_get_buildinfo()
+
+class BuildInfo:
+    MANUFACTURER = None
+    MODEL = None
+    PRODUCT = None
+    VERSION_RELEASE = None
+
+def get_buildinfo():
+    android_get_buildinfo()
+    binfo = BuildInfo()
+    binfo.MANUFACTURER = BUILD_MANUFACTURER
+    binfo.MODEL = BUILD_MODEL
+    binfo.PRODUCT = BUILD_PRODUCT
+    binfo.VERSION_RELEASE = BUILD_VERSION_RELEASE
+    return binfo
+
+IF IS_PYGAME:
+    # Activate input - required to receive input events.
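+    # (pygame bootstrap only; the SDL2 bootstrap wires up input itself)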
+    cdef extern void android_activate_input()
+
+    def init():
+        android_activate_input()
+
+    # Action send
+    cdef extern void android_action_send(char*, char*, char*, char*, char*)
+    def action_send(mimetype, filename=None, subject=None, text=None,
+                    chooser_title=None):
+        cdef char *j_mimetype = mimetype
+        cdef char *j_filename = NULL
+        cdef char *j_subject = NULL
+        cdef char *j_text = NULL
+        cdef char *j_chooser_title = NULL
+        if filename is not None:
+            j_filename = filename
+        if subject is not None:
+            j_subject = subject
+        if text is not None:
+            j_text = text
+        if chooser_title is not None:
+            j_chooser_title = chooser_title
+        android_action_send(j_mimetype, j_filename, j_subject, j_text,
+                            j_chooser_title)
+
+    cdef extern int android_checkstop()
+    cdef extern void android_ackstop()
+
+    def check_stop():
+        return android_checkstop()
+
+    def ack_stop():
+        android_ackstop()
+
+# -------------------------------------------------------------------
+# URL Opening.
+def open_url(url):
+    Intent = autoclass('android.content.Intent')
+    Uri = autoclass('android.net.Uri')
+    browserIntent = Intent()
+    browserIntent.setAction(Intent.ACTION_VIEW)
+    browserIntent.setData(Uri.parse(url))
+    currentActivity = cast('android.app.Activity', mActivity)
+    currentActivity.startActivity(browserIntent)
+    return True
+
+# Web browser support.
+class AndroidBrowser(object):
+    def open(self, url, new=0, autoraise=True):
+        return open_url(url)
+    def open_new(self, url):
+        return open_url(url)
+    def open_new_tab(self, url):
+        return open_url(url)
+
+import webbrowser
+webbrowser.register('android', AndroidBrowser)
+
+cdef extern void android_start_service(char *, char *, char *)
+def start_service(title=None, description=None, arg=None):
+    cdef char *j_title = NULL
+    cdef char *j_description = NULL
+    cdef char *j_arg = NULL
+    if title is not None:
+        j_title = title
+    if description is not None:
+        j_description = description
+    if arg is not None:
+        j_arg = arg
+    android_start_service(j_title, j_description, j_arg)
+
+cdef extern void android_stop_service()
+def stop_service():
+    android_stop_service()
+
+class AndroidService(object):
+    '''Android service class.
+    Run ``service/main.py`` from application directory as a service.
+
+    :Parameters:
+        `title`: str, defaults to 'Python service'
+            Notification title.
+
+        `description`: str, defaults to 'Kivy Python service started'
+            Notification text.
+    '''
+
+    def __init__(self, title='Python service',
+                 description='Kivy Python service started'):
+        self.title = title
+        self.description = description
+
+    def start(self, arg=''):
+        '''Start the service.
+
+        :Parameters:
+            `arg`: str, defaults to ''
+                Argument to pass to the service, through the
+                environment variable ``PYTHON_SERVICE_ARGUMENT``.
+        '''
+        start_service(self.title, self.description, arg)
+
+    def stop(self):
+        '''Stop the service.
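+
+        A minimal lifecycle sketch using this class as defined above::
+
+            service = AndroidService('My service', 'running')
+            service.start('some argument')
+            # ... later ...
+            service.stop()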
+        '''
+        stop_service()
+
+
diff --git a/p4a/pythonforandroidold/recipes/android/src/android/_android_billing.pyx b/p4a/pythonforandroidold/recipes/android/src/android/_android_billing.pyx
new file mode 100644
index 0000000..bd6bb2e
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/android/src/android/_android_billing.pyx
@@ -0,0 +1,81 @@
+# -------------------------------------------------------------------
+# Billing
+cdef extern void android_billing_service_start()
+cdef extern void android_billing_service_stop()
+cdef extern void android_billing_buy(char *sku)
+cdef extern char *android_billing_get_purchased_items()
+cdef extern char *android_billing_get_pending_message()
+
+class BillingService(object):
+
+    BILLING_ACTION_SUPPORTED = 'billingsupported'
+    BILLING_ACTION_ITEMSCHANGED = 'itemschanged'
+
+    BILLING_TYPE_INAPP = 'inapp'
+    BILLING_TYPE_SUBSCRIPTION = 'subs'
+
+    def __init__(self, callback):
+        super(BillingService, self).__init__()
+        self.callback = callback
+        self.purchased_items = None
+        android_billing_service_start()
+
+    def _stop(self):
+        android_billing_service_stop()
+
+    def buy(self, sku):
+        cdef char *j_sku = sku
+        android_billing_buy(j_sku)
+
+    def get_purchased_items(self):
+        cdef char *items = NULL
+        cdef bytes pitems
+        items = android_billing_get_purchased_items()
+        if items == NULL:
+            return []
+        pitems = items
+        ret = {}
+        for item in pitems.split('\n'):
+            if not item:
+                continue
+            sku, qt = item.split(',')
+            ret[sku] = {'qt': int(qt)}
+        return ret
+
+    def check(self, *largs):
+        cdef char *message
+        cdef bytes pymessage
+
+        while True:
+            message = android_billing_get_pending_message()
+            if message == NULL:
+                break
+            pymessage = message
+            self._handle_message(pymessage)
+
+        if self.purchased_items is None:
+            self._check_new_items()
+
+    def _handle_message(self, message):
+        action, data = message.split('|', 1)
+        #print "HANDLE MESSAGE-----", (action, data)
+
+        if action == 'billingSupported':
+            tp, value = data.split('|')
+            value = True if value == '1' else False
+            self.callback(BillingService.BILLING_ACTION_SUPPORTED, tp, value)
+
+        elif action == 'requestPurchaseResponse':
+            self._check_new_items()
+
+        elif action == 'purchaseStateChange':
+            self._check_new_items()
+
+        elif action == 'restoreTransaction':
+            self._check_new_items()
+
+    def _check_new_items(self):
+        items = self.get_purchased_items()
+        if self.purchased_items != items:
+            self.purchased_items = items
+            self.callback(BillingService.BILLING_ACTION_ITEMSCHANGED, self.purchased_items)
diff --git a/p4a/pythonforandroidold/recipes/android/src/android/_android_billing_jni.c b/p4a/pythonforandroidold/recipes/android/src/android/_android_billing_jni.c
new file mode 100644
index 0000000..d438df3
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/android/src/android/_android_billing_jni.c
@@ -0,0 +1,120 @@
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <android/log.h>
+
+#include "config.h"
+
+#define aassert(x) { if (!x) { __android_log_print(ANDROID_LOG_ERROR, "android_jni", "Assertion failed. %s:%d", __FILE__, __LINE__); abort(); }}
%s:%d", __FILE__, __LINE__); abort(); }} +#define PUSH_FRAME { (*env)->PushLocalFrame(env, 16); } +#define POP_FRAME { (*env)->PopLocalFrame(env, NULL); } + +void android_billing_service_start() { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, JNI_NAMESPACE "/PythonActivity"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "billingServiceStart", "()V"); + aassert(mid); + } + + PUSH_FRAME; + (*env)->CallStaticVoidMethod(env, cls, mid); + POP_FRAME; +} + +void android_billing_service_stop() { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, JNI_NAMESPACE "/PythonActivity"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "billingServiceStop", "()V"); + aassert(mid); + } + + PUSH_FRAME; + (*env)->CallStaticVoidMethod(env, cls, mid); + POP_FRAME; +} + +void android_billing_buy(char *sku) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, JNI_NAMESPACE "/PythonActivity"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "billingBuy", "(Ljava/lang/String;)V"); + aassert(mid); + } + + PUSH_FRAME; + + (*env)->CallStaticVoidMethod( + env, cls, mid, + (*env)->NewStringUTF(env, sku) + ); + + POP_FRAME; +} + +char *android_billing_get_purchased_items() { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + jobject jreading; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, JNI_NAMESPACE "/PythonActivity"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "billingGetPurchasedItems", "()Ljava/lang/String;"); + aassert(mid); + } + + PUSH_FRAME; + jreading = (*env)->CallStaticObjectMethod(env, cls, mid); + const char * reading = (*env)->GetStringUTFChars(env, jreading, 0); + POP_FRAME; + + return reading; +} + +char *android_billing_get_pending_message() { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + jobject jreading; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, JNI_NAMESPACE "/PythonActivity"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "billingGetPendingMessage", "()Ljava/lang/String;"); + aassert(mid); + } + + PUSH_FRAME; + jreading = (*env)->CallStaticObjectMethod(env, cls, mid); + const char * reading = (*env)->GetStringUTFChars(env, jreading, 0); + POP_FRAME; + + return reading; +} + diff --git a/p4a/pythonforandroidold/recipes/android/src/android/_android_jni.c b/p4a/pythonforandroidold/recipes/android/src/android/_android_jni.c new file mode 100644 index 0000000..8eee770 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/src/android/_android_jni.c @@ -0,0 +1,358 @@ +#include +#include +#include +#include +#include + +#include "config.h" + +#define aassert(x) { if (!x) { __android_log_print(ANDROID_LOG_ERROR, "android_jni", "Assertion failed. 
%s:%d", __FILE__, __LINE__); abort(); }} +#define PUSH_FRAME { (*env)->PushLocalFrame(env, 16); } +#define POP_FRAME { (*env)->PopLocalFrame(env, NULL); } + +void android_vibrate(double seconds) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Hardware"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "vibrate", "(D)V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod( + env, cls, mid, + (jdouble) seconds); +} + +void android_accelerometer_enable(int enable) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Hardware"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "accelerometerEnable", "(Z)V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod( + env, cls, mid, + (jboolean) enable); +} + +void android_wifi_scanner_enable(void){ + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Hardware"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "enableWifiScanner", "()V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod(env, cls, mid); +} + + +char * android_wifi_scan() { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + jobject jreading; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Hardware"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "scanWifi", "()Ljava/lang/String;"); + aassert(mid); + } + + PUSH_FRAME; + jreading = (*env)->CallStaticObjectMethod(env, cls, mid); + const char * reading = (*env)->GetStringUTFChars(env, jreading, 0); + POP_FRAME; + + return reading; +} + +void android_accelerometer_reading(float *values) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + jobject jvalues; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Hardware"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "accelerometerReading", "()[F"); + aassert(mid); + } + + PUSH_FRAME; + + jvalues = (*env)->CallStaticObjectMethod(env, cls, mid); + (*env)->GetFloatArrayRegion(env, jvalues, 0, 3, values); + + POP_FRAME; +} + +int android_get_dpi(void) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Hardware"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "getDPI", "()I"); + aassert(mid); + } + + return (*env)->CallStaticIntMethod(env, cls, mid); +} + +void android_show_keyboard(int input_type) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Hardware"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "showKeyboard", "(I)V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod(env, cls, mid, (jint) input_type); +} + +void android_hide_keyboard(void) { + static JNIEnv *env = NULL; + 
static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Hardware"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "hideKeyboard", "()V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod(env, cls, mid); +} + +char* BUILD_MANUFACTURER = NULL; +char* BUILD_MODEL = NULL; +char* BUILD_PRODUCT = NULL; +char* BUILD_VERSION_RELEASE = NULL; + +void android_get_buildinfo() { + static JNIEnv *env = NULL; + + if (env == NULL) { + jclass *cls = NULL; + jfieldID fid; + jstring sval; + + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + + cls = (*env)->FindClass(env, "android/os/Build"); + + fid = (*env)->GetStaticFieldID(env, cls, "MANUFACTURER", "Ljava/lang/String;"); + sval = (jstring) (*env)->GetStaticObjectField(env, cls, fid); + BUILD_MANUFACTURER = (*env)->GetStringUTFChars(env, sval, 0); + + fid = (*env)->GetStaticFieldID(env, cls, "MODEL", "Ljava/lang/String;"); + sval = (jstring) (*env)->GetStaticObjectField(env, cls, fid); + BUILD_MODEL = (*env)->GetStringUTFChars(env, sval, 0); + + fid = (*env)->GetStaticFieldID(env, cls, "PRODUCT", "Ljava/lang/String;"); + sval = (jstring) (*env)->GetStaticObjectField(env, cls, fid); + BUILD_PRODUCT = (*env)->GetStringUTFChars(env, sval, 0); + + cls = (*env)->FindClass(env, "android/os/Build$VERSION"); + + fid = (*env)->GetStaticFieldID(env, cls, "RELEASE", "Ljava/lang/String;"); + sval = (jstring) (*env)->GetStaticObjectField(env, cls, fid); + BUILD_VERSION_RELEASE = (*env)->GetStringUTFChars(env, sval, 0); + } +} + +#if IS_PYGAME +void android_activate_input(void) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/SDLSurfaceView"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "activateInput", "()V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod(env, cls, mid); +} + +int android_checkstop(void) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/SDLSurfaceView"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "checkStop", "()I"); + aassert(mid); + } + + return (*env)->CallStaticIntMethod(env, cls, mid); +} + +void android_ackstop(void) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/SDLSurfaceView"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "ackStop", "()I"); + aassert(mid); + } + + (*env)->CallStaticIntMethod(env, cls, mid); +} + +void android_action_send(char *mimeType, char *filename, char *subject, char *text, char *chooser_title) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/Action"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "send", + "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); + aassert(mid); + } + + jstring j_mimeType = (*env)->NewStringUTF(env, mimeType); + jstring j_filename = NULL; + jstring j_subject = NULL; + jstring j_text = NULL; + 
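+    /* jstrings left as NULL arrive as Java null on the other side
+       (presumably treated as "not supplied" by the Java send() method). */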
jstring j_chooser_title = NULL;
+    if ( filename != NULL )
+        j_filename = (*env)->NewStringUTF(env, filename);
+    if ( subject != NULL )
+        j_subject = (*env)->NewStringUTF(env, subject);
+    if ( text != NULL )
+        j_text = (*env)->NewStringUTF(env, text);
+    if ( chooser_title != NULL )
+        j_chooser_title = (*env)->NewStringUTF(env, chooser_title);
+
+    (*env)->CallStaticVoidMethod(
+        env, cls, mid,
+        j_mimeType, j_filename, j_subject, j_text,
+        j_chooser_title);
+}
+
+void android_open_url(char *url) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, "org/renpy/android/SDLSurfaceView");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "openUrl", "(Ljava/lang/String;)V");
+        aassert(mid);
+    }
+
+    PUSH_FRAME;
+
+    (*env)->CallStaticVoidMethod(
+        env, cls, mid,
+        (*env)->NewStringUTF(env, url)
+        );
+
+    POP_FRAME;
+}
+#endif // IS_PYGAME
+
+void android_start_service(char *title, char *description, char *arg) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, JNI_NAMESPACE "/PythonActivity");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "start_service",
+            "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
+        aassert(mid);
+    }
+
+    jstring j_title = NULL;
+    jstring j_description = NULL;
+    jstring j_arg = NULL;
+    if ( title != 0 )
+        j_title = (*env)->NewStringUTF(env, title);
+    if ( description != 0 )
+        j_description = (*env)->NewStringUTF(env, description);
+    if ( arg != 0 )
+        j_arg = (*env)->NewStringUTF(env, arg);
+
+    (*env)->CallStaticVoidMethod(env, cls, mid, j_title, j_description, j_arg);
+}
+
+void android_stop_service() {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, JNI_NAMESPACE "/PythonActivity");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "stop_service", "()V");
+        aassert(mid);
+    }
+
+    (*env)->CallStaticVoidMethod(env, cls, mid);
+}
diff --git a/p4a/pythonforandroidold/recipes/android/src/android/_android_sound.pyx b/p4a/pythonforandroidold/recipes/android/src/android/_android_sound.pyx
new file mode 100644
index 0000000..9a486eb
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/android/src/android/_android_sound.pyx
@@ -0,0 +1,125 @@
+cdef extern void android_sound_queue(int, char *, char *, long long, long long)
+cdef extern void android_sound_play(int, char *, char *, long long, long long)
+cdef extern void android_sound_stop(int)
+cdef extern void android_sound_seek(int, float)
+cdef extern void android_sound_dequeue(int)
+cdef extern void android_sound_playing_name(int, char *, int)
+cdef extern void android_sound_pause(int)
+cdef extern void android_sound_unpause(int)
+
+cdef extern void android_sound_set_volume(int, float)
+cdef extern void android_sound_set_secondary_volume(int, float)
+cdef extern void android_sound_set_pan(int, float)
+
+cdef extern int android_sound_queue_depth(int)
+cdef extern int android_sound_get_pos(int)
+cdef extern int android_sound_get_length(int)
+
+channels = set()
+volumes = {}
+
+def queue(channel, file, name, fadein=0, tight=False):
+
+    channels.add(channel)
+
+    real_fn = file.name
+    base = getattr(file, "base", -1)
+    length = getattr(file, "length", -1)
+
+    android_sound_queue(channel, name,
+                        real_fn, base, length)
+
+def play(channel, file, name, paused=False, fadein=0, tight=False):
+
+    channels.add(channel)
+
+    real_fn = file.name
+    base = getattr(file, "base", -1)
+    length = getattr(file, "length", -1)
+
+    android_sound_play(channel, name, real_fn, base, length)
+
+def seek(channel, position):
+    android_sound_seek(channel, position)
+
+def stop(channel):
+    android_sound_stop(channel)
+
+def dequeue(channel, even_tight=False):
+    android_sound_dequeue(channel)
+
+def queue_depth(channel):
+    return android_sound_queue_depth(channel)
+
+def playing_name(channel):
+    cdef char buf[1024]
+
+    android_sound_playing_name(channel, buf, 1024)
+
+    rv = buf
+    if not len(rv):
+        return None
+    return rv
+
+def pause(channel):
+    android_sound_pause(channel)
+    return
+
+def unpause(channel):
+    android_sound_unpause(channel)
+    return
+
+def unpause_all():
+    for i in channels:
+        unpause(i)
+
+def pause_all():
+    for i in channels:
+        pause(i)
+
+def fadeout(channel, ms):
+    stop(channel)
+
+def busy(channel):
+    return playing_name(channel) is not None
+
+def get_pos(channel):
+    return android_sound_get_pos(channel)
+
+def get_length(channel):
+    return android_sound_get_length(channel)
+
+def set_volume(channel, volume):
+    android_sound_set_volume(channel, volume)
+    volumes[channel] = volume
+
+def set_secondary_volume(channel, volume):
+    android_sound_set_secondary_volume(channel, volume)
+
+def set_pan(channel, pan):
+    android_sound_set_pan(channel, pan)
+
+def set_end_event(channel, event):
+    return
+
+def get_volume(channel):
+    return volumes.get(channel, 1.0)
+
+def init(freq, stereo, samples, status=False):
+    return
+
+def quit():
+    for i in channels:
+        stop(i)
+
+def periodic():
+    return
+
+def alloc_event(surf):
+    return
+
+def refresh_event():
+    return
+
+def check_version(version):
+    return
+
diff --git a/p4a/pythonforandroidold/recipes/android/src/android/_android_sound_jni.c b/p4a/pythonforandroidold/recipes/android/src/android/_android_sound_jni.c
new file mode 100644
index 0000000..ee6c60b
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/android/src/android/_android_sound_jni.c
@@ -0,0 +1,308 @@
+#include <jni.h>
+#include <stdlib.h>
+#include <string.h>
+#include <android/log.h>
+
+JNIEnv *SDL_ANDROID_GetJNIEnv();
+
+#define aassert(x) { if (!x) { __android_log_print(ANDROID_LOG_ERROR, "android_sound_jni", "Assertion failed. %s:%d", __FILE__, __LINE__); abort(); }}
%s:%d", __FILE__, __LINE__); abort(); }} +#define PUSH_FRAME { (*env)->PushLocalFrame(env, 16); } +#define POP_FRAME { (*env)->PopLocalFrame(env, NULL); } + + +void android_sound_queue(int channel, char *filename, char *real_fn, long long base, long long length) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/RenPySound"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "queue", "(ILjava/lang/String;Ljava/lang/String;JJ)V"); + aassert(mid); + } + + PUSH_FRAME; + + (*env)->CallStaticVoidMethod( + env, cls, mid, + channel, + (*env)->NewStringUTF(env, filename), + (*env)->NewStringUTF(env, real_fn), + (jlong) base, + (jlong) length); + + POP_FRAME; +} + +void android_sound_play(int channel, char *filename, char *real_fn, long long base, long long length) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/RenPySound"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "play", "(ILjava/lang/String;Ljava/lang/String;JJ)V"); + aassert(mid); + } + + PUSH_FRAME; + + (*env)->CallStaticVoidMethod( + env, cls, mid, + channel, + (*env)->NewStringUTF(env, filename), + (*env)->NewStringUTF(env, real_fn), + (jlong) base, + (jlong) length); + + POP_FRAME; +} + +void android_sound_seek(int channel, float position){ + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/RenPySound"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "seek", "(IF)V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod( + env, cls, mid, + channel, + (jfloat) position); +} + +void android_sound_stop(int channel) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/RenPySound"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "stop", "(I)V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod( + env, cls, mid, + channel); +} + +void android_sound_dequeue(int channel) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/RenPySound"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "dequeue", "(I)V"); + aassert(mid); + } + + (*env)->CallStaticVoidMethod( + env, cls, mid, + channel); +} + +int android_sound_queue_depth(int channel) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); + cls = (*env)->FindClass(env, "org/renpy/android/RenPySound"); + aassert(cls); + mid = (*env)->GetStaticMethodID(env, cls, "queue_depth", "(I)I"); + aassert(mid); + } + + (*env)->CallStaticIntMethod( + env, cls, mid, + channel); +} + +void android_sound_playing_name(int channel, char *buf, int buflen) { + static JNIEnv *env = NULL; + static jclass *cls = NULL; + static jmethodID mid = NULL; + + jobject s = NULL; + char *jbuf; + + if (env == NULL) { + env = SDL_ANDROID_GetJNIEnv(); + aassert(env); 
+        cls = (*env)->FindClass(env, "org/renpy/android/RenPySound");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "playing_name", "(I)Ljava/lang/String;");
+        aassert(mid);
+    }
+
+    PUSH_FRAME;
+
+    s = (*env)->CallStaticObjectMethod(
+        env, cls, mid,
+        channel);
+
+    jbuf = (*env)->GetStringUTFChars(env, s, NULL);
+    strncpy(buf, jbuf, buflen);
+    buf[buflen - 1] = '\0'; /* strncpy leaves buf unterminated when jbuf fills it */
+    (*env)->ReleaseStringUTFChars(env, s, jbuf);
+
+    POP_FRAME;
+}
+
+void android_sound_set_volume(int channel, float value) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, "org/renpy/android/RenPySound");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "set_volume", "(IF)V");
+        aassert(mid);
+    }
+
+    (*env)->CallStaticVoidMethod(
+        env, cls, mid,
+        channel,
+        (jfloat) value);
+}
+
+void android_sound_set_secondary_volume(int channel, float value) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, "org/renpy/android/RenPySound");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "set_secondary_volume", "(IF)V");
+        aassert(mid);
+    }
+
+    (*env)->CallStaticVoidMethod(
+        env, cls, mid,
+        channel,
+        (jfloat) value);
+}
+
+void android_sound_set_pan(int channel, float value) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, "org/renpy/android/RenPySound");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "set_pan", "(IF)V");
+        aassert(mid);
+    }
+
+    (*env)->CallStaticVoidMethod(
+        env, cls, mid,
+        channel,
+        (jfloat) value);
+}
+
+void android_sound_pause(int channel) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, "org/renpy/android/RenPySound");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "pause", "(I)V");
+        aassert(mid);
+    }
+
+    (*env)->CallStaticVoidMethod(
+        env, cls, mid,
+        channel);
+}
+
+void android_sound_unpause(int channel) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, "org/renpy/android/RenPySound");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "unpause", "(I)V");
+        aassert(mid);
+    }
+
+    (*env)->CallStaticVoidMethod(
+        env, cls, mid,
+        channel);
+}
+
+int android_sound_get_pos(int channel) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, "org/renpy/android/RenPySound");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "get_pos", "(I)I");
+        aassert(mid);
+    }
+
+    return (*env)->CallStaticIntMethod(
+        env, cls, mid,
+        channel);
+}
+
+int android_sound_get_length(int channel) {
+    static JNIEnv *env = NULL;
+    static jclass *cls = NULL;
+    static jmethodID mid = NULL;
+
+    if (env == NULL) {
+        env = SDL_ANDROID_GetJNIEnv();
+        aassert(env);
+        cls = (*env)->FindClass(env, "org/renpy/android/RenPySound");
+        aassert(cls);
+        mid = (*env)->GetStaticMethodID(env, cls, "get_length", "(I)I");
+        aassert(mid);
+    }
+
+    return
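+    /* length in milliseconds, handed straight back from the Java side */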
(*env)->CallStaticIntMethod( + env, cls, mid, + channel); +} diff --git a/p4a/pythonforandroidold/recipes/android/src/android/activity.py b/p4a/pythonforandroidold/recipes/android/src/android/activity.py new file mode 100644 index 0000000..cafbbda --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/src/android/activity.py @@ -0,0 +1,63 @@ +from jnius import PythonJavaClass, autoclass, java_method +from android.config import JAVA_NAMESPACE, JNI_NAMESPACE + +_activity = autoclass(JAVA_NAMESPACE + '.PythonActivity').mActivity + +_callbacks = { + 'on_new_intent': [], + 'on_activity_result': [], +} + + +class NewIntentListener(PythonJavaClass): + __javainterfaces__ = [JNI_NAMESPACE + '/PythonActivity$NewIntentListener'] + __javacontext__ = 'app' + + def __init__(self, callback, **kwargs): + super(NewIntentListener, self).__init__(**kwargs) + self.callback = callback + + @java_method('(Landroid/content/Intent;)V') + def onNewIntent(self, intent): + self.callback(intent) + + +class ActivityResultListener(PythonJavaClass): + __javainterfaces__ = [JNI_NAMESPACE + '/PythonActivity$ActivityResultListener'] + __javacontext__ = 'app' + + def __init__(self, callback): + super(ActivityResultListener, self).__init__() + self.callback = callback + + @java_method('(IILandroid/content/Intent;)V') + def onActivityResult(self, requestCode, resultCode, intent): + self.callback(requestCode, resultCode, intent) + + +def bind(**kwargs): + for event, callback in kwargs.items(): + if event not in _callbacks: + raise Exception('Unknown {!r} event'.format(event)) + elif event == 'on_new_intent': + listener = NewIntentListener(callback) + _activity.registerNewIntentListener(listener) + _callbacks[event].append(listener) + elif event == 'on_activity_result': + listener = ActivityResultListener(callback) + _activity.registerActivityResultListener(listener) + _callbacks[event].append(listener) + + +def unbind(**kwargs): + for event, callback in kwargs.items(): + if event not in _callbacks: + raise Exception('Unknown {!r} event'.format(event)) + else: + for listener in _callbacks[event][:]: + if listener.callback == callback: + _callbacks[event].remove(listener) + if event == 'on_new_intent': + _activity.unregisterNewIntentListener(listener) + elif event == 'on_activity_result': + _activity.unregisterActivityResultListener(listener) diff --git a/p4a/pythonforandroidold/recipes/android/src/android/billing.py b/p4a/pythonforandroidold/recipes/android/src/android/billing.py new file mode 100644 index 0000000..0ea1008 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/src/android/billing.py @@ -0,0 +1,5 @@ +''' +Android Billing API +=================== + +''' diff --git a/p4a/pythonforandroidold/recipes/android/src/android/broadcast.py b/p4a/pythonforandroidold/recipes/android/src/android/broadcast.py new file mode 100644 index 0000000..cb34cd9 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/src/android/broadcast.py @@ -0,0 +1,78 @@ +# ------------------------------------------------------------------- +# Broadcast receiver bridge + +from jnius import autoclass, PythonJavaClass, java_method +from android.config import JAVA_NAMESPACE, JNI_NAMESPACE + + +class BroadcastReceiver(object): + + class Callback(PythonJavaClass): + __javainterfaces__ = [JNI_NAMESPACE + '/GenericBroadcastReceiverCallback'] + __javacontext__ = 'app' + + def __init__(self, callback, *args, **kwargs): + self.callback = callback + PythonJavaClass.__init__(self, *args, **kwargs) + + 
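+        # Invoked from Java when a broadcast matching the registered
+        # IntentFilter arrives; forwards (context, intent) to the callback.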
@java_method('(Landroid/content/Context;Landroid/content/Intent;)V')
+        def onReceive(self, context, intent):
+            self.callback(context, intent)
+
+    def __init__(self, callback, actions=None, categories=None):
+        super(BroadcastReceiver, self).__init__()
+        self.callback = callback
+
+        if not actions and not categories:
+            raise Exception('You need to define at least actions or categories')
+
+        def _expand_partial_name(partial_name):
+            if '.' in partial_name:
+                return partial_name  # It's actually a full dotted name
+            else:
+                name = 'ACTION_{}'.format(partial_name.upper())
+                if not hasattr(Intent, name):
+                    raise Exception('The intent {} doesn\'t exist'.format(name))
+                return getattr(Intent, name)
+
+        # resolve actions/categories first
+        Intent = autoclass('android.content.Intent')
+        resolved_actions = [_expand_partial_name(x) for x in actions or []]
+        resolved_categories = [_expand_partial_name(x) for x in categories or []]
+
+        # resolve android API
+        GenericBroadcastReceiver = autoclass(JAVA_NAMESPACE + '.GenericBroadcastReceiver')
+        IntentFilter = autoclass('android.content.IntentFilter')
+        HandlerThread = autoclass('android.os.HandlerThread')
+
+        # create a thread for handling events from the receiver
+        self.handlerthread = HandlerThread('handlerthread')
+
+        # create a listener
+        self.listener = BroadcastReceiver.Callback(self.callback)
+        self.receiver = GenericBroadcastReceiver(self.listener)
+        self.receiver_filter = IntentFilter()
+        for x in resolved_actions:
+            self.receiver_filter.addAction(x)
+        for x in resolved_categories:
+            self.receiver_filter.addCategory(x)
+
+    def start(self):
+        Handler = autoclass('android.os.Handler')
+        self.handlerthread.start()
+        self.handler = Handler(self.handlerthread.getLooper())
+        self.context.registerReceiver(
+            self.receiver, self.receiver_filter, None, self.handler)
+
+    def stop(self):
+        self.context.unregisterReceiver(self.receiver)
+        self.handlerthread.quit()
+
+    @property
+    def context(self):
+        from os import environ
+        if 'PYTHON_SERVICE_ARGUMENT' in environ:
+            PythonService = autoclass(JAVA_NAMESPACE + '.PythonService')
+            return PythonService.mService
+        PythonActivity = autoclass(JAVA_NAMESPACE + '.PythonActivity')
+        return PythonActivity.mActivity
diff --git a/p4a/pythonforandroidold/recipes/android/src/android/loadingscreen.py b/p4a/pythonforandroidold/recipes/android/src/android/loadingscreen.py
new file mode 100644
index 0000000..1dc1b67
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/android/src/android/loadingscreen.py
@@ -0,0 +1,7 @@
+
+from jnius import autoclass
+
+
+def hide_loading_screen():
+    python_activity = autoclass('org.kivy.android.PythonActivity')
+    python_activity.removeLoadingScreen()
diff --git a/p4a/pythonforandroidold/recipes/android/src/android/mixer.py b/p4a/pythonforandroidold/recipes/android/src/android/mixer.py
new file mode 100644
index 0000000..334f696
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/android/src/android/mixer.py
@@ -0,0 +1,322 @@
+# This module is, as much as possible, a clone of the pygame
+# mixer API.
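+#
+# A minimal usage sketch (assuming the module is importable as
+# android.mixer and that the app bundles a hypothetical "click.wav"):
+#
+#     from android import mixer
+#
+#     mixer.init()
+#     snd = mixer.Sound("click.wav")
+#     channel = snd.play()
+#     channel.set_volume(0.5)
+#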
+
+import android._android_sound as sound
+import time
+import threading
+import os
+
+condition = threading.Condition()
+
+
+def periodic():
+    for i in range(0, num_channels):
+        if i in channels:
+            channels[i].periodic()
+
+
+num_channels = 8
+reserved_channels = 0
+
+
+def init(frequency=22050, size=-16, channels=2, buffer=4096):
+    return None
+
+
+def pre_init(frequency=22050, size=-16, channels=2, buffersize=4096):
+    return None
+
+
+def quit():
+    stop()
+    return None
+
+
+def stop():
+    for i in range(0, num_channels):
+        sound.stop(i)
+
+
+def pause():
+    for i in range(0, num_channels):
+        sound.pause(i)
+
+
+def unpause():
+    for i in range(0, num_channels):
+        sound.unpause(i)
+
+
+def get_busy():
+    for i in range(0, num_channels):
+        if sound.busy(i):
+            return True
+
+    return False
+
+
+def fadeout(time):
+    # Fadeout doesn't work - it just immediately stops playback.
+    stop()
+
+
+# A map from channel number to Channel object.
+channels = {}
+
+
+def set_num_channels(count):
+    global num_channels
+    num_channels = count
+
+
+def get_num_channels():
+    return num_channels
+
+
+def set_reserved(count):
+    global reserved_channels
+    reserved_channels = count
+
+
+def find_channel(force=False):
+
+    busy = []
+
+    for i in range(reserved_channels, num_channels):
+        c = Channel(i)
+
+        if not c.get_busy():
+            return c
+
+        busy.append(c)
+
+    if not force:
+        return None
+
+    return min(busy, key=lambda x: x.play_time)
+
+
+class ChannelImpl(object):
+
+    def __init__(self, id):
+        self.id = id
+        self.loop = None
+        self.queued = None
+
+        self.play_time = time.time()
+
+    def periodic(self):
+        qd = sound.queue_depth(self.id)
+
+        if qd < 2:
+            self.queued = None
+
+        if self.loop is not None and sound.queue_depth(self.id) < 2:
+            # Re-queue the looping sound directly; going through queue()
+            # would clear self.loop.
+            sound.queue(self.id, self.loop.file, self.loop.serial)
+
+    def play(self, s, loops=0, maxtime=0, fade_ms=0):
+        if loops:
+            self.loop = s
+
+        sound.play(self.id, s.file, s.serial)
+
+        self.play_time = time.time()
+
+        with condition:
+            condition.notify()
+
+    def seek(self, position):
+        sound.seek(self.id, position)
+
+    def stop(self):
+        self.loop = None
+        sound.stop(self.id)
+
+    def pause(self):
+        sound.pause(self.id)
+
+    def unpause(self):
+        sound.unpause(self.id)
+
+    def fadeout(self, time):
+        # No fadeout
+        self.stop()
+
+    def set_volume(self, left, right=None):
+        sound.set_volume(self.id, left)
+
+    def get_volume(self):
+        return sound.get_volume(self.id)
+
+    def get_busy(self):
+        return sound.busy(self.id)
+
+    def get_sound(self):
+        is_busy = sound.busy(self.id)
+        if not is_busy:
+            return
+        serial = sound.playing_name(self.id)
+        if not serial:
+            return
+        return sounds.get(serial, None)
+
+    def queue(self, s):
+        self.loop = None
+        self.queued = s
+
+        sound.queue(self.id, s.file, s.serial)
+
+        with condition:
+            condition.notify()
+
+    def get_queue(self):
+        return self.queued
+
+    def get_pos(self):
+        return sound.get_pos(self.id)/1000.
+
+    def get_length(self):
+        return sound.get_length(self.id)/1000.
+
+
+def Channel(n):
+    """
+    Gets the channel with the given number.
+    """
+
+    rv = channels.get(n, None)
+    if rv is None:
+        rv = ChannelImpl(n)
+        channels[n] = rv
+
+    return rv
+
+
+sound_serial = 0
+sounds = {}
+
+
+class Sound(object):
+
+    def __init__(self, what):
+
+        # Doesn't support buffers.
+
+        global sound_serial
+
+        self._channel = None
+        self._volume = 1.
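+        # Each Sound gets a unique serial string; the Java layer reports it
+        # back (see playing_name), letting get_sound() map a busy channel to
+        # its Sound object.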
+ self.serial = str(sound_serial) + sound_serial += 1 + + if isinstance(what, file): # noqa F821 + self.file = what + else: + self.file = file(os.path.abspath(what), "rb") # noqa F821 + + sounds[self.serial] = self + + def play(self, loops=0, maxtime=0, fade_ms=0): + # avoid new play if the sound is already playing + # -> same behavior as standard pygame. + if self._channel is not None: + if self._channel.get_sound() is self: + return + self._channel = channel = find_channel(True) + channel.set_volume(self._volume) + channel.play(self, loops=loops) + return channel + + def stop(self): + for i in range(0, num_channels): + if Channel(i).get_sound() is self: + Channel(i).stop() + + def fadeout(self, time): + self.stop() + + def set_volume(self, left, right=None): + self._volume = left + if self._channel: + if self._channel.get_sound() is self: + self._channel.set_volume(self._volume) + + def get_volume(self): + return self._volume + + def get_num_channels(self): + rv = 0 + + for i in range(0, num_channels): + if Channel(i).get_sound() is self: + rv += 1 + + return rv + + def get_length(self): + return 1.0 + + +music_channel = Channel(256) +music_sound = None + + +class music(object): + + @staticmethod + def load(filename): + + music_channel.stop() + + global music_sound + music_sound = Sound(filename) + + @staticmethod + def play(loops=0, start=0.0): + # No start. + + music_channel.play(music_sound, loops=loops) + + @staticmethod + def rewind(): + music_channel.play(music_sound) + + @staticmethod + def seek(position): + music_channel.seek(position) + + @staticmethod + def stop(): + music_channel.stop() + + @staticmethod + def pause(): + music_channel.pause() + + @staticmethod + def unpause(): + music_channel.unpause() + + @staticmethod + def fadeout(time): + music_channel.fadeout(time) + + @staticmethod + def set_volume(value): + music_channel.set_volume(value) + + @staticmethod + def get_volume(): + return music_channel.get_volume() + + @staticmethod + def get_busy(): + return music_channel.get_busy() + + @staticmethod + def get_pos(): + return music_channel.get_pos() + + @staticmethod + def queue(filename): + return music_channel.queue(Sound(filename)) diff --git a/p4a/pythonforandroidold/recipes/android/src/android/permissions.py b/p4a/pythonforandroidold/recipes/android/src/android/permissions.py new file mode 100644 index 0000000..6c2d384 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/src/android/permissions.py @@ -0,0 +1,438 @@ + +try: + from jnius import autoclass +except ImportError: + # To allow importing by build/manifest-creating code without + # pyjnius being present: + def autoclass(item): + raise RuntimeError("pyjnius not available") + + +class Permission: + ACCEPT_HANDOVER = "android.permission.ACCEPT_HANDOVER" + ACCESS_COARSE_LOCATION = "android.permission.ACCESS_COARSE_LOCATION" + ACCESS_LOCATION_EXTRA_COMMANDS = ( + "android.permission.ACCESS_LOCATION_EXTRA_COMMANDS" + ) + ACCESS_NETWORK_STATE = "android.permission.ACCESS_NETWORK_STATE" + ACCESS_NOTIFICATION_POLICY = ( + "android.permission.ACCESS_NOTIFICATION_POLICY" + ) + ACCESS_WIFI_STATE = "android.permission.ACCESS_WIFI_STATE" + ADD_VOICEMAIL = "com.android.voicemail.permission.ADD_VOICEMAIL" + ANSWER_PHONE_CALLS = "android.permission.ANSWER_PHONE_CALLS" + BATTERY_STATS = "android.permission.BATTERY_STATS" + BIND_ACCESSIBILITY_SERVICE = ( + "android.permission.BIND_ACCESSIBILITY_SERVICE" + ) + BIND_AUTOFILL_SERVICE = "android.permission.BIND_AUTOFILL_SERVICE" + BIND_CARRIER_MESSAGING_SERVICE = ( # note: 
deprecated in api 23+ + "android.permission.BIND_CARRIER_MESSAGING_SERVICE" + ) + BIND_CARRIER_SERVICES = ( # replaces BIND_CARRIER_MESSAGING_SERVICE + "android.permission.BIND_CARRIER_SERVICES" + ) + BIND_CHOOSER_TARGET_SERVICE = ( + "android.permission.BIND_CHOOSER_TARGET_SERVICE" + ) + BIND_CONDITION_PROVIDER_SERVICE = ( + "android.permission.BIND_CONDITION_PROVIDER_SERVICE" + ) + BIND_DEVICE_ADMIN = "android.permission.BIND_DEVICE_ADMIN" + BIND_DREAM_SERVICE = "android.permission.BIND_DREAM_SERVICE" + BIND_INCALL_SERVICE = "android.permission.BIND_INCALL_SERVICE" + BIND_INPUT_METHOD = ( + "android.permission.BIND_INPUT_METHOD" + ) + BIND_MIDI_DEVICE_SERVICE = ( + "android.permission.BIND_MIDI_DEVICE_SERVICE" + ) + BIND_NFC_SERVICE = ( + "android.permission.BIND_NFC_SERVICE" + ) + BIND_NOTIFICATION_LISTENER_SERVICE = ( + "android.permission.BIND_NOTIFICATION_LISTENER_SERVICE" + ) + BIND_PRINT_SERVICE = ( + "android.permission.BIND_PRINT_SERVICE" + ) + BIND_QUICK_SETTINGS_TILE = ( + "android.permission.BIND_QUICK_SETTINGS_TILE" + ) + BIND_REMOTEVIEWS = ( + "android.permission.BIND_REMOTEVIEWS" + ) + BIND_SCREENING_SERVICE = ( + "android.permission.BIND_SCREENING_SERVICE" + ) + BIND_TELECOM_CONNECTION_SERVICE = ( + "android.permission.BIND_TELECOM_CONNECTION_SERVICE" + ) + BIND_TEXT_SERVICE = ( + "android.permission.BIND_TEXT_SERVICE" + ) + BIND_TV_INPUT = ( + "android.permission.BIND_TV_INPUT" + ) + BIND_VISUAL_VOICEMAIL_SERVICE = ( + "android.permission.BIND_VISUAL_VOICEMAIL_SERVICE" + ) + BIND_VOICE_INTERACTION = ( + "android.permission.BIND_VOICE_INTERACTION" + ) + BIND_VPN_SERVICE = ( + "android.permission.BIND_VPN_SERVICE" + ) + BIND_VR_LISTENER_SERVICE = ( + "android.permission.BIND_VR_LISTENER_SERVICE" + ) + BIND_WALLPAPER = ( + "android.permission.BIND_WALLPAPER" + ) + BLUETOOTH = ( + "android.permission.BLUETOOTH" + ) + BLUETOOTH_ADMIN = ( + "android.permission.BLUETOOTH_ADMIN" + ) + BODY_SENSORS = ( + "android.permission.BODY_SENSORS" + ) + BROADCAST_PACKAGE_REMOVED = ( + "android.permission.BROADCAST_PACKAGE_REMOVED" + ) + BROADCAST_STICKY = ( + "android.permission.BROADCAST_STICKY" + ) + CALL_PHONE = ( + "android.permission.CALL_PHONE" + ) + CALL_PRIVILEGED = ( + "android.permission.CALL_PRIVILEGED" + ) + CAMERA = ( + "android.permission.CAMERA" + ) + CAPTURE_AUDIO_OUTPUT = ( + "android.permission.CAPTURE_AUDIO_OUTPUT" + ) + CAPTURE_SECURE_VIDEO_OUTPUT = ( + "android.permission.CAPTURE_SECURE_VIDEO_OUTPUT" + ) + CAPTURE_VIDEO_OUTPUT = ( + "android.permission.CAPTURE_VIDEO_OUTPUT" + ) + CHANGE_COMPONENT_ENABLED_STATE = ( + "android.permission.CHANGE_COMPONENT_ENABLED_STATE" + ) + CHANGE_CONFIGURATION = ( + "android.permission.CHANGE_CONFIGURATION" + ) + CHANGE_NETWORK_STATE = ( + "android.permission.CHANGE_NETWORK_STATE" + ) + CHANGE_WIFI_MULTICAST_STATE = ( + "android.permission.CHANGE_WIFI_MULTICAST_STATE" + ) + CHANGE_WIFI_STATE = ( + "android.permission.CHANGE_WIFI_STATE" + ) + CLEAR_APP_CACHE = ( + "android.permission.CLEAR_APP_CACHE" + ) + CONTROL_LOCATION_UPDATES = ( + "android.permission.CONTROL_LOCATION_UPDATES" + ) + DELETE_CACHE_FILES = ( + "android.permission.DELETE_CACHE_FILES" + ) + DELETE_PACKAGES = ( + "android.permission.DELETE_PACKAGES" + ) + DIAGNOSTIC = ( + "android.permission.DIAGNOSTIC" + ) + DISABLE_KEYGUARD = ( + "android.permission.DISABLE_KEYGUARD" + ) + DUMP = ( + "android.permission.DUMP" + ) + EXPAND_STATUS_BAR = ( + "android.permission.EXPAND_STATUS_BAR" + ) + FACTORY_TEST = ( + "android.permission.FACTORY_TEST" + ) + FOREGROUND_SERVICE = ( + 
"android.permission.FOREGROUND_SERVICE" + ) + GET_ACCOUNTS = ( + "android.permission.GET_ACCOUNTS" + ) + GET_ACCOUNTS_PRIVILEGED = ( + "android.permission.GET_ACCOUNTS_PRIVILEGED" + ) + GET_PACKAGE_SIZE = ( + "android.permission.GET_PACKAGE_SIZE" + ) + GET_TASKS = ( + "android.permission.GET_TASKS" + ) + GLOBAL_SEARCH = ( + "android.permission.GLOBAL_SEARCH" + ) + INSTALL_LOCATION_PROVIDER = ( + "android.permission.INSTALL_LOCATION_PROVIDER" + ) + INSTALL_PACKAGES = ( + "android.permission.INSTALL_PACKAGES" + ) + INSTALL_SHORTCUT = ( + "com.android.launcher.permission.INSTALL_SHORTCUT" + ) + INSTANT_APP_FOREGROUND_SERVICE = ( + "android.permission.INSTANT_APP_FOREGROUND_SERVICE" + ) + INTERNET = ( + "android.permission.INTERNET" + ) + KILL_BACKGROUND_PROCESSES = ( + "android.permission.KILL_BACKGROUND_PROCESSES" + ) + LOCATION_HARDWARE = ( + "android.permission.LOCATION_HARDWARE" + ) + MANAGE_DOCUMENTS = ( + "android.permission.MANAGE_DOCUMENTS" + ) + MANAGE_OWN_CALLS = ( + "android.permission.MANAGE_OWN_CALLS" + ) + MASTER_CLEAR = ( + "android.permission.MASTER_CLEAR" + ) + MEDIA_CONTENT_CONTROL = ( + "android.permission.MEDIA_CONTENT_CONTROL" + ) + MODIFY_AUDIO_SETTINGS = ( + "android.permission.MODIFY_AUDIO_SETTINGS" + ) + MODIFY_PHONE_STATE = ( + "android.permission.MODIFY_PHONE_STATE" + ) + MOUNT_FORMAT_FILESYSTEMS = ( + "android.permission.MOUNT_FORMAT_FILESYSTEMS" + ) + MOUNT_UNMOUNT_FILESYSTEMS = ( + "android.permission.MOUNT_UNMOUNT_FILESYSTEMS" + ) + NFC = ( + "android.permission.NFC" + ) + NFC_TRANSACTION_EVENT = ( + "android.permission.NFC_TRANSACTION_EVENT" + ) + PACKAGE_USAGE_STATS = ( + "android.permission.PACKAGE_USAGE_STATS" + ) + PERSISTENT_ACTIVITY = ( + "android.permission.PERSISTENT_ACTIVITY" + ) + PROCESS_OUTGOING_CALLS = ( + "android.permission.PROCESS_OUTGOING_CALLS" + ) + READ_CALENDAR = ( + "android.permission.READ_CALENDAR" + ) + READ_CALL_LOG = ( + "android.permission.READ_CALL_LOG" + ) + READ_CONTACTS = ( + "android.permission.READ_CONTACTS" + ) + READ_EXTERNAL_STORAGE = ( + "android.permission.READ_EXTERNAL_STORAGE" + ) + READ_FRAME_BUFFER = ( + "android.permission.READ_FRAME_BUFFER" + ) + READ_INPUT_STATE = ( + "android.permission.READ_INPUT_STATE" + ) + READ_LOGS = ( + "android.permission.READ_LOGS" + ) + READ_PHONE_NUMBERS = ( + "android.permission.READ_PHONE_NUMBERS" + ) + READ_PHONE_STATE = ( + "android.permission.READ_PHONE_STATE" + ) + READ_SMS = ( + "android.permission.READ_SMS" + ) + READ_SYNC_SETTINGS = ( + "android.permission.READ_SYNC_SETTINGS" + ) + READ_SYNC_STATS = ( + "android.permission.READ_SYNC_STATS" + ) + READ_VOICEMAIL = ( + "com.android.voicemail.permission.READ_VOICEMAIL" + ) + REBOOT = ( + "android.permission.REBOOT" + ) + RECEIVE_BOOT_COMPLETED = ( + "android.permission.RECEIVE_BOOT_COMPLETED" + ) + RECEIVE_MMS = ( + "android.permission.RECEIVE_MMS" + ) + RECEIVE_SMS = ( + "android.permission.RECEIVE_SMS" + ) + RECEIVE_WAP_PUSH = ( + "android.permission.RECEIVE_WAP_PUSH" + ) + RECORD_AUDIO = ( + "android.permission.RECORD_AUDIO" + ) + REORDER_TASKS = ( + "android.permission.REORDER_TASKS" + ) + REQUEST_COMPANION_RUN_IN_BACKGROUND = ( + "android.permission.REQUEST_COMPANION_RUN_IN_BACKGROUND" + ) + REQUEST_COMPANION_USE_DATA_IN_BACKGROUND = ( + "android.permission.REQUEST_COMPANION_USE_DATA_IN_BACKGROUND" + ) + REQUEST_DELETE_PACKAGES = ( + "android.permission.REQUEST_DELETE_PACKAGES" + ) + REQUEST_IGNORE_BATTERY_OPTIMIZATIONS = ( + "android.permission.REQUEST_IGNORE_BATTERY_OPTIMIZATIONS" + ) + REQUEST_INSTALL_PACKAGES = ( + 
"android.permission.REQUEST_INSTALL_PACKAGES" + ) + RESTART_PACKAGES = ( + "android.permission.RESTART_PACKAGES" + ) + SEND_RESPOND_VIA_MESSAGE = ( + "android.permission.SEND_RESPOND_VIA_MESSAGE" + ) + SEND_SMS = ( + "android.permission.SEND_SMS" + ) + SET_ALARM = ( + "com.android.alarm.permission.SET_ALARM" + ) + SET_ALWAYS_FINISH = ( + "android.permission.SET_ALWAYS_FINISH" + ) + SET_ANIMATION_SCALE = ( + "android.permission.SET_ANIMATION_SCALE" + ) + SET_DEBUG_APP = ( + "android.permission.SET_DEBUG_APP" + ) + SET_PREFERRED_APPLICATIONS = ( + "android.permission.SET_PREFERRED_APPLICATIONS" + ) + SET_PROCESS_LIMIT = ( + "android.permission.SET_PROCESS_LIMIT" + ) + SET_TIME = ( + "android.permission.SET_TIME" + ) + SET_TIME_ZONE = ( + "android.permission.SET_TIME_ZONE" + ) + SET_WALLPAPER = ( + "android.permission.SET_WALLPAPER" + ) + SET_WALLPAPER_HINTS = ( + "android.permission.SET_WALLPAPER_HINTS" + ) + SIGNAL_PERSISTENT_PROCESSES = ( + "android.permission.SIGNAL_PERSISTENT_PROCESSES" + ) + STATUS_BAR = ( + "android.permission.STATUS_BAR" + ) + SYSTEM_ALERT_WINDOW = ( + "android.permission.SYSTEM_ALERT_WINDOW" + ) + TRANSMIT_IR = ( + "android.permission.TRANSMIT_IR" + ) + UNINSTALL_SHORTCUT = ( + "com.android.launcher.permission.UNINSTALL_SHORTCUT" + ) + UPDATE_DEVICE_STATS = ( + "android.permission.UPDATE_DEVICE_STATS" + ) + USE_BIOMETRIC = ( + "android.permission.USE_BIOMETRIC" + ) + USE_FINGERPRINT = ( + "android.permission.USE_FINGERPRINT" + ) + USE_SIP = ( + "android.permission.USE_SIP" + ) + VIBRATE = ( + "android.permission.VIBRATE" + ) + WAKE_LOCK = ( + "android.permission.WAKE_LOCK" + ) + WRITE_APN_SETTINGS = ( + "android.permission.WRITE_APN_SETTINGS" + ) + WRITE_CALENDAR = ( + "android.permission.WRITE_CALENDAR" + ) + WRITE_CALL_LOG = ( + "android.permission.WRITE_CALL_LOG" + ) + WRITE_CONTACTS = ( + "android.permission.WRITE_CONTACTS" + ) + WRITE_EXTERNAL_STORAGE = ( + "android.permission.WRITE_EXTERNAL_STORAGE" + ) + WRITE_GSERVICES = ( + "android.permission.WRITE_GSERVICES" + ) + WRITE_SECURE_SETTINGS = ( + "android.permission.WRITE_SECURE_SETTINGS" + ) + WRITE_SETTINGS = ( + "android.permission.WRITE_SETTINGS" + ) + WRITE_SYNC_SETTINGS = ( + "android.permission.WRITE_SYNC_SETTINGS" + ) + WRITE_VOICEMAIL = ( + "com.android.voicemail.permission.WRITE_VOICEMAIL" + ) + + +def request_permissions(permissions): + python_activity = autoclass('org.kivy.android.PythonActivity') + python_activity.requestPermissions(permissions) + + +def request_permission(permission): + request_permissions([permission]) + + +def check_permission(permission): + python_activity = autoclass('org.kivy.android.PythonActivity') + result = bool(python_activity.checkCurrentPermission( + permission + "" + )) + return result diff --git a/p4a/pythonforandroidold/recipes/android/src/android/runnable.py b/p4a/pythonforandroidold/recipes/android/src/android/runnable.py new file mode 100644 index 0000000..8d2d116 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/android/src/android/runnable.py @@ -0,0 +1,49 @@ +''' +Runnable +======== + +''' + +from jnius import PythonJavaClass, java_method, autoclass +from android.config import JAVA_NAMESPACE + +# reference to the activity +_PythonActivity = autoclass(JAVA_NAMESPACE + '.PythonActivity') + + +class Runnable(PythonJavaClass): + '''Wrapper around Java Runnable class. This class can be used to schedule a + call of a Python function into the PythonActivity thread. 
+    '''
+
+    __javainterfaces__ = ['java/lang/Runnable']
+    __runnables__ = []
+
+    def __init__(self, func):
+        super(Runnable, self).__init__()
+        self.func = func
+
+    def __call__(self, *args, **kwargs):
+        self.args = args
+        self.kwargs = kwargs
+        Runnable.__runnables__.append(self)
+        _PythonActivity.mActivity.runOnUiThread(self)
+
+    @java_method('()V')
+    def run(self):
+        try:
+            self.func(*self.args, **self.kwargs)
+        except:  # noqa E722
+            import traceback
+            traceback.print_exc()
+
+        Runnable.__runnables__.remove(self)
+
+
+def run_on_ui_thread(f):
+    '''Decorator that wraps the function in a :class:`Runnable` object.
+    The call is deferred and executed on the Activity (UI) thread.
+    '''
+    def f2(*args, **kwargs):
+        Runnable(f)(*args, **kwargs)
+    return f2
diff --git a/p4a/pythonforandroidold/recipes/android/src/setup.py b/p4a/pythonforandroidold/recipes/android/src/setup.py
new file mode 100755
index 0000000..2e95a86
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/android/src/setup.py
@@ -0,0 +1,34 @@
+from distutils.core import setup, Extension
+import os
+
+library_dirs = ['libs/' + os.environ['ARCH']]
+lib_dict = {
+    'pygame': ['sdl'],
+    'sdl2': ['SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf']
+}
+sdl_libs = lib_dict.get(os.environ['BOOTSTRAP'], [])
+
+renpy_sound = Extension('android._android_sound',
+                        ['android/_android_sound.c', 'android/_android_sound_jni.c', ],
+                        libraries=sdl_libs + ['log'],
+                        library_dirs=library_dirs)
+
+modules = [Extension('android._android',
+                     ['android/_android.c', 'android/_android_jni.c'],
+                     libraries=sdl_libs + ['log'],
+                     library_dirs=library_dirs),
+           Extension('android._android_billing',
+                     ['android/_android_billing.c', 'android/_android_billing_jni.c'],
+                     libraries=['log'],
+                     library_dirs=library_dirs)]
+
+if int(os.environ['IS_PYGAME']):
+    modules.append(renpy_sound)
+
+
+setup(name='android',
+      version='1.0',
+      packages=['android'],
+      package_dir={'android': 'android'},
+      ext_modules=modules
+      )
diff --git a/p4a/pythonforandroidold/recipes/apsw/__init__.py b/p4a/pythonforandroidold/recipes/apsw/__init__.py
new file mode 100644
index 0000000..6098e4b
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/apsw/__init__.py
@@ -0,0 +1,34 @@
+from pythonforandroid.recipe import PythonRecipe
+from pythonforandroid.toolchain import current_directory, shprint
+import sh
+
+
+class ApswRecipe(PythonRecipe):
+    version = '3.15.0-r1'
+    url = 'https://github.com/rogerbinns/apsw/archive/{version}.tar.gz'
+    depends = ['sqlite3', ('python2', 'python3'), 'setuptools']
+    call_hostpython_via_targetpython = False
+    site_packages_name = 'apsw'
+
+    def build_arch(self, arch):
+        env = self.get_recipe_env(arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+            # Build python bindings
+            hostpython = sh.Command(self.hostpython_location)
+            shprint(hostpython,
+                    'setup.py',
+                    'build_ext',
+                    '--enable=fts4', _env=env)
+        # Install python bindings
+        super(ApswRecipe, self).build_arch(arch)
+
+    def get_recipe_env(self, arch):
+        env = super(ApswRecipe, self).get_recipe_env(arch)
+        sqlite_recipe = self.get_recipe('sqlite3', self.ctx)
+        env['CFLAGS'] += ' -I' + sqlite_recipe.get_build_dir(arch.arch)
+        env['LDFLAGS'] += ' -L' + sqlite_recipe.get_lib_dir(arch)
+        env['LIBS'] = env.get('LIBS', '') + ' -lsqlite3'
+        return env
+
+
+recipe = ApswRecipe()
diff --git a/p4a/pythonforandroidold/recipes/atom/__init__.py b/p4a/pythonforandroidold/recipes/atom/__init__.py
new file mode 100644
index 0000000..51923d5
--- /dev/null
+++
b/p4a/pythonforandroidold/recipes/atom/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe + + +class AtomRecipe(CppCompiledComponentsPythonRecipe): + site_packages_name = 'atom' + version = '0.3.10' + url = 'https://github.com/nucleic/atom/archive/master.zip' + depends = ['setuptools'] + + +recipe = AtomRecipe() diff --git a/p4a/pythonforandroidold/recipes/audiostream/__init__.py b/p4a/pythonforandroidold/recipes/audiostream/__init__.py new file mode 100644 index 0000000..4197abd --- /dev/null +++ b/p4a/pythonforandroidold/recipes/audiostream/__init__.py @@ -0,0 +1,32 @@ + +from pythonforandroid.recipe import CythonRecipe +from os.path import join + + +class AudiostreamRecipe(CythonRecipe): + version = 'master' + url = 'https://github.com/kivy/audiostream/archive/{version}.zip' + name = 'audiostream' + depends = [('python2', 'python3'), ('sdl', 'sdl2'), 'pyjnius'] + + def get_recipe_env(self, arch): + env = super(AudiostreamRecipe, self).get_recipe_env(arch) + if 'sdl' in self.ctx.recipe_build_order: + sdl_include = 'sdl' + sdl_mixer_include = 'sdl_mixer' + elif 'sdl2' in self.ctx.recipe_build_order: + sdl_include = 'SDL2' + sdl_mixer_include = 'SDL2_mixer' + env['USE_SDL2'] = 'True' + env['SDL2_INCLUDE_DIR'] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include') + + env['CFLAGS'] += ' -I{jni_path}/{sdl_include}/include -I{jni_path}/{sdl_mixer_include}'.format( + jni_path=join(self.ctx.bootstrap.build_dir, 'jni'), + sdl_include=sdl_include, + sdl_mixer_include=sdl_mixer_include) + env['NDKPLATFORM'] = self.ctx.ndk_platform + env['LIBLINK'] = 'NOTNONE' # Hacky fix. Needed by audiostream setup.py + return env + + +recipe = AudiostreamRecipe() diff --git a/p4a/pythonforandroidold/recipes/babel/__init__.py b/p4a/pythonforandroidold/recipes/babel/__init__.py new file mode 100644 index 0000000..fc17f8e --- /dev/null +++ b/p4a/pythonforandroidold/recipes/babel/__init__.py @@ -0,0 +1,15 @@ +from pythonforandroid.recipe import PythonRecipe + + +class BabelRecipe(PythonRecipe): + name = 'babel' + version = '2.2.0' + url = 'https://pypi.python.org/packages/source/B/Babel/Babel-{version}.tar.gz' + + depends = ['setuptools', 'pytz'] + + call_hostpython_via_targetpython = False + install_in_hostpython = True + + +recipe = BabelRecipe() diff --git a/p4a/pythonforandroidold/recipes/boost/__init__.py b/p4a/pythonforandroidold/recipes/boost/__init__.py new file mode 100644 index 0000000..53d9388 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/boost/__init__.py @@ -0,0 +1,104 @@ +from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory +from os.path import join, exists +from os import environ +import sh + +""" +This recipe creates a custom toolchain and bootstraps Boost from source to build Boost.Build +including python bindings +""" + + +class BoostRecipe(Recipe): + # Todo: make recipe compatible with all p4a architectures + ''' + .. note:: This recipe can be built only against API 21+ and arch armeabi-v7a + + .. versionchanged:: 0.6.0 + Rewrote recipe to support clang's build. 
The following changes have
+        been made:
+
+            - Bumped version number to 1.68.0
+            - Better version handling for url
+            - Added python 3 compatibility
+            - Default compiler for ndk's toolchain set to clang
+            - Python version will be detected via user-config.jam
+            - Changed stl's lib from ``gnustl_shared`` to ``c++_shared``
+    '''
+    version = '1.68.0'
+    url = 'http://downloads.sourceforge.net/project/boost/' \
+          'boost/{version}/boost_{version_underscore}.tar.bz2'
+    depends = [('python2', 'python3')]
+    patches = ['disable-so-version.patch',
+               'use-android-libs.patch',
+               'fix-android-issues.patch']
+
+    @property
+    def versioned_url(self):
+        if self.url is None:
+            return None
+        return self.url.format(
+            version=self.version,
+            version_underscore=self.version.replace('.', '_'))
+
+    def should_build(self, arch):
+        return not exists(join(self.get_build_dir(arch.arch), 'b2'))
+
+    def prebuild_arch(self, arch):
+        super(BoostRecipe, self).prebuild_arch(arch)
+        env = self.get_recipe_env(arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+            if not exists(env['CROSSHOME']):
+                # Make custom toolchain
+                bash = sh.Command('bash')
+                shprint(bash, join(self.ctx.ndk_dir, 'build/tools/make-standalone-toolchain.sh'),
+                        '--arch=' + env['ARCH'],
+                        '--platform=android-' + str(self.ctx.android_api),
+                        '--toolchain=' + env['CROSSHOST'] + '-' + self.ctx.toolchain_version + ':-llvm',
+                        '--use-llvm',
+                        '--stl=libc++',
+                        '--install-dir=' + env['CROSSHOME']
+                        )
+            # Set custom configuration
+            shutil.copyfile(join(self.get_recipe_dir(), 'user-config.jam'),
+                            join(env['BOOST_BUILD_PATH'], 'user-config.jam'))
+
+    def build_arch(self, arch):
+        super(BoostRecipe, self).build_arch(arch)
+        env = self.get_recipe_env(arch)
+        env['PYTHON_HOST'] = self.ctx.hostpython
+        with current_directory(self.get_build_dir(arch.arch)):
+            # Compile Boost.Build engine with this custom toolchain
+            bash = sh.Command('bash')
+            shprint(bash, 'bootstrap.sh')  # Do not pass env
+        # Install app stl
+        shutil.copyfile(
+            join(self.ctx.ndk_dir, 'sources/cxx-stl/llvm-libc++/libs/'
+                 'armeabi-v7a/libc++_shared.so'),
+            join(self.ctx.get_libs_dir(arch.arch), 'libc++_shared.so'))
+
+    def select_build_arch(self, arch):
+        return arch.arch.replace('eabi-v7a', '').replace('eabi', '')
+
+    def get_recipe_env(self, arch):
+        # We don't use the normal env because we
+        # are building with a standalone toolchain
+        env = environ.copy()
+
+        env['BOOST_BUILD_PATH'] = self.get_build_dir(arch.arch)  # find user-config.jam
+        env['BOOST_ROOT'] = env['BOOST_BUILD_PATH']  # find boost source
+
+        env['PYTHON_ROOT'] = self.ctx.python_recipe.link_root(arch.arch)
+        env['PYTHON_INCLUDE'] = self.ctx.python_recipe.include_root(arch.arch)
+        env['PYTHON_MAJOR_MINOR'] = self.ctx.python_recipe.version[:3]
+        env['PYTHON_LINK_VERSION'] = self.ctx.python_recipe.major_minor_version_string
+        if 'python3' in self.ctx.python_recipe.name:
+            env['PYTHON_LINK_VERSION'] += 'm'
+
+        env['ARCH'] = self.select_build_arch(arch)
+        env['CROSSHOST'] = env['ARCH'] + '-linux-androideabi'
+        env['CROSSHOME'] = join(env['BOOST_ROOT'], 'standalone-' + env['ARCH'] + '-toolchain')
+        return env
+
+
+recipe = BoostRecipe()
diff --git a/p4a/pythonforandroidold/recipes/boost/disable-so-version.patch b/p4a/pythonforandroidold/recipes/boost/disable-so-version.patch
new file mode 100644
index 0000000..6911f89
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/boost/disable-so-version.patch
@@ -0,0 +1,12 @@
+--- boost/boostcpp.jam	2015-12-14 03:30:09.000000000 +0100
++++ boost-patch/boostcpp.jam	2016-02-08
16:38:40.510859612 +0100
+@@ -155,8 +155,9 @@
+     if $(type) = SHARED_LIB &&
+       ! [ $(property-set).get <target-os> ] in windows cygwin darwin aix &&
+       ! [ $(property-set).get <toolset> ] in pgi
+     {
++        return $(result) ; # disable version suffix for android
+         result = $(result).$(BOOST_VERSION) ;
+     }
+
+     return $(result) ;
diff --git a/p4a/pythonforandroidold/recipes/boost/fix-android-issues.patch b/p4a/pythonforandroidold/recipes/boost/fix-android-issues.patch
new file mode 100644
index 0000000..5413480
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/boost/fix-android-issues.patch
@@ -0,0 +1,68 @@
+diff -u -r boost_1_68_0.orig/boost/config/user.hpp boost_1_68_0/boost/config/user.hpp
+--- boost_1_68_0.orig/boost/config/user.hpp	2018-08-01 22:50:46.000000000 +0200
++++ boost_1_68_0/boost/config/user.hpp	2018-08-27 15:43:38.000000000 +0200
+@@ -13,6 +13,12 @@
+ // configuration policy:
+ //
+
++// Android defines
++// There is problem with std::atomic on android (and some other platforms).
++// See this link for more info:
++// https://code.google.com/p/android/issues/detail?id=42735#makechanges
++#define BOOST_ASIO_DISABLE_STD_ATOMIC 1
++
+ // define this to locate a compiler config file:
+ // #define BOOST_COMPILER_CONFIG
+
+diff -u -r boost_1_68_0.orig/boost/asio/detail/config.hpp boost_1_68_0/boost/asio/detail/config.hpp
+--- boost_1_68_0.orig/boost/asio/detail/config.hpp	2018-08-01 22:50:46.000000000 +0200
++++ boost_1_68_0/boost/asio/detail/config.hpp	2018-09-19 12:39:56.000000000 +0200
+@@ -804,7 +804,11 @@
+ # if defined(__clang__)
+ #  if (__cplusplus >= 201402)
+ #   if __has_include(<experimental/string_view>)
+-#    define BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW 1
++#     if __clang_major__ >= 7
++#      undef BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW
++#     else
++#      define BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW 1
++#     endif // __clang_major__ >= 7
+ #   endif // __has_include(<experimental/string_view>)
+ #  endif // (__cplusplus >= 201402)
+ # endif // defined(__clang__)
+diff -u -r boost_1_68_0.orig/boost/system/error_code.hpp boost_1_68_0/boost/system/error_code.hpp
+--- boost_1_68_0.orig/boost/system/error_code.hpp	2018-08-01 22:50:53.000000000 +0200
++++ boost_1_68_0/boost/system/error_code.hpp	2018-08-27 15:44:29.000000000 +0200
+@@ -17,6 +17,7 @@
+ #include
+ #include
+ #include
++#include
+ #include
+ #include
+ #include
+diff -u -r boost_1_68_0.orig/libs/filesystem/src/operations.cpp boost_1_68_0/libs/filesystem/src/operations.cpp
+--- boost_1_68_0.orig/libs/filesystem/src/operations.cpp	2018-08-01 22:50:47.000000000 +0200
++++ boost_1_68_0/libs/filesystem/src/operations.cpp	2018-08-27 15:47:15.000000000 +0200
+@@ -232,6 +232,21 @@
+
+ # if defined(BOOST_POSIX_API)
+
++# if defined(__ANDROID__)
++# define truncate libboost_truncate_wrapper
++// truncate() is present in Android libc only starting from ABI 21, so here's a simple wrapper
++static int libboost_truncate_wrapper(const char *path, off_t length)
++{
++    int fd = open(path, O_WRONLY);
++    if (fd == -1) {
++        return -1;
++    }
++    int status = ftruncate(fd, length);
++    close(fd);
++    return status;
++}
++# endif
++
+ typedef int err_t;
+
+ // POSIX uses a 0 return to indicate success
diff --git a/p4a/pythonforandroidold/recipes/boost/use-android-libs.patch b/p4a/pythonforandroidold/recipes/boost/use-android-libs.patch
new file mode 100644
index 0000000..650722d
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/boost/use-android-libs.patch
@@ -0,0 +1,10 @@
+--- boost/tools/build/src/tools/python.jam	2015-10-16 20:55:36.000000000 +0200
++++ boost-patch/tools/build/src/tools/python.jam	2016-02-09
13:16:09.519261546 +0100
+@@ -646,6 +646,7 @@
+
+         case aix : return <library>pthread <library>dl ;
+
++        case * : return ; # use Android builtin libs
+         case * : return <library>pthread <library>dl
+             <toolset>gcc:<library>util <target-os>linux:<library>util ;
+     }
diff --git a/p4a/pythonforandroidold/recipes/boost/user-config.jam b/p4a/pythonforandroidold/recipes/boost/user-config.jam
new file mode 100644
index 0000000..e50b50a
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/boost/user-config.jam
@@ -0,0 +1,61 @@
+import os ;
+
+local ARCH = [ os.environ ARCH ] ;
+local CROSSHOME = [ os.environ CROSSHOME ] ;
+local PYTHON_HOST = [ os.environ PYTHON_HOST ] ;
+local PYTHON_ROOT = [ os.environ PYTHON_ROOT ] ;
+local PYTHON_INCLUDE = [ os.environ PYTHON_INCLUDE ] ;
+local PYTHON_LINK_VERSION = [ os.environ PYTHON_LINK_VERSION ] ;
+local PYTHON_MAJOR_MINOR = [ os.environ PYTHON_MAJOR_MINOR ] ;
+
+using clang : $(ARCH) : $(CROSSHOME)/bin/arm-linux-androideabi-clang++ :
+<archiver>$(CROSSHOME)/bin/arm-linux-androideabi-ar
+<root>$(CROSSHOME)/sysroot
+<architecture>$(ARCH)
+<compileflags>-fexceptions
+<compileflags>-frtti
+<compileflags>-fpic
+<compileflags>-ffunction-sections
+<compileflags>-funwind-tables
+<compileflags>-march=armv7-a
+<compileflags>-msoft-float
+<compileflags>-mfpu=neon
+<compileflags>-mthumb
+<compileflags>-march=armv7-a
+<linkflags>-Wl,--fix-cortex-a8
+<compileflags>-Os
+<compileflags>-fomit-frame-pointer
+<compileflags>-fno-strict-aliasing
+<compileflags>-DANDROID
+<compileflags>-D__ANDROID__
+<compileflags>-DANDROID_TOOLCHAIN=clang
+<compileflags>-DANDROID_ABI=armv7-a
+<compileflags>-DANDROID_STL=c++_shared
+<compileflags>-DBOOST_ALL_NO_LIB
+#<compileflags>-DNDEBUG
+<compileflags>-O2
+<compileflags>-g
+<compileflags>-fvisibility=hidden
+<compileflags>-fvisibility-inlines-hidden
+<compileflags>-fdata-sections
+<compileflags>-D__arm__
+<compileflags>-D_REENTRANT
+<compileflags>-D_GLIBCXX__PTHREADS
+<compileflags>-Wno-long-long
+<compileflags>-Wno-missing-field-initializers
+<compileflags>-Wno-unused-variable
+<linkflags>-Wl,-z,relro
+<linkflags>-Wl,-z,now
+<linkflags>-lc++_shared
+<linkflags>-L$(PYTHON_ROOT)
+<linkflags>-lpython$(PYTHON_LINK_VERSION)
+<linkflags>-Wl,-O1
+<linkflags>-Wl,-Bsymbolic-functions
+;
+
+using python : $(PYTHON_MAJOR_MINOR)
+             : $(PYTHON_HOST)
+             : $(PYTHON_ROOT) $(PYTHON_INCLUDE)
+             : $(PYTHON_ROOT)/libpython$(PYTHON_LINK_VERSION).so
+             : #BOOST_ALL_DYN_LINK
+;
\ No newline at end of file
diff --git a/p4a/pythonforandroidold/recipes/brokenrecipe/__init__.py b/p4a/pythonforandroidold/recipes/brokenrecipe/__init__.py
new file mode 100644
index 0000000..48e266b
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/brokenrecipe/__init__.py
@@ -0,0 +1,9 @@
+from pythonforandroid.toolchain import Recipe
+
+
+class BrokenRecipe(Recipe):
+    def __init__(self):
+        print('This is a broken recipe, not a real one!')
+
+
+recipe = BrokenRecipe()
diff --git a/p4a/pythonforandroidold/recipes/cdecimal/__init__.py b/p4a/pythonforandroidold/recipes/cdecimal/__init__.py
new file mode 100644
index 0000000..94929c7
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/cdecimal/__init__.py
@@ -0,0 +1,25 @@
+from pythonforandroid.recipe import CompiledComponentsPythonRecipe
+from pythonforandroid.patching import is_darwin
+
+
+class CdecimalRecipe(CompiledComponentsPythonRecipe):
+    name = 'cdecimal'
+    version = '2.3'
+    url = 'http://www.bytereef.org/software/mpdecimal/releases/cdecimal-{version}.tar.gz'
+
+    depends = []
+
+    patches = ['locale.patch',
+               'cross-compile.patch']
+
+    def prebuild_arch(self, arch):
+        super(CdecimalRecipe, self).prebuild_arch(arch)
+        if not is_darwin():
+            if '64' in arch.arch:
+                machine = 'ansi64'
+            else:
+                machine = 'ansi32'
+            self.setup_extra_args = ['--with-machine=' + machine]
+
+
+recipe = CdecimalRecipe()
diff --git a/p4a/pythonforandroidold/recipes/cdecimal/cross-compile.patch b/p4a/pythonforandroidold/recipes/cdecimal/cross-compile.patch
new file mode 100644
index 0000000..cc15f33
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/cdecimal/cross-compile.patch
@@ -0,0 +1,12 @@
+diff -Naur cdecimal/setup.py b/setup.py
+--- cdecimal/setup.py	2015-12-14
13:48:23.085997956 -0600 ++++ b/setup.py 2015-12-14 13:48:11.413805121 -0600 +@@ -229,7 +229,7 @@ + def configure(machine, cc, py_size_t): + os.chmod("./configure", 0x1ed) # pip removes execute permissions. + if machine: # string has been validated. +- os.system("./configure MACHINE=%s" % machine) ++ os.system("./configure --host=%s MACHINE=%s" % (os.environ['TOOLCHAIN_PREFIX'], machine)) + elif 'sunos' in SYSTEM and py_size_t == 8: + # cc is from sysconfig. + os.system("./configure CC='%s -m64'" % cc) diff --git a/p4a/pythonforandroidold/recipes/cdecimal/locale.patch b/p4a/pythonforandroidold/recipes/cdecimal/locale.patch new file mode 100644 index 0000000..4b8df6b --- /dev/null +++ b/p4a/pythonforandroidold/recipes/cdecimal/locale.patch @@ -0,0 +1,172 @@ +diff -Naur a/io.c b/io.c +--- a/io.c 2012-02-01 14:29:49.000000000 -0600 ++++ b/io.c 2015-12-09 17:04:00.060579230 -0600 +@@ -34,7 +34,7 @@ + #include + #include + #include +-#include ++#include "locale.h" + #include "bits.h" + #include "constants.h" + #include "memory.h" +@@ -792,15 +792,14 @@ + } + else if (*cp == 'N' || *cp == 'n') { + /* locale specific conversion */ +- struct lconv *lc; + spec->type = *cp++; + /* separator has already been specified */ + if (*spec->sep) return 0; + spec->type = (spec->type == 'N') ? 'G' : 'g'; +- lc = localeconv(); +- spec->dot = lc->decimal_point; +- spec->sep = lc->thousands_sep; +- spec->grouping = lc->grouping; ++ /* TODO: Android does not have localeconv(); we'll just use C locale values for now */ ++ spec->dot = "."; ++ spec->sep = ""; ++ spec->grouping = ""; + } + + /* check correctness */ +diff -Naur a/locale.h b/locale.h +--- a/locale.h 1969-12-31 18:00:00.000000000 -0600 ++++ b/locale.h 2015-12-09 17:04:11.128762784 -0600 +@@ -0,0 +1,136 @@ ++/* ++ * Copyright (C) 2008 The Android Open Source Project ++ * All rights reserved. ++ * ++ * Redistribution and use in source and binary forms, with or without ++ * modification, are permitted provided that the following conditions ++ * are met: ++ * * Redistributions of source code must retain the above copyright ++ * notice, this list of conditions and the following disclaimer. ++ * * Redistributions in binary form must reproduce the above copyright ++ * notice, this list of conditions and the following disclaimer in ++ * the documentation and/or other materials provided with the ++ * distribution. ++ * ++ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ++ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ++ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS ++ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE ++ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, ++ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, ++ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS ++ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED ++ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, ++ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT ++ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF ++ * SUCH DAMAGE. 
++ */ ++#ifndef _LOCALE_H_ ++#define _LOCALE_H_ ++ ++#include ++ ++__BEGIN_DECLS ++ ++enum { ++ LC_CTYPE = 0, ++ LC_NUMERIC = 1, ++ LC_TIME = 2, ++ LC_COLLATE = 3, ++ LC_MONETARY = 4, ++ LC_MESSAGES = 5, ++ LC_ALL = 6, ++ LC_PAPER = 7, ++ LC_NAME = 8, ++ LC_ADDRESS = 9, ++ ++ LC_TELEPHONE = 10, ++ LC_MEASUREMENT = 11, ++ LC_IDENTIFICATION = 12 ++}; ++ ++extern char *setlocale(int category, const char *locale); ++ ++#if 1 /* MISSING FROM BIONIC - DEFINED TO MAKE libstdc++-v3 happy */ ++/*struct lconv { };*/ ++ ++__BEGIN_NAMESPACE_STD; ++ ++/* Structure giving information about numeric and monetary notation. */ ++struct lconv ++{ ++ /* Numeric (non-monetary) information. */ ++ ++ char *decimal_point; /* Decimal point character. */ ++ char *thousands_sep; /* Thousands separator. */ ++ /* Each element is the number of digits in each group; ++ elements with higher indices are farther left. ++ An element with value CHAR_MAX means that no further grouping is done. ++ An element with value 0 means that the previous element is used ++ for all groups farther left. */ ++ char *grouping; ++ ++ /* Monetary information. */ ++ ++ /* First three chars are a currency symbol from ISO 4217. ++ Fourth char is the separator. Fifth char is '\0'. */ ++ char *int_curr_symbol; ++ char *currency_symbol; /* Local currency symbol. */ ++ char *mon_decimal_point; /* Decimal point character. */ ++ char *mon_thousands_sep; /* Thousands separator. */ ++ char *mon_grouping; /* Like `grouping' element (above). */ ++ char *positive_sign; /* Sign for positive values. */ ++ char *negative_sign; /* Sign for negative values. */ ++ char int_frac_digits; /* Int'l fractional digits. */ ++ char frac_digits; /* Local fractional digits. */ ++ /* 1 if currency_symbol precedes a positive value, 0 if succeeds. */ ++ char p_cs_precedes; ++ /* 1 iff a space separates currency_symbol from a positive value. */ ++ char p_sep_by_space; ++ /* 1 if currency_symbol precedes a negative value, 0 if succeeds. */ ++ char n_cs_precedes; ++ /* 1 iff a space separates currency_symbol from a negative value. */ ++ char n_sep_by_space; ++ /* Positive and negative sign positions: ++ 0 Parentheses surround the quantity and currency_symbol. ++ 1 The sign string precedes the quantity and currency_symbol. ++ 2 The sign string follows the quantity and currency_symbol. ++ 3 The sign string immediately precedes the currency_symbol. ++ 4 The sign string immediately follows the currency_symbol. */ ++ char p_sign_posn; ++ char n_sign_posn; ++#ifdef __USE_ISOC99 ++ /* 1 if int_curr_symbol precedes a positive value, 0 if succeeds. */ ++ char int_p_cs_precedes; ++ /* 1 iff a space separates int_curr_symbol from a positive value. */ ++ char int_p_sep_by_space; ++ /* 1 if int_curr_symbol precedes a negative value, 0 if succeeds. */ ++ char int_n_cs_precedes; ++ /* 1 iff a space separates int_curr_symbol from a negative value. */ ++ char int_n_sep_by_space; ++ /* Positive and negative sign positions: ++ 0 Parentheses surround the quantity and int_curr_symbol. ++ 1 The sign string precedes the quantity and int_curr_symbol. ++ 2 The sign string follows the quantity and int_curr_symbol. ++ 3 The sign string immediately precedes the int_curr_symbol. ++ 4 The sign string immediately follows the int_curr_symbol. 
*/ ++ char int_p_sign_posn; ++ char int_n_sign_posn; ++#else ++ char __int_p_cs_precedes; ++ char __int_p_sep_by_space; ++ char __int_n_cs_precedes; ++ char __int_n_sep_by_space; ++ char __int_p_sign_posn; ++ char __int_n_sign_posn; ++#endif ++}; ++ ++__END_NAMESPACE_STD; ++ ++struct lconv *localeconv(void); ++#endif /* MISSING */ ++ ++__END_DECLS ++ ++#endif /* _LOCALE_H_ */ diff --git a/p4a/pythonforandroidold/recipes/cffi/__init__.py b/p4a/pythonforandroidold/recipes/cffi/__init__.py new file mode 100644 index 0000000..50458e5 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/cffi/__init__.py @@ -0,0 +1,53 @@ +import os +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class CffiRecipe(CompiledComponentsPythonRecipe): + """ + Extra system dependencies: autoconf, automake and libtool. + """ + name = 'cffi' + version = '1.11.5' + url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz' + + depends = ['setuptools', 'pycparser', 'libffi'] + + patches = ['disable-pkg-config.patch'] + + # call_hostpython_via_targetpython = False + install_in_hostpython = True + + def get_hostrecipe_env(self, arch=None): + # fixes missing ffi.h on some host systems (e.g. gentoo) + env = super(CffiRecipe, self).get_hostrecipe_env(arch) + libffi = self.get_recipe('libffi', self.ctx) + includes = libffi.get_include_dirs(arch) + env['FFI_INC'] = ",".join(includes) + return env + + def get_recipe_env(self, arch=None): + env = super(CffiRecipe, self).get_recipe_env(arch) + libffi = self.get_recipe('libffi', self.ctx) + includes = libffi.get_include_dirs(arch) + env['CFLAGS'] = ' -I'.join([env.get('CFLAGS', '')] + includes) + env['CFLAGS'] += ' -I{}'.format(self.ctx.python_recipe.include_root(arch.arch)) + env['LDFLAGS'] = (env.get('CFLAGS', '') + ' -L' + + self.ctx.get_libs_dir(arch.arch)) + env['LDFLAGS'] += ' -L{}'.format(os.path.join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)) + # required for libc and libdl + ndk_dir = self.ctx.ndk_platform + ndk_lib_dir = os.path.join(ndk_dir, 'usr', 'lib') + env['LDFLAGS'] += ' -L{}'.format(ndk_lib_dir) + env['LDFLAGS'] += " --sysroot={}".format(self.ctx.ndk_platform) + env['PYTHONPATH'] = ':'.join([ + self.ctx.get_site_packages_dir(), + env['BUILDLIB_PATH'], + ]) + env['LDFLAGS'] += ' -L{}'.format(self.ctx.python_recipe.link_root(arch.arch)) + env['LDFLAGS'] += ' -lpython{}'.format(self.ctx.python_recipe.major_minor_version_string) + if 'python3' in self.ctx.python_recipe.name: + env['LDFLAGS'] += 'm' + return env + + +recipe = CffiRecipe() diff --git a/p4a/pythonforandroidold/recipes/cffi/disable-pkg-config.patch b/p4a/pythonforandroidold/recipes/cffi/disable-pkg-config.patch new file mode 100644 index 0000000..cf2abd5 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/cffi/disable-pkg-config.patch @@ -0,0 +1,30 @@ +diff --git a/setup.py b/setup.py +index c1db368..57311c3 100644 +--- a/setup.py ++++ b/setup.py +@@ -5,8 +5,7 @@ import errno + + sources = ['c/_cffi_backend.c'] + libraries = ['ffi'] +-include_dirs = ['/usr/include/ffi', +- '/usr/include/libffi'] # may be changed by pkg-config ++include_dirs = os.environ['FFI_INC'].split(",") if 'FFI_INC' in os.environ else [] + define_macros = [] + library_dirs = [] + extra_compile_args = [] +@@ -67,14 +66,7 @@ def ask_supports_thread(): + sys.stderr.write("The above error message can be safely ignored\n") + + def use_pkg_config(): +- if sys.platform == 'darwin' and os.path.exists('/usr/local/bin/brew'): +- use_homebrew_for_libffi() +- +- _ask_pkg_config(include_dirs, 
'--cflags-only-I', '-I', sysroot=True) +- _ask_pkg_config(extra_compile_args, '--cflags-only-other') +- _ask_pkg_config(library_dirs, '--libs-only-L', '-L', sysroot=True) +- _ask_pkg_config(extra_link_args, '--libs-only-other') +- _ask_pkg_config(libraries, '--libs-only-l', '-l') ++ pass + + def use_homebrew_for_libffi(): + # We can build by setting: diff --git a/p4a/pythonforandroid/recipes/cherrypy/__init__.py b/p4a/pythonforandroidold/recipes/cherrypy/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/cherrypy/__init__.py rename to p4a/pythonforandroidold/recipes/cherrypy/__init__.py diff --git a/p4a/pythonforandroidold/recipes/coverage/__init__.py b/p4a/pythonforandroidold/recipes/coverage/__init__.py new file mode 100644 index 0000000..95f08f1 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/coverage/__init__.py @@ -0,0 +1,19 @@ +from pythonforandroid.recipe import PythonRecipe + + +class CoverageRecipe(PythonRecipe): + + version = '4.1' + + url = 'https://pypi.python.org/packages/2d/10/6136c8e10644c16906edf4d9f7c782c0f2e7ed47ff2f41f067384e432088/coverage-{version}.tar.gz' + + depends = [('hostpython2', 'hostpython3'), 'setuptools'] + + patches = ['fallback-utf8.patch'] + + site_packages_name = 'coverage' + + call_hostpython_via_targetpython = False + + +recipe = CoverageRecipe() diff --git a/p4a/pythonforandroidold/recipes/coverage/fallback-utf8.patch b/p4a/pythonforandroidold/recipes/coverage/fallback-utf8.patch new file mode 100644 index 0000000..6d251c4 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/coverage/fallback-utf8.patch @@ -0,0 +1,12 @@ +--- coverage-4.1/coverage/misc.py 2016-02-13 20:04:35.000000000 +0100 ++++ patch/coverage/misc.py 2016-07-11 17:07:22.656603295 +0200 +@@ -166,7 +166,8 @@ + encoding = ( + getattr(outfile, "encoding", None) or + getattr(sys.__stdout__, "encoding", None) or +- locale.getpreferredencoding() ++ locale.getpreferredencoding() or ++ 'utf-8' + ) + return encoding + diff --git a/p4a/pythonforandroidold/recipes/cryptography/__init__.py b/p4a/pythonforandroidold/recipes/cryptography/__init__.py new file mode 100644 index 0000000..1b7baba --- /dev/null +++ b/p4a/pythonforandroidold/recipes/cryptography/__init__.py @@ -0,0 +1,23 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe + + +class CryptographyRecipe(CompiledComponentsPythonRecipe): + name = 'cryptography' + version = '2.6.1' + url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' + depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', + 'enum34', 'ipaddress', 'cffi'] + call_hostpython_via_targetpython = False + + def get_recipe_env(self, arch): + env = super(CryptographyRecipe, self).get_recipe_env(arch) + + openssl_recipe = Recipe.get_recipe('openssl', self.ctx) + env['CFLAGS'] += openssl_recipe.include_flags(arch) + env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) + env['LIBS'] = openssl_recipe.link_libs_flags() + + return env + + +recipe = CryptographyRecipe() diff --git a/p4a/pythonforandroidold/recipes/cymunk/__init__.py b/p4a/pythonforandroidold/recipes/cymunk/__init__.py new file mode 100644 index 0000000..96d4169 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/cymunk/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import CythonRecipe + + +class CymunkRecipe(CythonRecipe): + version = 'master' + url = 'https://github.com/tito/cymunk/archive/{version}.zip' + name = 'cymunk' + + depends = [('python2', 'python3crystax', 'python3')] + + +recipe = CymunkRecipe() diff --git 
a/p4a/pythonforandroidold/recipes/dateutil/__init__.py b/p4a/pythonforandroidold/recipes/dateutil/__init__.py new file mode 100644 index 0000000..3367f8d --- /dev/null +++ b/p4a/pythonforandroidold/recipes/dateutil/__init__.py @@ -0,0 +1,14 @@ +from pythonforandroid.recipe import PythonRecipe + + +class DateutilRecipe(PythonRecipe): + name = 'dateutil' + version = '2.6.0' + url = 'https://pypi.python.org/packages/51/fc/39a3fbde6864942e8bb24c93663734b74e281b984d1b8c4f95d64b0c21f6/python-dateutil-2.6.0.tar.gz' + + depends = ["setuptools"] + call_hostpython_via_targetpython = False + install_in_hostpython = True + + +recipe = DateutilRecipe() diff --git a/p4a/pythonforandroidold/recipes/decorator/__init__.py b/p4a/pythonforandroidold/recipes/decorator/__init__.py new file mode 100644 index 0000000..e1001dd --- /dev/null +++ b/p4a/pythonforandroidold/recipes/decorator/__init__.py @@ -0,0 +1,13 @@ +from pythonforandroid.recipe import PythonRecipe + + +class DecoratorPyRecipe(PythonRecipe): + version = '4.2.1' + url = 'https://pypi.python.org/packages/source/d/decorator/decorator-{version}.tar.gz' + url = 'https://github.com/micheles/decorator/archive/{version}.tar.gz' + depends = ['setuptools'] + site_packages_name = 'decorator' + call_hostpython_via_targetpython = False + + +recipe = DecoratorPyRecipe() diff --git a/p4a/pythonforandroidold/recipes/enaml/0001-Update-setup.py.patch b/p4a/pythonforandroidold/recipes/enaml/0001-Update-setup.py.patch new file mode 100644 index 0000000..c84f892 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/enaml/0001-Update-setup.py.patch @@ -0,0 +1,25 @@ +From 156a0426f7350bf49bdfae1aad555e13c9494b9a Mon Sep 17 00:00:00 2001 +From: frmdstryr +Date: Thu, 23 Jun 2016 22:04:32 -0400 +Subject: [PATCH] Update setup.py + +--- + setup.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/setup.py b/setup.py +index 3bfd2a2..99817e5 100644 +--- a/setup.py ++++ b/setup.py +@@ -72,7 +72,7 @@ setup( + url='https://github.com/nucleic/enaml', + description='Declarative DSL for building rich user interfaces in Python', + long_description=open('README.rst').read(), +- requires=['atom', 'PyQt', 'ply', 'kiwisolver'], ++ requires=['atom', 'ply', 'kiwisolver'], + install_requires=['distribute', 'atom >= 0.3.8', 'kiwisolver >= 0.1.2', 'ply >= 3.4'], + packages=find_packages(), + package_data={ +-- +2.7.4 + diff --git a/p4a/pythonforandroidold/recipes/enaml/__init__.py b/p4a/pythonforandroidold/recipes/enaml/__init__.py new file mode 100644 index 0000000..d233520 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/enaml/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe + + +class EnamlRecipe(CppCompiledComponentsPythonRecipe): + site_packages_name = 'enaml' + version = '0.9.8' + url = 'https://github.com/nucleic/enaml/archive/{version}.zip' + patches = ['0001-Update-setup.py.patch'] # Remove PyQt dependency + depends = ['setuptools', 'atom', 'kiwisolver'] + + +recipe = EnamlRecipe() diff --git a/p4a/pythonforandroid/recipes/enum34/__init__.py b/p4a/pythonforandroidold/recipes/enum34/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/enum34/__init__.py rename to p4a/pythonforandroidold/recipes/enum34/__init__.py diff --git a/p4a/pythonforandroidold/recipes/ethash/__init__.py b/p4a/pythonforandroidold/recipes/ethash/__init__.py new file mode 100644 index 0000000..b65e10a --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ethash/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.recipe 
import PythonRecipe + + +class EthashRecipe(PythonRecipe): + + url = 'https://github.com/ethereum/ethash/archive/master.zip' + + depends = ['setuptools'] + + +recipe = EthashRecipe() diff --git a/p4a/pythonforandroidold/recipes/evdev/__init__.py b/p4a/pythonforandroidold/recipes/evdev/__init__.py new file mode 100644 index 0000000..afd542e --- /dev/null +++ b/p4a/pythonforandroidold/recipes/evdev/__init__.py @@ -0,0 +1,25 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class EvdevRecipe(CompiledComponentsPythonRecipe): + name = 'evdev' + version = 'v0.4.7' + url = 'https://github.com/gvalkov/python-evdev/archive/{version}.zip' + + depends = [] + + build_cmd = 'build' + + patches = ['evcnt.patch', + 'keycnt.patch', + 'remove-uinput.patch', + 'include-dir.patch', + 'evdev-permissions.patch'] + + def get_recipe_env(self, arch=None): + env = super(EvdevRecipe, self).get_recipe_env(arch) + env['NDKPLATFORM'] = self.ctx.ndk_platform + return env + + +recipe = EvdevRecipe() diff --git a/p4a/pythonforandroidold/recipes/evdev/evcnt.patch b/p4a/pythonforandroidold/recipes/evdev/evcnt.patch new file mode 100644 index 0000000..f140ddd --- /dev/null +++ b/p4a/pythonforandroidold/recipes/evdev/evcnt.patch @@ -0,0 +1,21 @@ +diff -Naur orig/evdev/input.c v0.4.7/evdev/input.c +--- orig/evdev/input.c 2015-06-11 13:56:43.483891914 -0500 ++++ v0.4.7/evdev/input.c 2015-06-11 13:57:29.079529095 -0500 +@@ -24,6 +24,8 @@ + #include + #endif + ++#define EV_CNT (EV_MAX+1) ++ + #define MAX_NAME_SIZE 256 + + extern char* EV_NAME[EV_CNT]; +@@ -190,7 +192,7 @@ + absinfo.maximum, + absinfo.fuzz, + absinfo.flat, +- absinfo.resolution); ++ 0); + + evlong = PyLong_FromLong(ev_code); + absitem = Py_BuildValue("(OO)", evlong, py_absinfo); diff --git a/p4a/pythonforandroidold/recipes/evdev/evdev-permissions.patch b/p4a/pythonforandroidold/recipes/evdev/evdev-permissions.patch new file mode 100644 index 0000000..0faa6e7 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/evdev/evdev-permissions.patch @@ -0,0 +1,57 @@ +diff -Naur orig/evdev/util.py v0.4.7/evdev/util.py +--- orig/evdev/util.py 2015-06-12 16:31:46.532994729 -0500 ++++ v0.4.7/evdev/util.py 2015-06-12 16:32:59.489933840 -0500 +@@ -3,15 +3,53 @@ + import os + import stat + import glob ++import subprocess + + from evdev import ecodes + from evdev.events import event_factory + + ++su = False ++ ++ ++def get_su_binary(): ++ global su ++ if su is not False: ++ return su ++ ++ su_files = ['/sbin/su', '/system/bin/su', '/system/xbin/su', '/data/local/xbin/su', ++ '/data/local/bin/su', '/system/sd/xbin/su', '/system/bin/failsafe/su', ++ '/data/local/su'] ++ su = None ++ ++ for fn in su_files: ++ if os.path.exists(fn): ++ try: ++ cmd = [fn, '-c', 'id'] ++ output = subprocess.check_output(cmd) ++ except Exception: ++ pass ++ else: ++ if 'uid=0' in output: ++ su = fn ++ break ++ ++ return su ++ ++ ++def fix_permissions(nodes): ++ su = get_su_binary() ++ if su: ++ cmd = 'chmod 666 ' + ' '.join(nodes) ++ print cmd ++ subprocess.check_call(['su', '-c', cmd]) ++ ++ + def list_devices(input_device_dir='/dev/input'): + '''List readable character devices in ``input_device_dir``.''' + + fns = glob.glob('{}/event*'.format(input_device_dir)) ++ fix_permissions(fns) + fns = list(filter(is_device, fns)) + + return fns diff --git a/p4a/pythonforandroidold/recipes/evdev/include-dir.patch b/p4a/pythonforandroidold/recipes/evdev/include-dir.patch new file mode 100644 index 0000000..d6a7c81 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/evdev/include-dir.patch 
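# ----------------------------------------------------------------------------
# [editor's example -- annotation, not part of the patch] The evdev recipe
# above exports NDKPLATFORM into the build environment, and the
# include-dir.patch whose hunk follows makes evdev's setup.py read that
# variable instead of the host's /usr/include. The handshake reduces to the
# two sides sketched here; the NDK path is an illustrative placeholder.
import os

# Recipe side -- prepared by get_recipe_env() before the build starts:
env = dict(os.environ)
env['NDKPLATFORM'] = '/opt/android-ndk/platforms/android-21/arch-arm'

# Patched setup.py side -- executed inside the evdev source tree:
header = env['NDKPLATFORM'] + '/usr/include/linux/input.h'
# create_ecodes() then parses `header` to generate the ecodes extension.
# [end editor's example] -------------------------------------------------------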
@@ -0,0 +1,12 @@ +diff -Naur orig/setup.py v0.4.7/setup.py +--- orig/setup.py 2015-06-11 14:16:31.315765908 -0500 ++++ v0.4.7/setup.py 2015-06-11 14:17:05.800263536 -0500 +@@ -64,7 +64,7 @@ + + #----------------------------------------------------------------------------- + def create_ecodes(): +- header = '/usr/include/linux/input.h' ++ header = os.environ['NDKPLATFORM'] + '/usr/include/linux/input.h' + + if not os.path.isfile(header): + msg = '''\ diff --git a/p4a/pythonforandroidold/recipes/evdev/keycnt.patch b/p4a/pythonforandroidold/recipes/evdev/keycnt.patch new file mode 100644 index 0000000..c0f9c12 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/evdev/keycnt.patch @@ -0,0 +1,20 @@ +diff -Naur orig/evdev/genecodes.py v0.4.7/evdev/genecodes.py +--- orig/evdev/genecodes.py 2015-06-12 11:18:39.460538902 -0500 ++++ v0.4.7/evdev/genecodes.py 2015-06-12 11:20:49.004337615 -0500 +@@ -17,6 +17,8 @@ + #include + #endif + ++#define KEY_CNT (KEY_MAX+1) ++ + /* Automatically generated by evdev.genecodes */ + /* Generated on %s */ + +@@ -88,6 +88,7 @@ + macro = regex.search(line) + if macro: + yield ' PyModule_AddIntMacro(m, %s);' % macro.group(1) ++ yield ' PyModule_AddIntMacro(m, KEY_CNT);' + + uname = list(os.uname()); del uname[1] + uname = ' '.join(uname) diff --git a/p4a/pythonforandroidold/recipes/evdev/remove-uinput.patch b/p4a/pythonforandroidold/recipes/evdev/remove-uinput.patch new file mode 100644 index 0000000..82af122 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/evdev/remove-uinput.patch @@ -0,0 +1,523 @@ +diff -Naur orig/evdev/device.py v0.4.7/evdev/device.py +--- orig/evdev/device.py 2015-06-11 14:05:00.452884781 -0500 ++++ v0.4.7/evdev/device.py 2015-06-11 14:05:47.606553546 -0500 +@@ -4,7 +4,7 @@ + from select import select + from collections import namedtuple + +-from evdev import _input, _uinput, ecodes, util ++from evdev import _input, ecodes, util + from evdev.events import InputEvent + + +@@ -203,7 +203,7 @@ + + .. 
+ ''' +- _uinput.write(self.fd, ecodes.EV_LED, led_num, value) ++ pass + + def __eq__(self, other): + '''Two devices are equal if their :data:`info` attributes are equal.''' +diff -Naur orig/evdev/__init__.py v0.4.7/evdev/__init__.py +--- orig/evdev/__init__.py 2015-06-11 14:05:00.452884781 -0500 ++++ v0.4.7/evdev/__init__.py 2015-06-11 14:05:22.973204070 -0500 +@@ -6,7 +6,6 @@ + + from evdev.device import DeviceInfo, InputDevice, AbsInfo + from evdev.events import InputEvent, KeyEvent, RelEvent, SynEvent, AbsEvent, event_factory +-from evdev.uinput import UInput, UInputError + from evdev.util import list_devices, categorize, resolve_ecodes + from evdev import ecodes + from evdev import ff +diff -Naur orig/evdev/uinput.c v0.4.7/evdev/uinput.c +--- orig/evdev/uinput.c 2015-06-11 14:05:00.453884795 -0500 ++++ v0.4.7/evdev/uinput.c 1969-12-31 18:00:00.000000000 -0600 +@@ -1,255 +0,0 @@ +-#include +- +-#include +-#include +-#include +-#include +-#include +-#include +-#include +- +-#ifdef __FreeBSD__ +-#include +-#include +-#else +-#include +-#include +-#endif +- +-int _uinput_close(int fd) +-{ +- if (ioctl(fd, UI_DEV_DESTROY) < 0) { +- int oerrno = errno; +- close(fd); +- errno = oerrno; +- return -1; +- } +- +- return close(fd); +-} +- +- +-static PyObject * +-uinput_open(PyObject *self, PyObject *args) +-{ +- const char* devnode; +- +- int ret = PyArg_ParseTuple(args, "s", &devnode); +- if (!ret) return NULL; +- +- int fd = open(devnode, O_WRONLY | O_NONBLOCK); +- if (fd < 0) { +- PyErr_SetString(PyExc_IOError, "could not open uinput device in write mode"); +- return NULL; +- } +- +- return Py_BuildValue("i", fd); +-} +- +- +-static PyObject * +-uinput_create(PyObject *self, PyObject *args) { +- int fd, len, i, abscode; +- uint16_t vendor, product, version, bustype; +- +- PyObject *absinfo = NULL, *item = NULL; +- +- struct uinput_user_dev uidev; +- const char* name; +- +- int ret = PyArg_ParseTuple(args, "ishhhhO", &fd, &name, &vendor, +- &product, &version, &bustype, &absinfo); +- if (!ret) return NULL; +- +- memset(&uidev, 0, sizeof(uidev)); +- strncpy(uidev.name, name, UINPUT_MAX_NAME_SIZE); +- uidev.id.vendor = vendor; +- uidev.id.product = product; +- uidev.id.version = version; +- uidev.id.bustype = bustype; +- +- len = PyList_Size(absinfo); +- for (i=0; i (ABS_X, 0, 255, 0, 0) +- item = PyList_GetItem(absinfo, i); +- abscode = (int)PyLong_AsLong(PyList_GetItem(item, 0)); +- +- uidev.absmin[abscode] = PyLong_AsLong(PyList_GetItem(item, 1)); +- uidev.absmax[abscode] = PyLong_AsLong(PyList_GetItem(item, 2)); +- uidev.absfuzz[abscode] = PyLong_AsLong(PyList_GetItem(item, 3)); +- uidev.absflat[abscode] = PyLong_AsLong(PyList_GetItem(item, 4)); +- } +- +- if (write(fd, &uidev, sizeof(uidev)) != sizeof(uidev)) +- goto on_err; +- +- /* if (ioctl(fd, UI_SET_EVBIT, EV_KEY) < 0) */ +- /* goto on_err; */ +- /* int i; */ +- /* for (i=0; i= 3 +-static struct PyModuleDef moduledef = { +- PyModuleDef_HEAD_INIT, +- MODULE_NAME, +- MODULE_HELP, +- -1, /* m_size */ +- MethodTable, /* m_methods */ +- NULL, /* m_reload */ +- NULL, /* m_traverse */ +- NULL, /* m_clear */ +- NULL, /* m_free */ +-}; +- +-static PyObject * +-moduleinit(void) +-{ +- PyObject* m = PyModule_Create(&moduledef); +- if (m == NULL) return NULL; +- +- PyModule_AddIntConstant(m, "maxnamelen", UINPUT_MAX_NAME_SIZE); +- return m; +-} +- +-PyMODINIT_FUNC +-PyInit__uinput(void) +-{ +- return moduleinit(); +-} +- +-#else +-static PyObject * +-moduleinit(void) +-{ +- PyObject* m = Py_InitModule3(MODULE_NAME, MethodTable, MODULE_HELP); +- 
if (m == NULL) return NULL; +- +- PyModule_AddIntConstant(m, "maxnamelen", UINPUT_MAX_NAME_SIZE); +- return m; +-} +- +-PyMODINIT_FUNC +-init_uinput(void) +-{ +- moduleinit(); +-} +-#endif +diff -Naur orig/evdev/uinput.py v0.4.7/evdev/uinput.py +--- orig/evdev/uinput.py 2015-06-11 14:05:00.453884795 -0500 ++++ v0.4.7/evdev/uinput.py 1969-12-31 18:00:00.000000000 -0600 +@@ -1,208 +0,0 @@ +-# encoding: utf-8 +- +-import os +-import stat +-import time +- +-from evdev import _uinput +-from evdev import ecodes, util, device +- +- +-class UInputError(Exception): +- pass +- +- +-class UInput(object): +- ''' +- A userland input device and that can inject input events into the +- linux input subsystem. +- ''' +- +- __slots__ = ( +- 'name', 'vendor', 'product', 'version', 'bustype', +- 'events', 'devnode', 'fd', 'device', +- ) +- +- def __init__(self, +- events=None, +- name='py-evdev-uinput', +- vendor=0x1, product=0x1, version=0x1, bustype=0x3, +- devnode='/dev/uinput'): +- ''' +- :param events: the event types and codes that the uinput +- device will be able to inject - defaults to all +- key codes. +- +- :type events: dictionary of event types mapping to lists of +- event codes. +- +- :param name: the name of the input device. +- :param vendor: vendor identifier. +- :param product: product identifier. +- :param version: version identifier. +- :param bustype: bustype identifier. +- +- .. note:: If you do not specify any events, the uinput device +- will be able to inject only ``KEY_*`` and ``BTN_*`` +- event codes. +- ''' +- +- self.name = name #: Uinput device name. +- self.vendor = vendor #: Device vendor identifier. +- self.product = product #: Device product identifier. +- self.version = version #: Device version identifier. +- self.bustype = bustype #: Device bustype - eg. ``BUS_USB``. +- self.devnode = devnode #: Uinput device node - eg. ``/dev/uinput/``. +- +- if not events: +- events = {ecodes.EV_KEY: ecodes.keys.keys()} +- +- # the min, max, fuzz and flat values for the absolute axis for +- # a given code +- absinfo = [] +- +- self._verify() +- +- #: Write-only, non-blocking file descriptor to the uinput device node. +- self.fd = _uinput.open(devnode) +- +- # set device capabilities +- for etype, codes in events.items(): +- for code in codes: +- # handle max, min, fuzz, flat +- if isinstance(code, (tuple, list, device.AbsInfo)): +- # flatten (ABS_Y, (0, 255, 0, 0)) to (ABS_Y, 0, 255, 0, 0) +- f = [code[0]]; f += code[1] +- absinfo.append(f) +- code = code[0] +- +- #:todo: a lot of unnecessary packing/unpacking +- _uinput.enable(self.fd, etype, code) +- +- # create uinput device +- _uinput.create(self.fd, name, vendor, product, version, bustype, absinfo) +- +- #: An :class:`InputDevice ` instance +- #: for the fake input device. ``None`` if the device cannot be +- #: opened for reading and writing. 
+- self.device = self._find_device() +- +- def __enter__(self): +- return self +- +- def __exit__(self, type, value, tb): +- if hasattr(self, 'fd'): +- self.close() +- +- def __repr__(self): +- # :todo: +- v = (repr(getattr(self, i)) for i in +- ('name', 'bustype', 'vendor', 'product', 'version')) +- return '{}({})'.format(self.__class__.__name__, ', '.join(v)) +- +- def __str__(self): +- msg = ('name "{}", bus "{}", vendor "{:04x}", product "{:04x}", version "{:04x}"\n' +- 'event types: {}') +- +- evtypes = [i[0] for i in self.capabilities(True).keys()] +- msg = msg.format(self.name, ecodes.BUS[self.bustype], +- self.vendor, self.product, +- self.version, ' '.join(evtypes)) +- +- return msg +- +- def close(self): +- # close the associated InputDevice, if it was previously opened +- if self.device is not None: +- self.device.close() +- +- # destroy the uinput device +- if self.fd > -1: +- _uinput.close(self.fd) +- self.fd = -1 +- +- def write_event(self, event): +- ''' +- Inject an input event into the input subsystem. Events are +- queued until a synchronization event is received. +- +- :param event: InputEvent instance or an object with an +- ``event`` attribute (:class:`KeyEvent +- `, :class:`RelEvent +- ` etc). +- +- Example:: +- +- ev = InputEvent(1334414993, 274296, ecodes.EV_KEY, ecodes.KEY_A, 1) +- ui.write_event(ev) +- ''' +- +- if hasattr(event, 'event'): +- event = event.event +- +- self.write(event.type, event.code, event.value) +- +- def write(self, etype, code, value): +- ''' +- Inject an input event into the input subsystem. Events are +- queued until a synchronization event is received. +- +- :param etype: event type (eg. ``EV_KEY``). +- :param code: event code (eg. ``KEY_A``). +- :param value: event value (eg. 0 1 2 - depends on event type). +- +- Example:: +- +- ui.write(e.EV_KEY, e.KEY_A, 1) # key A - down +- ui.write(e.EV_KEY, e.KEY_A, 0) # key A - up +- ''' +- +- _uinput.write(self.fd, etype, code, value) +- +- def syn(self): +- ''' +- Inject a ``SYN_REPORT`` event into the input subsystem. Events +- queued by :func:`write()` will be fired. If possible, events +- will be merged into an 'atomic' event. +- ''' +- +- _uinput.write(self.fd, ecodes.EV_SYN, ecodes.SYN_REPORT, 0) +- +- def capabilities(self, verbose=False, absinfo=True): +- '''See :func:`capabilities `.''' +- if self.device is None: +- raise UInputError('input device not opened - cannot read capabilites') +- +- return self.device.capabilities(verbose, absinfo) +- +- def _verify(self): +- ''' +- Verify that an uinput device exists and is readable and writable +- by the current process. 
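# ----------------------------------------------------------------------------
# [editor's example -- annotation, not part of the patch] remove-uinput.patch
# strips evdev's uinput support because stock Android exposes no writable
# /dev/uinput to applications. A standalone capability probe equivalent to
# the `_verify()` method being deleted here would look like this:
import os
import stat

def uinput_available(devnode='/dev/uinput'):
    """Return True if `devnode` is a character device we may write to."""
    try:
        mode = os.stat(devnode).st_mode
    except OSError:
        return False
    return stat.S_ISCHR(mode) and os.access(devnode, os.W_OK)
# [end editor's example] -------------------------------------------------------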
+- ''' +- +- try: +- m = os.stat(self.devnode)[stat.ST_MODE] +- if not stat.S_ISCHR(m): +- raise +- except (IndexError, OSError): +- msg = '"{}" does not exist or is not a character device file '\ +- '- verify that the uinput module is loaded' +- raise UInputError(msg.format(self.devnode)) +- +- if not os.access(self.devnode, os.W_OK): +- msg = '"{}" cannot be opened for writing' +- raise UInputError(msg.format(self.devnode)) +- +- if len(self.name) > _uinput.maxnamelen: +- msg = 'uinput device name must not be longer than {} characters' +- raise UInputError(msg.format(_uinput.maxnamelen)) +- +- def _find_device(self): +- #:bug: the device node might not be immediately available +- time.sleep(0.1) +- +- for fn in util.list_devices('/dev/input/'): +- d = device.InputDevice(fn) +- if d.name == self.name: +- return d +diff -Naur orig/setup.py v0.4.7/setup.py +--- orig/setup.py 2015-06-11 14:05:00.450884753 -0500 ++++ v0.4.7/setup.py 2015-06-11 14:06:13.050914776 -0500 +@@ -37,7 +37,6 @@ + #----------------------------------------------------------------------------- + cflags = ['-std=c99', '-Wno-error=declaration-after-statement'] + input_c = Extension('evdev._input', sources=['evdev/input.c'], extra_compile_args=cflags) +-uinput_c = Extension('evdev._uinput', sources=['evdev/uinput.c'], extra_compile_args=cflags) + ecodes_c = Extension('evdev._ecodes', sources=['evdev/ecodes.c'], extra_compile_args=cflags) + + #----------------------------------------------------------------------------- +@@ -56,7 +55,7 @@ + 'classifiers': classifiers, + + 'packages': ['evdev'], +- 'ext_modules': [input_c, uinput_c, ecodes_c], ++ 'ext_modules': [input_c, ecodes_c], + 'include_package_data': False, + 'zip_safe': True, + 'cmdclass': {}, diff --git a/p4a/pythonforandroidold/recipes/feedparser/__init__.py b/p4a/pythonforandroidold/recipes/feedparser/__init__.py new file mode 100644 index 0000000..cce88b9 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/feedparser/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import PythonRecipe + + +class FeedparserPyRecipe(PythonRecipe): + version = '5.2.1' + url = 'https://github.com/kurtmckee/feedparser/archive/{version}.tar.gz' + depends = ['setuptools'] + site_packages_name = 'feedparser' + call_hostpython_via_targetpython = False + + +recipe = FeedparserPyRecipe() diff --git a/p4a/pythonforandroidold/recipes/ffmpeg/__init__.py b/p4a/pythonforandroidold/recipes/ffmpeg/__init__.py new file mode 100644 index 0000000..f8e3ec1 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ffmpeg/__init__.py @@ -0,0 +1,126 @@ +from pythonforandroid.toolchain import Recipe, current_directory, shprint +from os.path import exists, join, realpath +import sh + + +class FFMpegRecipe(Recipe): + version = '3.4.5' + url = 'http://ffmpeg.org/releases/ffmpeg-{version}.tar.bz2' + depends = ['sdl2'] # Need this to build correct recipe order + opts_depends = ['openssl', 'ffpyplayer_codecs'] + patches = ['patches/configure.patch'] + + def should_build(self, arch): + build_dir = self.get_build_dir(arch.arch) + return not exists(join(build_dir, 'lib', 'libavcodec.so')) + + def prebuild_arch(self, arch): + self.apply_patches(arch) + + def get_recipe_env(self, arch): + env = super(FFMpegRecipe, self).get_recipe_env(arch) + env['NDK'] = self.ctx.ndk_dir + return env + + def build_arch(self, arch): + with current_directory(self.get_build_dir(arch.arch)): + env = arch.get_env() + + flags = ['--disable-everything'] + cflags = [] + ldflags = [] + + if 'openssl' in self.ctx.recipe_build_order: + 
flags += [ + '--enable-openssl', + '--enable-nonfree', + '--enable-protocol=https,tls_openssl', + ] + build_dir = Recipe.get_recipe('openssl', self.ctx).get_build_dir(arch.arch) + cflags += ['-I' + build_dir + '/include/'] + ldflags += ['-L' + build_dir] + + if 'ffpyplayer_codecs' in self.ctx.recipe_build_order: + # libx264 + flags += ['--enable-libx264'] + build_dir = Recipe.get_recipe('libx264', self.ctx).get_build_dir(arch.arch) + cflags += ['-I' + build_dir + '/include/'] + ldflags += ['-lx264', '-L' + build_dir + '/lib/'] + + # libshine + flags += ['--enable-libshine'] + build_dir = Recipe.get_recipe('libshine', self.ctx).get_build_dir(arch.arch) + cflags += ['-I' + build_dir + '/include/'] + ldflags += ['-lshine', '-L' + build_dir + '/lib/'] + + # Enable all codecs: + flags += [ + '--enable-parsers', + '--enable-decoders', + '--enable-encoders', + '--enable-muxers', + '--enable-demuxers', + ] + else: + # Enable codecs only for .mp4: + flags += [ + '--enable-parser=aac,ac3,h261,h264,mpegaudio,mpeg4video,mpegvideo,vc1', + '--enable-decoder=aac,h264,mpeg4,mpegvideo', + '--enable-muxer=h264,mov,mp4,mpeg2video', + '--enable-demuxer=aac,h264,m4v,mov,mpegvideo,vc1', + ] + + # needed to prevent _ffmpeg.so: version node not found for symbol av_init_packet@LIBAVFORMAT_52 + # /usr/bin/ld: failed to set dynamic section sizes: Bad value + flags += [ + '--disable-symver', + ] + + # disable binaries / doc + flags += [ + '--disable-ffmpeg', + '--disable-ffplay', + '--disable-ffprobe', + '--disable-ffserver', + '--disable-doc', + ] + + # other flags: + flags += [ + '--enable-filter=aresample,resample,crop,adelay,volume,scale', + '--enable-protocol=file,http', + '--enable-small', + '--enable-hwaccels', + '--enable-gpl', + '--enable-pic', + '--disable-static', + '--enable-shared', + ] + + # android: + flags += [ + '--target-os=android', + '--cross-prefix=arm-linux-androideabi-', + '--arch=arm', + '--sysroot=' + self.ctx.ndk_platform, + '--enable-neon', + '--prefix={}'.format(realpath('.')), + ] + cflags += [ + '-mfpu=vfpv3-d16', + '-mfloat-abi=softfp', + '-fPIC', + ] + + env['CFLAGS'] += ' ' + ' '.join(cflags) + env['LDFLAGS'] += ' ' + ' '.join(ldflags) + + configure = sh.Command('./configure') + shprint(configure, *flags, _env=env) + shprint(sh.make, '-j4', _env=env) + shprint(sh.make, 'install', _env=env) + # copy libs: + sh.cp('-a', sh.glob('./lib/lib*.so'), self.ctx.get_libs_dir(arch.arch)) + + +recipe = FFMpegRecipe() diff --git a/p4a/pythonforandroidold/recipes/ffmpeg/patches/configure.patch b/p4a/pythonforandroidold/recipes/ffmpeg/patches/configure.patch new file mode 100644 index 0000000..b898c7f --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ffmpeg/patches/configure.patch @@ -0,0 +1,40 @@ +--- ./configure.orig 2017-12-11 00:35:18.000000000 +0300 ++++ ./configure 2017-12-19 09:47:54.104914600 +0300 +@@ -4841,9 +4841,6 @@ + add_cflags -std=c11 || + check_cflags -std=c99 + +-check_cppflags -D_FILE_OFFSET_BITS=64 +-check_cppflags -D_LARGEFILE_SOURCE +- + add_host_cppflags -D_ISOC99_SOURCE + check_host_cflags -std=c99 + check_host_cflags -Wall +@@ -5979,7 +5976,7 @@ + enabled librsvg && require_pkg_config librsvg librsvg-2.0 librsvg-2.0/librsvg/rsvg.h rsvg_handle_render_cairo + enabled librtmp && require_pkg_config librtmp librtmp librtmp/rtmp.h RTMP_Socket + enabled librubberband && require_pkg_config librubberband "rubberband >= 1.8.1" rubberband/rubberband-c.h rubberband_new +-enabled libshine && require_pkg_config libshine shine shine/layer3.h shine_encode_buffer ++enabled libshine && 
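# ----------------------------------------------------------------------------
# [editor's example -- annotation, not part of the patch] The ffmpeg recipe
# above turns whole feature groups on or off depending on which other
# recipes take part in the build (ctx.recipe_build_order). Its control flow
# reduces to a pure function like the sketch below; the flag values are
# copied from the recipe, the function name is hypothetical.
def ffmpeg_feature_flags(recipe_build_order):
    flags = ['--disable-everything']
    if 'openssl' in recipe_build_order:
        flags += ['--enable-openssl', '--enable-nonfree']
    if 'ffpyplayer_codecs' in recipe_build_order:
        # full codec set, plus the external libx264/libshine encoders
        flags += ['--enable-libx264', '--enable-libshine',
                  '--enable-decoders', '--enable-encoders']
    else:
        # trimmed-down build, good enough for .mp4 playback
        flags += ['--enable-decoder=aac,h264,mpeg4,mpegvideo']
    return flags

# The recipe then runs: shprint(sh.Command('./configure'), *flags, _env=env)
# [end editor's example] -------------------------------------------------------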
require "shine" shine/layer3.h shine_encode_buffer -lshine + enabled libsmbclient && { use_pkg_config libsmbclient smbclient libsmbclient.h smbc_init || + require smbclient libsmbclient.h smbc_init -lsmbclient; } + enabled libsnappy && require libsnappy snappy-c.h snappy_compress -lsnappy + +diff -Naur ffmpeg/configure ffmpeg-1/configure +--- ffmpeg/configure 2019-01-11 09:30:02.824961600 +0100 ++++ ffmpeg-1/configure 2019-01-11 09:29:54.976149600 +0100 +@@ -6068,11 +6068,11 @@ + { ! enabled cross_compile && add_cflags -isystem/opt/vc/include/IL && check_header OMX_Core.h ; } || + die "ERROR: OpenMAX IL headers not found"; } + enabled omx && require_header OMX_Core.h +-enabled openssl && { use_pkg_config openssl openssl openssl/ssl.h OPENSSL_init_ssl || ++enabled openssl && { use_pkg_config openssl openssl openssl/ssl.h OPENSSL_init_ssl || + use_pkg_config openssl openssl openssl/ssl.h SSL_library_init || +- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl -lcrypto || +- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl32 -leay32 || +- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl -lcrypto -lws2_32 -lgdi32 || ++ check_lib openssl openssl/ssl.h SSL_library_init -lssl -lcrypto || ++ check_lib openssl openssl/ssl.h SSL_library_init -lssl32 -leay32 || ++ check_lib openssl openssl/ssl.h SSL_library_init -lssl -lcrypto -lws2_32 -lgdi32 || + die "ERROR: openssl not found"; } + enabled rkmpp && { { require_pkg_config rockchip_mpp rockchip_mpp rockchip/rk_mpi.h mpp_create || diff --git a/p4a/pythonforandroidold/recipes/ffpyplayer/__init__.py b/p4a/pythonforandroidold/recipes/ffpyplayer/__init__.py new file mode 100644 index 0000000..9ff29b7 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ffpyplayer/__init__.py @@ -0,0 +1,28 @@ +from pythonforandroid.recipe import CythonRecipe +from pythonforandroid.toolchain import Recipe +from os.path import join + + +class FFPyPlayerRecipe(CythonRecipe): + version = '6f7568b498715c2da88f061ebad082a042514923' + url = 'https://github.com/matham/ffpyplayer/archive/{version}.zip' + depends = [('python2', 'python3'), 'sdl2', 'ffmpeg'] + opt_depends = ['openssl', 'ffpyplayer_codecs'] + + def get_recipe_env(self, arch, with_flags_in_cc=True): + env = super(FFPyPlayerRecipe, self).get_recipe_env(arch) + + build_dir = Recipe.get_recipe('ffmpeg', self.ctx).get_build_dir(arch.arch) + env["FFMPEG_INCLUDE_DIR"] = join(build_dir, "include") + env["FFMPEG_LIB_DIR"] = join(build_dir, "lib") + + env["SDL_INCLUDE_DIR"] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include') + env["SDL_LIB_DIR"] = join(self.ctx.bootstrap.build_dir, 'libs', arch.arch) + + env["USE_SDL2_MIXER"] = '1' + env["SDL2_MIXER_INCLUDE_DIR"] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_mixer') + + return env + + +recipe = FFPyPlayerRecipe() diff --git a/p4a/pythonforandroidold/recipes/ffpyplayer_codecs/__init__.py b/p4a/pythonforandroidold/recipes/ffpyplayer_codecs/__init__.py new file mode 100644 index 0000000..b324194 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ffpyplayer_codecs/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.toolchain import Recipe + + +class FFPyPlayerCodecsRecipe(Recipe): + depends = ['libshine', 'libx264'] + + def build_arch(self, arch): + pass + + +recipe = FFPyPlayerCodecsRecipe() diff --git a/p4a/pythonforandroidold/recipes/flask/__init__.py b/p4a/pythonforandroidold/recipes/flask/__init__.py new file mode 100644 index 0000000..1a9b685 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/flask/__init__.py @@ -0,0 +1,20 @@ + +from 
pythonforandroid.recipe import PythonRecipe + + +class FlaskRecipe(PythonRecipe): + # The webserver of 'master' seems to fail + # after a little while on Android, so use + # 0.10.1 at least for now + version = '0.10.1' + url = 'https://github.com/pallets/flask/archive/{version}.zip' + + depends = [('python2', 'python3', 'python3crystax'), 'setuptools'] + + python_depends = ['jinja2', 'werkzeug', 'markupsafe', 'itsdangerous', 'click'] + + call_hostpython_via_targetpython = False + install_in_hostpython = False + + +recipe = FlaskRecipe() diff --git a/p4a/pythonforandroidold/recipes/fontconfig/__init__.py b/p4a/pythonforandroidold/recipes/fontconfig/__init__.py new file mode 100644 index 0000000..8ac01e4 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/fontconfig/__init__.py @@ -0,0 +1,19 @@ +from pythonforandroid.recipe import BootstrapNDKRecipe +from pythonforandroid.toolchain import current_directory, shprint +import sh + + +class FontconfigRecipe(BootstrapNDKRecipe): + version = "really_old" + url = 'https://github.com/vault/fontconfig/archive/androidbuild.zip' + depends = ['sdl2'] + dir_name = 'fontconfig' + + def build_arch(self, arch): + env = self.get_recipe_env(arch) + + with current_directory(self.get_jni_dir()): + shprint(sh.ndk_build, "V=1", 'fontconfig', _env=env) + + +recipe = FontconfigRecipe() diff --git a/p4a/pythonforandroidold/recipes/freetype/__init__.py b/p4a/pythonforandroidold/recipes/freetype/__init__.py new file mode 100644 index 0000000..36171ff --- /dev/null +++ b/p4a/pythonforandroidold/recipes/freetype/__init__.py @@ -0,0 +1,44 @@ +from pythonforandroid.toolchain import Recipe +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint +from os.path import exists, join, realpath +import sh + + +class FreetypeRecipe(Recipe): + + version = '2.5.5' + url = 'http://download.savannah.gnu.org/releases/freetype/freetype-{version}.tar.gz' # noqa + + depends = ['harfbuzz'] + + def should_build(self, arch): + if exists(join(self.get_build_dir(arch.arch), + 'objs', '.libs', 'libfreetype.a')): + return False + return True + + def build_arch(self, arch): + env = self.get_recipe_env(arch) + + harfbuzz_recipe = Recipe.get_recipe('harfbuzz', self.ctx) + env['LDFLAGS'] = ' '.join( + [env['LDFLAGS'], + '-L{}'.format(join(harfbuzz_recipe.get_build_dir(arch.arch), + 'src', '.libs'))]) + + with current_directory(self.get_build_dir(arch.arch)): + configure = sh.Command('./configure') + shprint(configure, + '--host=arm-linux-androideabi', + '--prefix={}'.format(realpath('.')), + '--without-zlib', + '--with-png=no', + '--disable-shared', + _env=env) + shprint(sh.make, '-j5', _env=env) + + shprint(sh.cp, 'objs/.libs/libfreetype.a', self.ctx.libs_dir) + + +recipe = FreetypeRecipe() diff --git a/p4a/pythonforandroidold/recipes/genericndkbuild/__init__.py b/p4a/pythonforandroidold/recipes/genericndkbuild/__init__.py new file mode 100644 index 0000000..2d1cdb0 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/genericndkbuild/__init__.py @@ -0,0 +1,29 @@ +from pythonforandroid.recipe import BootstrapNDKRecipe +from pythonforandroid.toolchain import current_directory, shprint +import sh + + +class GenericNDKBuildRecipe(BootstrapNDKRecipe): + version = None + url = None + + depends = [('python2', 'python3', 'python3crystax')] + conflicts = ['sdl2', 'pygame', 'sdl'] + + def should_build(self, arch): + return True + + def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=True): + env = super(GenericNDKBuildRecipe, self).get_recipe_env( + 
arch=arch, with_flags_in_cc=with_flags_in_cc, with_python=with_python) + env['APP_ALLOW_MISSING_DEPS'] = 'true' + return env + + def build_arch(self, arch): + env = self.get_recipe_env(arch) + + with current_directory(self.get_jni_dir()): + shprint(sh.ndk_build, "V=1", _env=env) + + +recipe = GenericNDKBuildRecipe() diff --git a/p4a/pythonforandroid/recipes/gevent-websocket/__init__.py b/p4a/pythonforandroidold/recipes/gevent-websocket/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/gevent-websocket/__init__.py rename to p4a/pythonforandroidold/recipes/gevent-websocket/__init__.py diff --git a/p4a/pythonforandroidold/recipes/gevent/__init__.py b/p4a/pythonforandroidold/recipes/gevent/__init__.py new file mode 100644 index 0000000..5933fb3 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/gevent/__init__.py @@ -0,0 +1,32 @@ +import re +from pythonforandroid.logger import info +from pythonforandroid.recipe import CythonRecipe + + +class GeventRecipe(CythonRecipe): + version = '1.4.0' + url = 'https://pypi.python.org/packages/source/g/gevent/gevent-{version}.tar.gz' + depends = ['librt', 'greenlet'] + patches = ["cross_compiling.patch"] + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + """ + - Moves all -I -D from CFLAGS to CPPFLAGS environment. + - Moves all -l from LDFLAGS to LIBS environment. + - Fixes linker name (use cross compiler) and flags (appends LIBS) + """ + env = super(GeventRecipe, self).get_recipe_env(arch, with_flags_in_cc) + # CFLAGS may only be used to specify C compiler flags, for macro definitions use CPPFLAGS + regex = re.compile(r'(?:\s|^)-[DI][\S]+') + env['CPPFLAGS'] = ''.join(re.findall(regex, env['CFLAGS'])).strip() + env['CFLAGS'] = re.sub(regex, '', env['CFLAGS']) + info('Moved "{}" from CFLAGS to CPPFLAGS.'.format(env['CPPFLAGS'])) + # LDFLAGS may only be used to specify linker flags, for libraries use LIBS + regex = re.compile(r'(?:\s|^)-l[\w\.]+') + env['LIBS'] = ''.join(re.findall(regex, env['LDFLAGS'])).strip() + env['LDFLAGS'] = re.sub(regex, '', env['LDFLAGS']) + info('Moved "{}" from LDFLAGS to LIBS.'.format(env['LIBS'])) + return env + + +recipe = GeventRecipe() diff --git a/p4a/pythonforandroidold/recipes/gevent/cross_compiling.patch b/p4a/pythonforandroidold/recipes/gevent/cross_compiling.patch new file mode 100644 index 0000000..01e55d8 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/gevent/cross_compiling.patch @@ -0,0 +1,26 @@ +diff --git a/_setupares.py b/_setupares.py +index dd184de6..bb16bebe 100644 +--- a/_setupares.py ++++ b/_setupares.py +@@ -43,7 +43,7 @@ else: + ares_configure_command = ' '.join([ + "(cd ", quoted_dep_abspath('c-ares'), + " && if [ -r ares_build.h ]; then cp ares_build.h ares_build.h.orig; fi ", +- " && sh ./configure --disable-dependency-tracking " + _m32 + "CONFIG_COMMANDS= ", ++ " && sh ./configure --host={} --disable-dependency-tracking ".format(os.environ['TOOLCHAIN_PREFIX']) + _m32 + "CONFIG_COMMANDS= ", + " && cp ares_config.h ares_build.h \"$OLDPWD\" ", + " && cat ares_build.h ", + " && if [ -r ares_build.h.orig ]; then mv ares_build.h.orig ares_build.h; fi)", +diff --git a/_setuplibev.py b/_setuplibev.py +index 2a5841bf..b6433c94 100644 +--- a/_setuplibev.py ++++ b/_setuplibev.py +@@ -31,7 +31,7 @@ LIBEV_EMBED = should_embed('libev') + # and the PyPy branch will clean it up. 
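# ----------------------------------------------------------------------------
# [editor's example -- annotation, not part of the patch] The gevent recipe's
# get_recipe_env() above moves -D/-I options from CFLAGS to CPPFLAGS and -l
# options from LDFLAGS to LIBS using two regexes. The same regexes can be
# exercised standalone, which makes the split easy to see (the flag strings
# are illustrative):
import re

cflags = '-DANDROID -Iinclude/python3 -O2 -fPIC'
macro_regex = re.compile(r'(?:\s|^)-[DI][\S]+')
cppflags = ''.join(re.findall(macro_regex, cflags)).strip()
cflags = re.sub(macro_regex, '', cflags)
# cppflags == '-DANDROID -Iinclude/python3'; cflags == ' -O2 -fPIC'

ldflags = '-Llibs -lm -lpython3.7m'
lib_regex = re.compile(r'(?:\s|^)-l[\w\.]+')
libs = ''.join(re.findall(lib_regex, ldflags)).strip()
ldflags = re.sub(lib_regex, '', ldflags)
# libs == '-lm -lpython3.7m'; ldflags == '-Llibs'
# [end editor's example] -------------------------------------------------------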
+ libev_configure_command = ' '.join([ + "(cd ", quoted_dep_abspath('libev'), +- " && sh ./configure ", ++ " && sh ./configure --host={} ".format(os.environ['TOOLCHAIN_PREFIX']), + " && cp config.h \"$OLDPWD\"", + ")", + '> configure-output.txt' diff --git a/p4a/pythonforandroidold/recipes/greenlet/__init__.py b/p4a/pythonforandroidold/recipes/greenlet/__init__.py new file mode 100644 index 0000000..3f2043d --- /dev/null +++ b/p4a/pythonforandroidold/recipes/greenlet/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class GreenletRecipe(CompiledComponentsPythonRecipe): + version = '0.4.15' + url = 'https://pypi.python.org/packages/source/g/greenlet/greenlet-{version}.tar.gz' + depends = ['setuptools'] + call_hostpython_via_targetpython = False + + +recipe = GreenletRecipe() diff --git a/p4a/pythonforandroidold/recipes/groestlcoin_hash/__init__.py b/p4a/pythonforandroidold/recipes/groestlcoin_hash/__init__.py new file mode 100644 index 0000000..62344f0 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/groestlcoin_hash/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.recipe import CythonRecipe + + +class GroestlcoinHashRecipe(CythonRecipe): + version = '1.0.1' + url = 'https://github.com/Groestlcoin/groestlcoin-hash-python/archive/{version}.tar.gz' + depends = [] + cythonize = False + + +recipe = GroestlcoinHashRecipe() diff --git a/p4a/pythonforandroidold/recipes/harfbuzz/__init__.py b/p4a/pythonforandroidold/recipes/harfbuzz/__init__.py new file mode 100644 index 0000000..32f4e51 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/harfbuzz/__init__.py @@ -0,0 +1,39 @@ +from pythonforandroid.toolchain import Recipe +from pythonforandroid.util import current_directory +from pythonforandroid.logger import shprint +from os.path import exists, join +import sh + + +class HarfbuzzRecipe(Recipe): + version = '0.9.40' + url = 'http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-{version}.tar.bz2' # noqa + + def should_build(self, arch): + if exists(join(self.get_build_dir(arch.arch), + 'src', '.libs', 'libharfbuzz.a')): + return False + return True + + def build_arch(self, arch): + + env = self.get_recipe_env(arch) + env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format( + self.ctx.get_libs_dir(arch.arch) + + '-L{}'.format(self.ctx.libs_dir)) + with current_directory(self.get_build_dir(arch.arch)): + configure = sh.Command('./configure') + shprint(configure, '--without-icu', '--host=arm-linux=androideabi', + '--prefix={}'.format( + join(self.ctx.build_dir, 'python-install')), + '--without-freetype', + '--without-glib', + '--disable-shared', + _env=env) + shprint(sh.make, '-j5', _env=env) + + shprint(sh.cp, '-L', join('src', '.libs', 'libharfbuzz.a'), + self.ctx.libs_dir) + + +recipe = HarfbuzzRecipe() diff --git a/p4a/pythonforandroid/recipes/hostpython2/__init__.py b/p4a/pythonforandroidold/recipes/hostpython2/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/hostpython2/__init__.py rename to p4a/pythonforandroidold/recipes/hostpython2/__init__.py diff --git a/p4a/pythonforandroid/recipes/hostpython2legacy/Setup b/p4a/pythonforandroidold/recipes/hostpython2legacy/Setup similarity index 100% rename from p4a/pythonforandroid/recipes/hostpython2legacy/Setup rename to p4a/pythonforandroidold/recipes/hostpython2legacy/Setup diff --git a/p4a/pythonforandroid/recipes/hostpython2legacy/__init__.py b/p4a/pythonforandroidold/recipes/hostpython2legacy/__init__.py similarity index 100% rename from 
p4a/pythonforandroid/recipes/hostpython2legacy/__init__.py rename to p4a/pythonforandroidold/recipes/hostpython2legacy/__init__.py diff --git a/p4a/pythonforandroid/recipes/hostpython2legacy/fix-segfault-pygchead.patch b/p4a/pythonforandroidold/recipes/hostpython2legacy/fix-segfault-pygchead.patch similarity index 100% rename from p4a/pythonforandroid/recipes/hostpython2legacy/fix-segfault-pygchead.patch rename to p4a/pythonforandroidold/recipes/hostpython2legacy/fix-segfault-pygchead.patch diff --git a/p4a/pythonforandroidold/recipes/hostpython3/__init__.py b/p4a/pythonforandroidold/recipes/hostpython3/__init__.py new file mode 100644 index 0000000..8b268bd --- /dev/null +++ b/p4a/pythonforandroidold/recipes/hostpython3/__init__.py @@ -0,0 +1,17 @@ +from pythonforandroid.python import HostPythonRecipe + + +class Hostpython3Recipe(HostPythonRecipe): + ''' + The hostpython3's recipe. + + .. versionchanged:: 0.6.0 + Refactored into the new class + :class:`~pythonforandroid.python.HostPythonRecipe` + ''' + version = '3.7.1' + name = 'hostpython3' + conflicts = ['hostpython2', 'hostpython3crystax'] + + +recipe = Hostpython3Recipe() diff --git a/p4a/pythonforandroid/recipes/hostpython3crystax/__init__.py b/p4a/pythonforandroidold/recipes/hostpython3crystax/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/hostpython3crystax/__init__.py rename to p4a/pythonforandroidold/recipes/hostpython3crystax/__init__.py diff --git a/p4a/pythonforandroidold/recipes/icu/__init__.py b/p4a/pythonforandroidold/recipes/icu/__init__.py new file mode 100644 index 0000000..4bb2de0 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/icu/__init__.py @@ -0,0 +1,152 @@ +import sh +import os +from os.path import join, isdir +from pythonforandroid.recipe import NDKRecipe +from pythonforandroid.toolchain import shprint +from pythonforandroid.util import current_directory, ensure_dir + + +class ICURecipe(NDKRecipe): + name = 'icu4c' + version = '57.1' + url = 'http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz' + + depends = [('hostpython2', 'hostpython3')] # installs in python + generated_libraries = [ + 'libicui18n.so', 'libicuuc.so', 'libicudata.so', 'libicule.so'] + + def get_lib_dir(self, arch): + lib_dir = join(self.ctx.get_python_install_dir(), "lib") + ensure_dir(lib_dir) + return lib_dir + + def prepare_build_dir(self, arch): + if self.ctx.android_api > 19: + # greater versions do not have /usr/include/sys/exec_elf.h + raise RuntimeError("icu needs an android api <= 19") + + super(ICURecipe, self).prepare_build_dir(arch) + + def build_arch(self, arch, *extra_args): + env = self.get_recipe_env(arch).copy() + build_root = self.get_build_dir(arch.arch) + + def make_build_dest(dest): + build_dest = join(build_root, dest) + if not isdir(build_dest): + ensure_dir(build_dest) + return build_dest, False + + return build_dest, True + + icu_build = join(build_root, "icu_build") + build_linux, exists = make_build_dest("build_icu_linux") + + host_env = os.environ.copy() + # reduce the function set + host_env["CPPFLAGS"] = ( + "-O3 -fno-short-wchar -DU_USING_ICU_NAMESPACE=1 -fno-short-enums " + "-DU_HAVE_NL_LANGINFO_CODESET=0 -D__STDC_INT64__ -DU_TIMEZONE=0 " + "-DUCONFIG_NO_LEGACY_CONVERSION=1 " + "-DUCONFIG_NO_TRANSLITERATION=0 ") + + if not exists: + configure = sh.Command( + join(build_root, "source", "runConfigureICU")) + with current_directory(build_linux): + shprint( + configure, + "Linux", + "--prefix="+icu_build, + "--enable-extras=no", + "--enable-strict=no", + 
"--enable-static", + "--enable-tests=no", + "--enable-samples=no", + _env=host_env) + shprint(sh.make, "-j5", _env=host_env) + shprint(sh.make, "install", _env=host_env) + + build_android, exists = make_build_dest("build_icu_android") + if not exists: + + configure = sh.Command(join(build_root, "source", "configure")) + + include = ( + " -I{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/include/" + " -I{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/" + "{arch}/include") + include = include.format(ndk=self.ctx.ndk_dir, + version=env["TOOLCHAIN_VERSION"], + arch=arch.arch) + env["CPPFLAGS"] = env["CXXFLAGS"] + " " + env["CPPFLAGS"] += host_env["CPPFLAGS"] + env["CPPFLAGS"] += include + + lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}" + lib = lib.format(ndk=self.ctx.ndk_dir, + version=env["TOOLCHAIN_VERSION"], + arch=arch.arch) + env["LDFLAGS"] += " -lgnustl_shared -L"+lib + + env.pop("CFLAGS", None) + env.pop("CXXFLAGS", None) + + with current_directory(build_android): + shprint( + configure, + "--with-cross-build="+build_linux, + "--enable-extras=no", + "--enable-strict=no", + "--enable-static", + "--enable-tests=no", + "--enable-samples=no", + "--host="+env["TOOLCHAIN_PREFIX"], + "--prefix="+icu_build, + _env=env) + shprint(sh.make, "-j5", _env=env) + shprint(sh.make, "install", _env=env) + + self.copy_files(arch) + + def copy_files(self, arch): + env = self.get_recipe_env(arch) + + lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}" + lib = lib.format(ndk=self.ctx.ndk_dir, + version=env["TOOLCHAIN_VERSION"], + arch=arch.arch) + stl_lib = join(lib, "libgnustl_shared.so") + dst_dir = join(self.ctx.get_site_packages_dir(), "..", "lib-dynload") + shprint(sh.cp, stl_lib, dst_dir) + + src_lib = join(self.get_build_dir(arch.arch), "icu_build", "lib") + dst_lib = self.get_lib_dir(arch) + + src_suffix = "." + self.version + dst_suffix = "." 
+ self.version.split(".")[0] # main version + for lib in self.generated_libraries: + shprint(sh.cp, join(src_lib, lib+src_suffix), + join(dst_lib, lib+dst_suffix)) + + src_include = join( + self.get_build_dir(arch.arch), "icu_build", "include") + dst_include = join( + self.ctx.get_python_install_dir(), "include", "icu") + ensure_dir(dst_include) + shprint(sh.cp, "-r", join(src_include, "layout"), dst_include) + shprint(sh.cp, "-r", join(src_include, "unicode"), dst_include) + + # copy stl library + lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}" + lib = lib.format(ndk=self.ctx.ndk_dir, + version=env["TOOLCHAIN_VERSION"], + arch=arch.arch) + stl_lib = join(lib, "libgnustl_shared.so") + + dst_dir = join(self.ctx.get_python_install_dir(), "lib") + ensure_dir(dst_dir) + shprint(sh.cp, stl_lib, dst_dir) + + +recipe = ICURecipe() diff --git a/p4a/pythonforandroid/recipes/idna/__init__.py b/p4a/pythonforandroidold/recipes/idna/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/idna/__init__.py rename to p4a/pythonforandroidold/recipes/idna/__init__.py diff --git a/p4a/pythonforandroidold/recipes/ifaddrs/__init__.py b/p4a/pythonforandroidold/recipes/ifaddrs/__init__.py new file mode 100644 index 0000000..47c0008 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ifaddrs/__init__.py @@ -0,0 +1,54 @@ +""" ifaddrs for Android +""" +from os.path import join, exists +import sh +from pythonforandroid.logger import info, shprint +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.toolchain import current_directory + + +class IFAddrRecipe(CompiledComponentsPythonRecipe): + version = '8f9a87c' + url = 'https://github.com/morristech/android-ifaddrs/archive/{version}.zip' + depends = [('hostpython2', 'hostpython3')] + + call_hostpython_via_targetpython = False + site_packages_name = 'ifaddrs' + generated_libraries = ['libifaddrs.so'] + + def prebuild_arch(self, arch): + """Make the build and target directories""" + path = self.get_build_dir(arch.arch) + if not exists(path): + info("creating {}".format(path)) + shprint(sh.mkdir, '-p', path) + + def build_arch(self, arch): + """simple shared compile""" + env = self.get_recipe_env(arch, with_flags_in_cc=False) + for path in ( + self.get_build_dir(arch.arch), + join(self.ctx.python_recipe.get_build_dir(arch.arch), 'Lib'), + join(self.ctx.python_recipe.get_build_dir(arch.arch), 'Include')): + if not exists(path): + info("creating {}".format(path)) + shprint(sh.mkdir, '-p', path) + cli = env['CC'].split()[0] + # makes sure first CC command is the compiler rather than ccache, refs: + # https://github.com/kivy/python-for-android/issues/1398 + if 'ccache' in cli: + cli = env['CC'].split()[1] + cc = sh.Command(cli) + + with current_directory(self.get_build_dir(arch.arch)): + cflags = env['CFLAGS'].split() + cflags.extend(['-I.', '-c', '-l.', 'ifaddrs.c', '-I.']) + shprint(cc, *cflags, _env=env) + cflags = env['CFLAGS'].split() + cflags.extend(['-shared', '-I.', 'ifaddrs.o', '-o', 'libifaddrs.so']) + cflags.extend(env['LDFLAGS'].split()) + shprint(cc, *cflags, _env=env) + shprint(sh.cp, 'libifaddrs.so', self.ctx.get_libs_dir(arch.arch)) + + +recipe = IFAddrRecipe() diff --git a/p4a/pythonforandroid/recipes/ipaddress/__init__.py b/p4a/pythonforandroidold/recipes/ipaddress/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/ipaddress/__init__.py rename to p4a/pythonforandroidold/recipes/ipaddress/__init__.py diff --git 
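# ----------------------------------------------------------------------------
# [editor's example -- annotation, not part of the patch] The icu4c recipe
# above is a two-stage cross build: ICU is first configured and compiled for
# the build host, then configured again for Android with --with-cross-build
# pointing at the host tree, so that the Android pass can run the host's
# code-generation tools. A condensed sketch (directory arguments are
# illustrative, the helper name is hypothetical):
import sh
from pythonforandroid.toolchain import shprint

def build_icu_twice(host_env, android_env, source_dir, host_build_dir,
                    toolchain_prefix, prefix):
    # Pass 1: native build, so ICU's generator tools exist on the host.
    shprint(sh.Command(source_dir + '/runConfigureICU'), 'Linux',
            '--prefix=' + prefix, _env=host_env)
    shprint(sh.make, '-j5', _env=host_env)
    shprint(sh.make, 'install', _env=host_env)
    # Pass 2: Android cross build, reusing the host tree's tools.
    shprint(sh.Command(source_dir + '/configure'),
            '--with-cross-build=' + host_build_dir,
            '--host=' + toolchain_prefix,
            '--prefix=' + prefix, _env=android_env)
    shprint(sh.make, '-j5', _env=android_env)
    shprint(sh.make, 'install', _env=android_env)
# [end editor's example] -------------------------------------------------------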
a/p4a/pythonforandroidold/recipes/jedi/__init__.py b/p4a/pythonforandroidold/recipes/jedi/__init__.py new file mode 100644 index 0000000..6338a52 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/jedi/__init__.py @@ -0,0 +1,18 @@ +from pythonforandroid.recipe import PythonRecipe + + +class JediRecipe(PythonRecipe): + version = 'v0.9.0' + url = 'https://github.com/davidhalter/jedi/archive/{version}.tar.gz' + + depends = [('python2', 'python3crystax', 'python3')] + + patches = ['fix_MergedNamesDict_get.patch'] + # This apparently should be fixed in jedi 0.10 (not released to + # pypi yet), but it still occurs on Android, I could not reproduce + # on desktop. + + call_hostpython_via_targetpython = False + + +recipe = JediRecipe() diff --git a/p4a/pythonforandroidold/recipes/jedi/fix_MergedNamesDict_get.patch b/p4a/pythonforandroidold/recipes/jedi/fix_MergedNamesDict_get.patch new file mode 100644 index 0000000..65f163c --- /dev/null +++ b/p4a/pythonforandroidold/recipes/jedi/fix_MergedNamesDict_get.patch @@ -0,0 +1,14 @@ +diff --git a/jedi/parser/fast.py b/jedi/parser/fast.py +index 35bb855..bc43359 100644 +--- a/jedi/parser/fast.py ++++ b/jedi/parser/fast.py +@@ -75,7 +75,8 @@ class MergedNamesDict(object): + return iter(set(key for dct in self.dicts for key in dct)) + + def __getitem__(self, value): +- return list(chain.from_iterable(dct.get(value, []) for dct in self.dicts)) ++ return list(chain.from_iterable((dct[value] if value in dct else []) for dct in self.dicts)) ++ # return list(chain.from_iterable(dct.get(value, []) for dct in self.dicts)) + + def items(self): + dct = {} diff --git a/p4a/pythonforandroidold/recipes/jpeg/Application.mk b/p4a/pythonforandroidold/recipes/jpeg/Application.mk new file mode 100644 index 0000000..5942a03 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/jpeg/Application.mk @@ -0,0 +1,4 @@ +APP_OPTIM := release +APP_ABI := all # or armeabi +APP_MODULES := libjpeg +APP_ALLOW_MISSING_DEPS := true diff --git a/p4a/pythonforandroidold/recipes/jpeg/__init__.py b/p4a/pythonforandroidold/recipes/jpeg/__init__.py new file mode 100644 index 0000000..1969d2c --- /dev/null +++ b/p4a/pythonforandroidold/recipes/jpeg/__init__.py @@ -0,0 +1,78 @@ +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import shprint +from pythonforandroid.util import current_directory +from os.path import join, exists +from os import environ, uname +from glob import glob +import sh + + +class JpegRecipe(Recipe): + ''' + .. versionchanged:: 0.6.0 + rewrote recipe to be build with clang and updated libraries to latest + version of the official git repo. 
+ ''' + name = 'jpeg' + version = '2.0.1' + url = 'https://github.com/libjpeg-turbo/libjpeg-turbo/archive/{version}.tar.gz' # noqa + # we will require this below patch to build the shared library + # patches = ['remove-version.patch'] + + def should_build(self, arch): + return not exists(join(self.get_build_dir(arch.arch), + 'libturbojpeg.a')) + + def build_arch(self, arch): + super(JpegRecipe, self).build_arch(arch) + build_dir = self.get_build_dir(arch.arch) + + # TODO: Fix simd/neon + with current_directory(build_dir): + env = self.get_recipe_env(arch) + toolchain_file = join(self.ctx.ndk_dir, + 'build/cmake/android.toolchain.cmake') + + shprint(sh.rm, '-f', 'CMakeCache.txt', 'CMakeFiles/') + shprint(sh.cmake, '-G', 'Unix Makefiles', + '-DCMAKE_SYSTEM_NAME=Android', + '-DCMAKE_SYSTEM_PROCESSOR={cpu}'.format(cpu='arm'), + '-DCMAKE_POSITION_INDEPENDENT_CODE=1', + '-DCMAKE_ANDROID_ARCH_ABI={arch}'.format(arch=arch.arch), + '-DCMAKE_ANDROID_NDK=' + self.ctx.ndk_dir, + '-DCMAKE_C_COMPILER={toolchain}/bin/clang'.format( + toolchain=env['TOOLCHAIN']), + '-DCMAKE_CXX_COMPILER={toolchain}/bin/clang++'.format( + toolchain=env['TOOLCHAIN']), + '-DCMAKE_BUILD_TYPE=Release', + '-DCMAKE_INSTALL_PREFIX=./install', + '-DCMAKE_TOOLCHAIN_FILE=' + toolchain_file, + + '-DANDROID_ABI={arch}'.format(arch=arch.arch), + '-DANDROID_ARM_NEON=ON', + '-DENABLE_NEON=ON', + # '-DREQUIRE_SIMD=1', + + # Force disable shared, with the static ones is enough + '-DENABLE_SHARED=0', + '-DENABLE_STATIC=1', + _env=env) + shprint(sh.make, _env=env) + + # copy static libs to libs collection + for lib in glob(join(build_dir, '*.a')): + shprint(sh.cp, '-L', lib, self.ctx.libs_dir) + + def get_recipe_env(self, arch=None, with_flags_in_cc=False, clang=True): + env = environ.copy() + + build_platform = '{system}-{machine}'.format( + system=uname()[0], machine=uname()[-1]).lower() + env['TOOLCHAIN'] = join(self.ctx.ndk_dir, 'toolchains/llvm/' + 'prebuilt/{build_platform}'.format( + build_platform=build_platform)) + + return env + + +recipe = JpegRecipe() diff --git a/p4a/pythonforandroidold/recipes/jpeg/build-static.patch b/p4a/pythonforandroidold/recipes/jpeg/build-static.patch new file mode 100644 index 0000000..0aa9c70 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/jpeg/build-static.patch @@ -0,0 +1,85 @@ +diff -Naur jpeg/Android.mk b/Android.mk +--- jpeg/Android.mk 2015-12-14 11:37:25.900190235 -0600 ++++ b/Android.mk 2015-12-14 11:41:27.532182210 -0600 +@@ -54,8 +54,7 @@ + + LOCAL_SRC_FILES:= $(libjpeg_SOURCES_DIST) + +-LOCAL_SHARED_LIBRARIES := libcutils +-LOCAL_STATIC_LIBRARIES := libsimd ++LOCAL_STATIC_LIBRARIES := libsimd libcutils + + LOCAL_C_INCLUDES := $(LOCAL_PATH) + +@@ -68,7 +67,7 @@ + + LOCAL_MODULE := libjpeg + +-include $(BUILD_SHARED_LIBRARY) ++include $(BUILD_STATIC_LIBRARY) + + ###################################################### + ### cjpeg ### +@@ -82,7 +81,7 @@ + + LOCAL_SRC_FILES:= $(cjpeg_SOURCES) + +-LOCAL_SHARED_LIBRARIES := libjpeg ++LOCAL_STATIC_LIBRARIES := libjpeg + + LOCAL_C_INCLUDES := $(LOCAL_PATH) \ + $(LOCAL_PATH)/android +@@ -110,7 +109,7 @@ + + LOCAL_SRC_FILES:= $(djpeg_SOURCES) + +-LOCAL_SHARED_LIBRARIES := libjpeg ++LOCAL_STATIC_LIBRARIES := libjpeg + + LOCAL_C_INCLUDES := $(LOCAL_PATH) \ + $(LOCAL_PATH)/android +@@ -137,7 +136,7 @@ + + LOCAL_SRC_FILES:= $(jpegtran_SOURCES) + +-LOCAL_SHARED_LIBRARIES := libjpeg ++LOCAL_STATIC_LIBRARIES := libjpeg + + LOCAL_C_INCLUDES := $(LOCAL_PATH) \ + $(LOCAL_PATH)/android +@@ -163,7 +162,7 @@ + + LOCAL_SRC_FILES:= $(tjunittest_SOURCES) + 
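# ----------------------------------------------------------------------------
# [editor's example -- annotation, not part of the patch] The jpeg recipe
# above cross-compiles with CMake by pointing it at the NDK's own toolchain
# file instead of exporting CC/CFLAGS by hand. The minimal form of that
# invocation is sketched below (the helper name is hypothetical; the CMake
# variables are the ones the recipe uses):
import sh
from pythonforandroid.toolchain import shprint

def cmake_for_android(env, ndk_dir, arch_abi):
    toolchain_file = ndk_dir + '/build/cmake/android.toolchain.cmake'
    shprint(sh.cmake, '-G', 'Unix Makefiles',
            '-DCMAKE_TOOLCHAIN_FILE=' + toolchain_file,
            '-DANDROID_ABI=' + arch_abi,
            '-DCMAKE_BUILD_TYPE=Release',
            _env=env)
    shprint(sh.make, _env=env)
# [end editor's example] -------------------------------------------------------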
+-LOCAL_SHARED_LIBRARIES := libjpeg ++LOCAL_STATIC_LIBRARIES := libjpeg + + LOCAL_C_INCLUDES := $(LOCAL_PATH) + +@@ -189,7 +188,7 @@ + + LOCAL_SRC_FILES:= $(tjbench_SOURCES) + +-LOCAL_SHARED_LIBRARIES := libjpeg ++LOCAL_STATIC_LIBRARIES := libjpeg + + LOCAL_C_INCLUDES := $(LOCAL_PATH) + +@@ -215,7 +214,7 @@ + + LOCAL_SRC_FILES:= $(rdjpgcom_SOURCES) + +-LOCAL_SHARED_LIBRARIES := libjpeg ++LOCAL_STATIC_LIBRARIES := libjpeg + + LOCAL_C_INCLUDES := $(LOCAL_PATH) + +@@ -240,7 +239,7 @@ + + LOCAL_SRC_FILES:= $(wrjpgcom_SOURCES) + +-LOCAL_SHARED_LIBRARIES := libjpeg ++LOCAL_STATIC_LIBRARIES := libjpeg + + LOCAL_C_INCLUDES := $(LOCAL_PATH) + diff --git a/p4a/pythonforandroidold/recipes/jpeg/remove-version.patch b/p4a/pythonforandroidold/recipes/jpeg/remove-version.patch new file mode 100644 index 0000000..311aa33 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/jpeg/remove-version.patch @@ -0,0 +1,12 @@ +--- jpeg/CMakeLists.txt.orig 2018-11-12 20:20:28.000000000 +0100 ++++ jpeg/CMakeLists.txt 2018-12-14 12:43:45.338704504 +0100 +@@ -573,6 +573,9 @@ + add_library(turbojpeg SHARED ${TURBOJPEG_SOURCES}) + set_property(TARGET turbojpeg PROPERTY COMPILE_FLAGS + "-DBMP_SUPPORTED -DPPM_SUPPORTED") ++ set_property(TARGET jpeg PROPERTY NO_SONAME 1) ++ set_property(TARGET turbojpeg PROPERTY NO_SONAME 1) ++ set(CMAKE_SHARED_LIBRARY_SONAME_C_FLAG "") + if(WIN32) + set_target_properties(turbojpeg PROPERTIES DEFINE_SYMBOL DLLDEFINE) + endif() diff --git a/p4a/pythonforandroidold/recipes/kivy/__init__.py b/p4a/pythonforandroidold/recipes/kivy/__init__.py new file mode 100644 index 0000000..d21107f --- /dev/null +++ b/p4a/pythonforandroidold/recipes/kivy/__init__.py @@ -0,0 +1,55 @@ +from pythonforandroid.recipe import CythonRecipe +from pythonforandroid.toolchain import current_directory, shprint +from os.path import exists, join, basename +import sh +import glob + + +class KivyRecipe(CythonRecipe): + # post kivy==1.10.1, `fixes SDL2 image loading (jpg)` + version = 'c4d6894' + url = 'https://github.com/kivy/kivy/archive/{version}.zip' + name = 'kivy' + + depends = [('sdl2', 'pygame'), 'pyjnius'] + + def cythonize_build(self, env, build_dir='.'): + super(KivyRecipe, self).cythonize_build(env, build_dir=build_dir) + + if not exists(join(build_dir, 'kivy', 'include')): + return + + # If kivy is new enough to use the include dir, copy it + # manually to the right location as we bypass this stage of + # the build + with current_directory(build_dir): + build_libs_dirs = glob.glob(join('build', 'lib.*')) + + for dirn in build_libs_dirs: + shprint(sh.cp, '-r', join('kivy', 'include'), + join(dirn, 'kivy')) + + def cythonize_file(self, env, build_dir, filename): + # We can ignore a few files that aren't important to the + # android build, and may not work on Android anyway + do_not_cythonize = ['window_x11.pyx', ] + if basename(filename) in do_not_cythonize: + return + super(KivyRecipe, self).cythonize_file(env, build_dir, filename) + + def get_recipe_env(self, arch): + env = super(KivyRecipe, self).get_recipe_env(arch) + if 'sdl2' in self.ctx.recipe_build_order: + env['USE_SDL2'] = '1' + env['KIVY_SPLIT_EXAMPLES'] = '1' + env['KIVY_SDL2_PATH'] = ':'.join([ + join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include'), + join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_image'), + join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_mixer'), + join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_ttf'), + ]) + + return env + + +recipe = KivyRecipe() diff --git a/p4a/pythonforandroidold/recipes/kiwisolver/__init__.py 
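# ----------------------------------------------------------------------------
# [editor's example -- annotation, not part of the patch] The kivy recipe
# above skips cythonizing sources that cannot work on Android (such as
# window_x11.pyx) by overriding cythonize_file(). The pattern generalizes to
# any CythonRecipe; `SelectiveCythonRecipe` is a hypothetical illustration.
from os.path import basename
from pythonforandroid.recipe import CythonRecipe


class SelectiveCythonRecipe(CythonRecipe):
    do_not_cythonize = ['window_x11.pyx']

    def cythonize_file(self, env, build_dir, filename):
        if basename(filename) in self.do_not_cythonize:
            return  # leave the file as plain source; it is never compiled
        super(SelectiveCythonRecipe, self).cythonize_file(
            env, build_dir, filename)
# [end editor's example] -------------------------------------------------------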
b/p4a/pythonforandroidold/recipes/kiwisolver/__init__.py new file mode 100644 index 0000000..ae6fa17 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/kiwisolver/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe + + +class KiwiSolverRecipe(CppCompiledComponentsPythonRecipe): + site_packages_name = 'kiwisolver' + version = '0.1.3' + url = 'https://github.com/nucleic/kiwi/archive/master.zip' + depends = ['setuptools'] + + +recipe = KiwiSolverRecipe() diff --git a/p4a/pythonforandroidold/recipes/leveldb/__init__.py b/p4a/pythonforandroidold/recipes/leveldb/__init__.py new file mode 100644 index 0000000..e7ebe71 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/leveldb/__init__.py @@ -0,0 +1,47 @@ +from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory +from os.path import join +import sh + + +class LevelDBRecipe(Recipe): + version = '1.18' + url = 'https://github.com/google/leveldb/archive/v{version}.tar.gz' + opt_depends = ['snappy'] + patches = ['disable-so-version.patch', 'find-snappy.patch'] + + def should_build(self, arch): + return not self.has_libs(arch, 'libleveldb.so', 'libgnustl_shared.so') + + def build_arch(self, arch): + super(LevelDBRecipe, self).build_arch(arch) + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + if 'snappy' in recipe.ctx.recipe_build_order: + # Copy source from snappy recipe + sh.cp('-rf', self.get_recipe('snappy', self.ctx).get_build_dir(arch.arch), 'snappy') + # Build + shprint(sh.make, _env=env) + # Copy the shared library + shutil.copyfile('libleveldb.so', join(self.ctx.get_libs_dir(arch.arch), 'libleveldb.so')) + # Copy stl + shutil.copyfile(self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/libs/' + arch.arch + '/libgnustl_shared.so', + join(self.ctx.get_libs_dir(arch.arch), 'libgnustl_shared.so')) + + def get_recipe_env(self, arch): + env = super(LevelDBRecipe, self).get_recipe_env(arch) + env['TARGET_OS'] = 'OS_ANDROID_CROSSCOMPILE' + if 'snappy' in recipe.ctx.recipe_build_order: + env['CFLAGS'] += ' -DSNAPPY' + \ + ' -I./snappy' + env['CFLAGS'] += ' -I' + self.ctx.ndk_dir + '/platforms/android-' + str(self.ctx.android_api) + '/arch-' + arch.arch.replace('eabi', '') + '/usr/include' + \ + ' -I' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/include' + \ + ' -I' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/libs/' + arch.arch + '/include' + env['CXXFLAGS'] = env['CFLAGS'] + env['CXXFLAGS'] += ' -frtti' + env['CXXFLAGS'] += ' -fexceptions' + env['LDFLAGS'] += ' -L' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/libs/' + arch.arch + \ + ' -lgnustl_shared' + return env + + +recipe = LevelDBRecipe() diff --git a/p4a/pythonforandroid/recipes/leveldb/disable-so-version.patch b/p4a/pythonforandroidold/recipes/leveldb/disable-so-version.patch similarity index 100% rename from p4a/pythonforandroid/recipes/leveldb/disable-so-version.patch rename to p4a/pythonforandroidold/recipes/leveldb/disable-so-version.patch diff --git a/p4a/pythonforandroid/recipes/leveldb/find-snappy.patch b/p4a/pythonforandroidold/recipes/leveldb/find-snappy.patch similarity index 100% rename from p4a/pythonforandroid/recipes/leveldb/find-snappy.patch rename to p4a/pythonforandroidold/recipes/leveldb/find-snappy.patch diff --git a/p4a/pythonforandroidold/recipes/libcurl/__init__.py 
b/p4a/pythonforandroidold/recipes/libcurl/__init__.py new file mode 100644 index 0000000..e8cc860 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libcurl/__init__.py @@ -0,0 +1,40 @@ +import sh +from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory +from os.path import exists, join +from multiprocessing import cpu_count + + +class LibcurlRecipe(Recipe): + version = '7.55.1' + url = 'https://curl.haxx.se/download/curl-7.55.1.tar.gz' + depends = ['openssl'] + + def should_build(self, arch): + super(LibcurlRecipe, self).should_build(arch) + return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libcurl.so')) + + def build_arch(self, arch): + super(LibcurlRecipe, self).build_arch(arch) + env = self.get_recipe_env(arch) + + r = self.get_recipe('openssl', self.ctx) + openssl_dir = r.get_build_dir(arch.arch) + + with current_directory(self.get_build_dir(arch.arch)): + dst_dir = join(self.get_build_dir(arch.arch), 'dist') + shprint( + sh.Command('./configure'), + '--host=arm-linux-androideabi', + '--enable-shared', + '--with-ssl={}'.format(openssl_dir), + '--prefix={}'.format(dst_dir), + _env=env) + shprint(sh.make, '-j', str(cpu_count()), _env=env) + shprint(sh.make, 'install', _env=env) + shutil.copyfile('{}/lib/libcurl.so'.format(dst_dir), + join( + self.ctx.get_libs_dir(arch.arch), + 'libcurl.so')) + + +recipe = LibcurlRecipe() diff --git a/p4a/pythonforandroidold/recipes/libexpat/__init__.py b/p4a/pythonforandroidold/recipes/libexpat/__init__.py new file mode 100644 index 0000000..ecf5265 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libexpat/__init__.py @@ -0,0 +1,38 @@ + +import sh +from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory +from os.path import exists, join +from multiprocessing import cpu_count + + +class LibexpatRecipe(Recipe): + version = 'master' + url = 'https://github.com/libexpat/libexpat/archive/{version}.zip' + depends = [] + + def should_build(self, arch): + super(LibexpatRecipe, self).should_build(arch) + return not exists( + join(self.ctx.get_libs_dir(arch.arch), 'libexpat.so')) + + def build_arch(self, arch): + super(LibexpatRecipe, self).build_arch(arch) + env = self.get_recipe_env(arch) + with current_directory(join(self.get_build_dir(arch.arch), 'expat')): + dst_dir = join(self.get_build_dir(arch.arch), 'dist') + shprint(sh.Command('./buildconf.sh'), _env=env) + shprint( + sh.Command('./configure'), + '--host=arm-linux-androideabi', + '--enable-shared', + '--without-xmlwf', + '--prefix={}'.format(dst_dir), + _env=env) + shprint(sh.make, '-j', str(cpu_count()), _env=env) + shprint(sh.make, 'install', _env=env) + shutil.copyfile( + '{}/lib/libexpat.so'.format(dst_dir), + join(self.ctx.get_libs_dir(arch.arch), 'libexpat.so')) + + +recipe = LibexpatRecipe() diff --git a/p4a/pythonforandroidold/recipes/libffi/Application.mk b/p4a/pythonforandroidold/recipes/libffi/Application.mk new file mode 100644 index 0000000..8561b77 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libffi/Application.mk @@ -0,0 +1,3 @@ +APP_OPTIM := release +APP_ABI := all # or armeabi +APP_MODULES := libffi diff --git a/p4a/pythonforandroidold/recipes/libffi/__init__.py b/p4a/pythonforandroidold/recipes/libffi/__init__.py new file mode 100644 index 0000000..31ed9c6 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libffi/__init__.py @@ -0,0 +1,53 @@ +from os.path import exists, join +from multiprocessing import cpu_count +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import shprint +from 
pythonforandroid.util import current_directory, ensure_dir +import sh + + +class LibffiRecipe(Recipe): + """ + Requires additional system dependencies on Ubuntu: + - `automake` for the `aclocal` binary + - `autoconf` for the `autoreconf` binary + - `libltdl-dev` which defines the `LT_SYS_SYMBOL_USCORE` macro + """ + name = 'libffi' + version = '3.2.1' + url = 'https://github.com/libffi/libffi/archive/v{version}.tar.gz' + + patches = ['remove-version-info.patch', + # This patch below is already included into libffi's master + # branch and included in the pre-release 3.3rc0...so we should + # remove this when we update the version number for libffi + 'fix-includedir.patch'] + + def should_build(self, arch): + return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libffi.so')) + + def build_arch(self, arch): + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + if not exists('configure'): + shprint(sh.Command('./autogen.sh'), _env=env) + shprint(sh.Command('autoreconf'), '-vif', _env=env) + shprint(sh.Command('./configure'), + '--host=' + arch.command_prefix, + '--prefix=' + self.get_build_dir(arch.arch), + '--disable-builddir', + '--enable-shared', _env=env) + + shprint(sh.make, '-j', str(cpu_count()), 'libffi.la', _env=env) + + host_build = self.get_build_dir(arch.arch) + ensure_dir(self.ctx.get_libs_dir(arch.arch)) + shprint(sh.cp, + join(host_build, '.libs', 'libffi.so'), + self.ctx.get_libs_dir(arch.arch)) + + def get_include_dirs(self, arch): + return [join(self.get_build_dir(arch.arch), 'include')] + + +recipe = LibffiRecipe() diff --git a/p4a/pythonforandroidold/recipes/libffi/disable-mips-check.patch b/p4a/pythonforandroidold/recipes/libffi/disable-mips-check.patch new file mode 100644 index 0000000..0f727ba --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libffi/disable-mips-check.patch @@ -0,0 +1,35 @@ +diff -Naur libffi/Android.mk b/Android.mk +--- libffi/Android.mk 2015-12-22 17:00:48.025478556 -0600 ++++ b/Android.mk 2015-12-22 17:02:23.999249390 -0600 +@@ -23,23 +23,20 @@ + # Build rules for the target. + # + +-# We only build ffi for mips. +-ifeq ($(TARGET_ARCH),mips) + +- include $(CLEAR_VARS) ++include $(CLEAR_VARS) + +- ffi_arch := $(TARGET_ARCH) +- ffi_os := $(TARGET_OS) ++ffi_arch := $(TARGET_ARCH) ++ffi_os := $(TARGET_OS) + +- # This include just keeps the nesting a bit saner. +- include $(LOCAL_PATH)/Libffi.mk ++# This include just keeps the nesting a bit saner. ++include $(LOCAL_PATH)/Libffi.mk + +- LOCAL_MODULE_TAGS := optional +- LOCAL_MODULE := libffi ++LOCAL_MODULE_TAGS := optional ++LOCAL_MODULE := libffi + +- include $(BUILD_SHARED_LIBRARY) ++include $(BUILD_SHARED_LIBRARY) + +-endif + + # Also include the rules for the test suite. 
+ include external/libffi/testsuite/Android.mk
diff --git a/p4a/pythonforandroid/recipes/libffi/fix-includedir.patch b/p4a/pythonforandroidold/recipes/libffi/fix-includedir.patch
similarity index 100%
rename from p4a/pythonforandroid/recipes/libffi/fix-includedir.patch
rename to p4a/pythonforandroidold/recipes/libffi/fix-includedir.patch
diff --git a/p4a/pythonforandroidold/recipes/libffi/remove-version-info.patch b/p4a/pythonforandroidold/recipes/libffi/remove-version-info.patch
new file mode 100644
index 0000000..7bdc11a
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libffi/remove-version-info.patch
@@ -0,0 +1,12 @@
+diff -Naur libffi/Makefile.am b/Makefile.am
+--- libffi/Makefile.am	2014-11-12 06:00:59.000000000 -0600
++++ b/Makefile.am	2015-12-23 15:57:10.363148806 -0600
+@@ -249,7 +249,7 @@
+ AM_CFLAGS += -DFFI_DEBUG
+ endif
+ 
+-libffi_la_LDFLAGS = -no-undefined -version-info `grep -v '^\#' $(srcdir)/libtool-version` $(LTLDFLAGS) $(AM_LTLDFLAGS)
++libffi_la_LDFLAGS = -no-undefined -avoid-version $(LTLDFLAGS) $(AM_LTLDFLAGS)
+ 
+ AM_CPPFLAGS = -I. -I$(top_srcdir)/include -Iinclude -I$(top_srcdir)/src
+ AM_CCASFLAGS = $(AM_CPPFLAGS)
diff --git a/p4a/pythonforandroidold/recipes/libgeos/__init__.py b/p4a/pythonforandroidold/recipes/libgeos/__init__.py
new file mode 100644
index 0000000..30786f8
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libgeos/__init__.py
@@ -0,0 +1,44 @@
+from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
+from os.path import exists, join
+import sh
+from multiprocessing import cpu_count
+
+
+class LibgeosRecipe(Recipe):
+    version = '3.5'
+    # url = 'http://download.osgeo.org/geos/geos-{version}.tar.bz2'
+    url = 'https://github.com/libgeos/libgeos/archive/svn-{version}.zip'
+    depends = []
+
+    def should_build(self, arch):
+        super(LibgeosRecipe, self).should_build(arch)
+        return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libgeos_c.so'))
+
+    def build_arch(self, arch):
+        super(LibgeosRecipe, self).build_arch(arch)
+        env = self.get_recipe_env(arch)
+
+        with current_directory(self.get_build_dir(arch.arch)):
+            dst_dir = join(self.get_build_dir(arch.arch), 'dist')
+            bash = sh.Command('bash')
+            print("If this fails, make sure you have autoconf and libtool installed")
+            shprint(bash, 'autogen.sh')  # Requires autoconf and libtool
+            shprint(bash, 'configure', '--host=arm-linux-androideabi', '--enable-shared', '--prefix={}'.format(dst_dir), _env=env)
+            shprint(sh.make, '-j', str(cpu_count()), _env=env)
+            shprint(sh.make, 'install', _env=env)
+            shutil.copyfile('{}/lib/libgeos_c.so'.format(dst_dir), join(self.ctx.get_libs_dir(arch.arch), 'libgeos_c.so'))
+
+    def get_recipe_env(self, arch):
+        env = super(LibgeosRecipe, self).get_recipe_env(arch)
+        # Point the C++ compiler and linker at the legacy gnustl runtime
+        env['CXXFLAGS'] += ' -I{}/sources/cxx-stl/gnu-libstdc++/4.8/include'.format(self.ctx.ndk_dir)
+        env['CXXFLAGS'] += ' -I{}/sources/cxx-stl/gnu-libstdc++/4.8/libs/{}/include'.format(
+            self.ctx.ndk_dir, arch)
+        env['CXXFLAGS'] += ' -L{}/sources/cxx-stl/gnu-libstdc++/4.8/libs/{}'.format(
+            self.ctx.ndk_dir, arch)
+        env['CXXFLAGS'] += ' -lgnustl_shared'
+        env['LDFLAGS'] += ' -L{}/sources/cxx-stl/gnu-libstdc++/4.8/libs/{}'.format(
+            self.ctx.ndk_dir, arch)
+        return env
+
+
+recipe = LibgeosRecipe()
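The libgeos env above wires in the legacy gnustl C++ runtime by appending include and link flags to `CXXFLAGS`/`LDFLAGS` as plain strings. A minimal standalone sketch of that pattern, with a hypothetical NDK path and ABI rather than values from this diff:

    # Illustrative only: pointing a build at the old gnu-libstdc++ runtime.
    ndk_dir = '/opt/android-ndk'   # hypothetical NDK location
    abi = 'armeabi-v7a'            # hypothetical target ABI
    gnustl = '{}/sources/cxx-stl/gnu-libstdc++/4.8'.format(ndk_dir)
    env = {'CXXFLAGS': '', 'LDFLAGS': ''}
    env['CXXFLAGS'] += ' -I{0}/include -I{0}/libs/{1}/include'.format(gnustl, abi)
    env['LDFLAGS'] += ' -L{0}/libs/{1} -lgnustl_shared'.format(gnustl, abi)
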
LDFLAG +""" +from os.path import exists, join +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.toolchain import current_directory +from pythonforandroid.logger import info, shprint +import sh + + +class LibGlobRecipe(CompiledComponentsPythonRecipe): + """Make a glob.h and glob.so for the python_install_dir()""" + version = '0.0.1' + url = None + # + # glob.h and glob.c extracted from + # https://github.com/white-gecko/TokyoCabinet, e.g.: + # https://raw.githubusercontent.com/white-gecko/TokyoCabinet/master/glob.h + # https://raw.githubusercontent.com/white-gecko/TokyoCabinet/master/glob.c + # and pushed in via patch + name = 'libglob' + + depends = [('hostpython2', 'hostpython3')] + patches = ['glob.patch'] + + def should_build(self, arch): + """It's faster to build than check""" + return True + + def prebuild_arch(self, arch): + """Make the build and target directories""" + path = self.get_build_dir(arch.arch) + if not exists(path): + info("creating {}".format(path)) + shprint(sh.mkdir, '-p', path) + + def build_arch(self, arch): + """simple shared compile""" + env = self.get_recipe_env(arch, with_flags_in_cc=False) + for path in ( + self.get_build_dir(arch.arch), + join(self.ctx.python_recipe.get_build_dir(arch.arch), 'Lib'), + join(self.ctx.python_recipe.get_build_dir(arch.arch), 'Include')): + if not exists(path): + info("creating {}".format(path)) + shprint(sh.mkdir, '-p', path) + cli = env['CC'].split()[0] + # makes sure first CC command is the compiler rather than ccache, refs: + # https://github.com/kivy/python-for-android/issues/1399 + if 'ccache' in cli: + cli = env['CC'].split()[1] + cc = sh.Command(cli) + + with current_directory(self.get_build_dir(arch.arch)): + cflags = env['CFLAGS'].split() + cflags.extend(['-I.', '-c', '-l.', 'glob.c', '-I.']) + shprint(cc, *cflags, _env=env) + cflags = env['CFLAGS'].split() + cflags.extend(['-shared', '-I.', 'glob.o', '-o', 'libglob.so']) + cflags.extend(env['LDFLAGS'].split()) + shprint(cc, *cflags, _env=env) + shprint(sh.cp, 'libglob.so', join(self.ctx.libs_dir, arch.arch)) + + +recipe = LibGlobRecipe() diff --git a/p4a/pythonforandroidold/recipes/libglob/glob.patch b/p4a/pythonforandroidold/recipes/libglob/glob.patch new file mode 100644 index 0000000..c7fe817 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libglob/glob.patch @@ -0,0 +1,1016 @@ +diff -Nur /tmp/x/glob.c libglob/glob.c +--- /tmp/x/glob.c 1969-12-31 19:00:00.000000000 -0500 ++++ libglob/glob.c 2017-08-19 15:23:19.910414868 -0400 +@@ -0,0 +1,906 @@ ++/* ++ * Natanael Arndt, 2011: removed collate.h dependencies ++ * (my changes are trivial) ++ * ++ * Copyright (c) 1989, 1993 ++ * The Regents of the University of California. All rights reserved. ++ * ++ * This code is derived from software contributed to Berkeley by ++ * Guido van Rossum. ++ * ++ * Redistribution and use in source and binary forms, with or without ++ * modification, are permitted provided that the following conditions ++ * are met: ++ * 1. Redistributions of source code must retain the above copyright ++ * notice, this list of conditions and the following disclaimer. ++ * 2. Redistributions in binary form must reproduce the above copyright ++ * notice, this list of conditions and the following disclaimer in the ++ * documentation and/or other materials provided with the distribution. ++ * 4. 
++ * 4. Neither the name of the University nor the names of its contributors
++ *    may be used to endorse or promote products derived from this software
++ *    without specific prior written permission.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
++ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
++ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
++ * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
++ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
++ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
++ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
++ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
++ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
++ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ */
++
++#if defined(LIBC_SCCS) && !defined(lint)
++static char sccsid[] = "@(#)glob.c	8.3 (Berkeley) 10/13/93";
++#endif /* LIBC_SCCS and not lint */
++#include <sys/cdefs.h>
++__FBSDID("$FreeBSD$");
++
++/*
++ * glob(3) -- a superset of the one defined in POSIX 1003.2.
++ *
++ * The [!...] convention to negate a range is supported (SysV, Posix, ksh).
++ *
++ * Optional extra services, controlled by flags not defined by POSIX:
++ *
++ * GLOB_QUOTE:
++ *	Escaping convention: \ inhibits any special meaning the following
++ *	character might have (except \ at end of string is retained).
++ * GLOB_MAGCHAR:
++ *	Set in gl_flags if pattern contained a globbing character.
++ * GLOB_NOMAGIC:
++ *	Same as GLOB_NOCHECK, but it will only append pattern if it did
++ *	not contain any magic characters.  [Used in csh style globbing]
++ * GLOB_ALTDIRFUNC:
++ *	Use alternately specified directory access functions.
++ * GLOB_TILDE:
++ *	expand ~user/foo to the /home/dir/of/user/foo
++ * GLOB_BRACE:
++ *	expand {1,2}{a,b} to 1a 1b 2a 2b
++ * gl_matchc:
++ *	Number of matches in the current invocation of glob.
++ */
++
++/*
++ * Some notes on multibyte character support:
++ * 1. Patterns with illegal byte sequences match nothing - even if
++ *    GLOB_NOCHECK is specified.
++ * 2. Illegal byte sequences in filenames are handled by treating them as
++ *    single-byte characters with a value of the first byte of the sequence
++ *    cast to wchar_t.
++ * 3. State-dependent encodings are not currently supported.
++ */
++
++#include <sys/param.h>
++#include <sys/stat.h>
++
++#include <ctype.h>
++#include <dirent.h>
++#include <errno.h>
++#include <glob.h>
++#include <limits.h>
++#include <pwd.h>
++#include <stdint.h>
++#include <stdio.h>
++#include <stdlib.h>
++#include <string.h>
++#include <unistd.h>
++#include <wchar.h>
++
++#define	DOLLAR		'$'
++#define	DOT		'.'
++#define	EOS		'\0'
++#define	LBRACKET	'['
++#define	NOT		'!'
++#define	QUESTION	'?'
++#define QUOTE '\\' ++#define RANGE '-' ++#define RBRACKET ']' ++#define SEP '/' ++#define STAR '*' ++#define TILDE '~' ++#define UNDERSCORE '_' ++#define LBRACE '{' ++#define RBRACE '}' ++#define SLASH '/' ++#define COMMA ',' ++ ++#ifndef DEBUG ++ ++#define M_QUOTE 0x8000000000ULL ++#define M_PROTECT 0x4000000000ULL ++#define M_MASK 0xffffffffffULL ++#define M_CHAR 0x00ffffffffULL ++ ++typedef uint_fast64_t Char; ++ ++#else ++ ++#define M_QUOTE 0x80 ++#define M_PROTECT 0x40 ++#define M_MASK 0xff ++#define M_CHAR 0x7f ++ ++typedef char Char; ++ ++#endif ++ ++ ++#define CHAR(c) ((Char)((c)&M_CHAR)) ++#define META(c) ((Char)((c)|M_QUOTE)) ++#define M_ALL META('*') ++#define M_END META(']') ++#define M_NOT META('!') ++#define M_ONE META('?') ++#define M_RNG META('-') ++#define M_SET META('[') ++#define ismeta(c) (((c)&M_QUOTE) != 0) ++ ++ ++static int compare(const void *, const void *); ++static int g_Ctoc(const Char *, char *, size_t); ++static int g_lstat(Char *, struct stat *, glob_t *); ++static DIR *g_opendir(Char *, glob_t *); ++static const Char *g_strchr(const Char *, wchar_t); ++#ifdef notdef ++static Char *g_strcat(Char *, const Char *); ++#endif ++static int g_stat(Char *, struct stat *, glob_t *); ++static int glob0(const Char *, glob_t *, size_t *); ++static int glob1(Char *, glob_t *, size_t *); ++static int glob2(Char *, Char *, Char *, Char *, glob_t *, size_t *); ++static int glob3(Char *, Char *, Char *, Char *, Char *, glob_t *, size_t *); ++static int globextend(const Char *, glob_t *, size_t *); ++static const Char * ++ globtilde(const Char *, Char *, size_t, glob_t *); ++static int globexp1(const Char *, glob_t *, size_t *); ++static int globexp2(const Char *, const Char *, glob_t *, int *, size_t *); ++static int match(Char *, Char *, Char *); ++#ifdef DEBUG ++static void qprintf(const char *, Char *); ++#endif ++ ++int ++glob(const char *pattern, int flags, int (*errfunc)(const char *, int), glob_t *pglob) ++{ ++ const char *patnext; ++ size_t limit; ++ Char *bufnext, *bufend, patbuf[MAXPATHLEN], prot; ++ mbstate_t mbs; ++ wchar_t wc; ++ size_t clen; ++ ++ patnext = pattern; ++ if (!(flags & GLOB_APPEND)) { ++ pglob->gl_pathc = 0; ++ pglob->gl_pathv = NULL; ++ if (!(flags & GLOB_DOOFFS)) ++ pglob->gl_offs = 0; ++ } ++ if (flags & GLOB_LIMIT) { ++ limit = pglob->gl_matchc; ++ if (limit == 0) ++ limit = ARG_MAX; ++ } else ++ limit = 0; ++ pglob->gl_flags = flags & ~GLOB_MAGCHAR; ++ pglob->gl_errfunc = errfunc; ++ pglob->gl_matchc = 0; ++ ++ bufnext = patbuf; ++ bufend = bufnext + MAXPATHLEN - 1; ++ if (flags & GLOB_NOESCAPE) { ++ memset(&mbs, 0, sizeof(mbs)); ++ while (bufend - bufnext >= MB_CUR_MAX) { ++ clen = mbrtowc(&wc, patnext, MB_LEN_MAX, &mbs); ++ if (clen == (size_t)-1 || clen == (size_t)-2) ++ return (GLOB_NOMATCH); ++ else if (clen == 0) ++ break; ++ *bufnext++ = wc; ++ patnext += clen; ++ } ++ } else { ++ /* Protect the quoted characters. 
*/ ++ memset(&mbs, 0, sizeof(mbs)); ++ while (bufend - bufnext >= MB_CUR_MAX) { ++ if (*patnext == QUOTE) { ++ if (*++patnext == EOS) { ++ *bufnext++ = QUOTE | M_PROTECT; ++ continue; ++ } ++ prot = M_PROTECT; ++ } else ++ prot = 0; ++ clen = mbrtowc(&wc, patnext, MB_LEN_MAX, &mbs); ++ if (clen == (size_t)-1 || clen == (size_t)-2) ++ return (GLOB_NOMATCH); ++ else if (clen == 0) ++ break; ++ *bufnext++ = wc | prot; ++ patnext += clen; ++ } ++ } ++ *bufnext = EOS; ++ ++ if (flags & GLOB_BRACE) ++ return globexp1(patbuf, pglob, &limit); ++ else ++ return glob0(patbuf, pglob, &limit); ++} ++ ++/* ++ * Expand recursively a glob {} pattern. When there is no more expansion ++ * invoke the standard globbing routine to glob the rest of the magic ++ * characters ++ */ ++static int ++globexp1(const Char *pattern, glob_t *pglob, size_t *limit) ++{ ++ const Char* ptr = pattern; ++ int rv; ++ ++ /* Protect a single {}, for find(1), like csh */ ++ if (pattern[0] == LBRACE && pattern[1] == RBRACE && pattern[2] == EOS) ++ return glob0(pattern, pglob, limit); ++ ++ while ((ptr = g_strchr(ptr, LBRACE)) != NULL) ++ if (!globexp2(ptr, pattern, pglob, &rv, limit)) ++ return rv; ++ ++ return glob0(pattern, pglob, limit); ++} ++ ++ ++/* ++ * Recursive brace globbing helper. Tries to expand a single brace. ++ * If it succeeds then it invokes globexp1 with the new pattern. ++ * If it fails then it tries to glob the rest of the pattern and returns. ++ */ ++static int ++globexp2(const Char *ptr, const Char *pattern, glob_t *pglob, int *rv, size_t *limit) ++{ ++ int i; ++ Char *lm, *ls; ++ const Char *pe, *pm, *pm1, *pl; ++ Char patbuf[MAXPATHLEN]; ++ ++ /* copy part up to the brace */ ++ for (lm = patbuf, pm = pattern; pm != ptr; *lm++ = *pm++) ++ continue; ++ *lm = EOS; ++ ls = lm; ++ ++ /* Find the balanced brace */ ++ for (i = 0, pe = ++ptr; *pe; pe++) ++ if (*pe == LBRACKET) { ++ /* Ignore everything between [] */ ++ for (pm = pe++; *pe != RBRACKET && *pe != EOS; pe++) ++ continue; ++ if (*pe == EOS) { ++ /* ++ * We could not find a matching RBRACKET. ++ * Ignore and just look for RBRACE ++ */ ++ pe = pm; ++ } ++ } ++ else if (*pe == LBRACE) ++ i++; ++ else if (*pe == RBRACE) { ++ if (i == 0) ++ break; ++ i--; ++ } ++ ++ /* Non matching braces; just glob the pattern */ ++ if (i != 0 || *pe == EOS) { ++ *rv = glob0(patbuf, pglob, limit); ++ return 0; ++ } ++ ++ for (i = 0, pl = pm = ptr; pm <= pe; pm++) ++ switch (*pm) { ++ case LBRACKET: ++ /* Ignore everything between [] */ ++ for (pm1 = pm++; *pm != RBRACKET && *pm != EOS; pm++) ++ continue; ++ if (*pm == EOS) { ++ /* ++ * We could not find a matching RBRACKET. ++ * Ignore and just look for RBRACE ++ */ ++ pm = pm1; ++ } ++ break; ++ ++ case LBRACE: ++ i++; ++ break; ++ ++ case RBRACE: ++ if (i) { ++ i--; ++ break; ++ } ++ /* FALLTHROUGH */ ++ case COMMA: ++ if (i && *pm == COMMA) ++ break; ++ else { ++ /* Append the current string */ ++ for (lm = ls; (pl < pm); *lm++ = *pl++) ++ continue; ++ /* ++ * Append the rest of the pattern after the ++ * closing brace ++ */ ++ for (pl = pe + 1; (*lm++ = *pl++) != EOS;) ++ continue; ++ ++ /* Expand the current pattern */ ++#ifdef DEBUG ++ qprintf("globexp2:", patbuf); ++#endif ++ *rv = globexp1(patbuf, pglob, limit); ++ ++ /* move after the comma, to the next string */ ++ pl = pm + 1; ++ } ++ break; ++ ++ default: ++ break; ++ } ++ *rv = 0; ++ return 0; ++} ++ ++ ++ ++/* ++ * expand tilde from the passwd file. 
++ */ ++static const Char * ++globtilde(const Char *pattern, Char *patbuf, size_t patbuf_len, glob_t *pglob) ++{ ++ struct passwd *pwd; ++ char *h; ++ const Char *p; ++ Char *b, *eb; ++ ++ if (*pattern != TILDE || !(pglob->gl_flags & GLOB_TILDE)) ++ return pattern; ++ ++ /* ++ * Copy up to the end of the string or / ++ */ ++ eb = &patbuf[patbuf_len - 1]; ++ for (p = pattern + 1, h = (char *) patbuf; ++ h < (char *)eb && *p && *p != SLASH; *h++ = *p++) ++ continue; ++ ++ *h = EOS; ++ ++ if (((char *) patbuf)[0] == EOS) { ++ /* ++ * handle a plain ~ or ~/ by expanding $HOME first (iff ++ * we're not running setuid or setgid) and then trying ++ * the password file ++ */ ++ if (issetugid() != 0 || ++ (h = getenv("HOME")) == NULL) { ++ if (((h = getlogin()) != NULL && ++ (pwd = getpwnam(h)) != NULL) || ++ (pwd = getpwuid(getuid())) != NULL) ++ h = pwd->pw_dir; ++ else ++ return pattern; ++ } ++ } ++ else { ++ /* ++ * Expand a ~user ++ */ ++ if ((pwd = getpwnam((char*) patbuf)) == NULL) ++ return pattern; ++ else ++ h = pwd->pw_dir; ++ } ++ ++ /* Copy the home directory */ ++ for (b = patbuf; b < eb && *h; *b++ = *h++) ++ continue; ++ ++ /* Append the rest of the pattern */ ++ while (b < eb && (*b++ = *p++) != EOS) ++ continue; ++ *b = EOS; ++ ++ return patbuf; ++} ++ ++ ++/* ++ * The main glob() routine: compiles the pattern (optionally processing ++ * quotes), calls glob1() to do the real pattern matching, and finally ++ * sorts the list (unless unsorted operation is requested). Returns 0 ++ * if things went well, nonzero if errors occurred. ++ */ ++static int ++glob0(const Char *pattern, glob_t *pglob, size_t *limit) ++{ ++ const Char *qpatnext; ++ int err; ++ size_t oldpathc; ++ Char *bufnext, c, patbuf[MAXPATHLEN]; ++ ++ qpatnext = globtilde(pattern, patbuf, MAXPATHLEN, pglob); ++ oldpathc = pglob->gl_pathc; ++ bufnext = patbuf; ++ ++ /* We don't need to check for buffer overflow any more. */ ++ while ((c = *qpatnext++) != EOS) { ++ switch (c) { ++ case LBRACKET: ++ c = *qpatnext; ++ if (c == NOT) ++ ++qpatnext; ++ if (*qpatnext == EOS || ++ g_strchr(qpatnext+1, RBRACKET) == NULL) { ++ *bufnext++ = LBRACKET; ++ if (c == NOT) ++ --qpatnext; ++ break; ++ } ++ *bufnext++ = M_SET; ++ if (c == NOT) ++ *bufnext++ = M_NOT; ++ c = *qpatnext++; ++ do { ++ *bufnext++ = CHAR(c); ++ if (*qpatnext == RANGE && ++ (c = qpatnext[1]) != RBRACKET) { ++ *bufnext++ = M_RNG; ++ *bufnext++ = CHAR(c); ++ qpatnext += 2; ++ } ++ } while ((c = *qpatnext++) != RBRACKET); ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ *bufnext++ = M_END; ++ break; ++ case QUESTION: ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ *bufnext++ = M_ONE; ++ break; ++ case STAR: ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ /* collapse adjacent stars to one, ++ * to avoid exponential behavior ++ */ ++ if (bufnext == patbuf || bufnext[-1] != M_ALL) ++ *bufnext++ = M_ALL; ++ break; ++ default: ++ *bufnext++ = CHAR(c); ++ break; ++ } ++ } ++ *bufnext = EOS; ++#ifdef DEBUG ++ qprintf("glob0:", patbuf); ++#endif ++ ++ if ((err = glob1(patbuf, pglob, limit)) != 0) ++ return(err); ++ ++ /* ++ * If there was no match we are going to append the pattern ++ * if GLOB_NOCHECK was specified or if GLOB_NOMAGIC was specified ++ * and the pattern did not contain any magic characters ++ * GLOB_NOMAGIC is there just for compatibility with csh. 
++ */ ++ if (pglob->gl_pathc == oldpathc) { ++ if (((pglob->gl_flags & GLOB_NOCHECK) || ++ ((pglob->gl_flags & GLOB_NOMAGIC) && ++ !(pglob->gl_flags & GLOB_MAGCHAR)))) ++ return(globextend(pattern, pglob, limit)); ++ else ++ return(GLOB_NOMATCH); ++ } ++ if (!(pglob->gl_flags & GLOB_NOSORT)) ++ qsort(pglob->gl_pathv + pglob->gl_offs + oldpathc, ++ pglob->gl_pathc - oldpathc, sizeof(char *), compare); ++ return(0); ++} ++ ++static int ++compare(const void *p, const void *q) ++{ ++ return(strcmp(*(char **)p, *(char **)q)); ++} ++ ++static int ++glob1(Char *pattern, glob_t *pglob, size_t *limit) ++{ ++ Char pathbuf[MAXPATHLEN]; ++ ++ /* A null pathname is invalid -- POSIX 1003.1 sect. 2.4. */ ++ if (*pattern == EOS) ++ return(0); ++ return(glob2(pathbuf, pathbuf, pathbuf + MAXPATHLEN - 1, ++ pattern, pglob, limit)); ++} ++ ++/* ++ * The functions glob2 and glob3 are mutually recursive; there is one level ++ * of recursion for each segment in the pattern that contains one or more ++ * meta characters. ++ */ ++static int ++glob2(Char *pathbuf, Char *pathend, Char *pathend_last, Char *pattern, ++ glob_t *pglob, size_t *limit) ++{ ++ struct stat sb; ++ Char *p, *q; ++ int anymeta; ++ ++ /* ++ * Loop over pattern segments until end of pattern or until ++ * segment with meta character found. ++ */ ++ for (anymeta = 0;;) { ++ if (*pattern == EOS) { /* End of pattern? */ ++ *pathend = EOS; ++ if (g_lstat(pathbuf, &sb, pglob)) ++ return(0); ++ ++ if (((pglob->gl_flags & GLOB_MARK) && ++ pathend[-1] != SEP) && (S_ISDIR(sb.st_mode) ++ || (S_ISLNK(sb.st_mode) && ++ (g_stat(pathbuf, &sb, pglob) == 0) && ++ S_ISDIR(sb.st_mode)))) { ++ if (pathend + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend++ = SEP; ++ *pathend = EOS; ++ } ++ ++pglob->gl_matchc; ++ return(globextend(pathbuf, pglob, limit)); ++ } ++ ++ /* Find end of next segment, copy tentatively to pathend. */ ++ q = pathend; ++ p = pattern; ++ while (*p != EOS && *p != SEP) { ++ if (ismeta(*p)) ++ anymeta = 1; ++ if (q + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *q++ = *p++; ++ } ++ ++ if (!anymeta) { /* No expansion, do next segment. */ ++ pathend = q; ++ pattern = p; ++ while (*pattern == SEP) { ++ if (pathend + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend++ = *pattern++; ++ } ++ } else /* Need expansion, recurse. */ ++ return(glob3(pathbuf, pathend, pathend_last, pattern, p, ++ pglob, limit)); ++ } ++ /* NOTREACHED */ ++} ++ ++static int ++glob3(Char *pathbuf, Char *pathend, Char *pathend_last, ++ Char *pattern, Char *restpattern, ++ glob_t *pglob, size_t *limit) ++{ ++ struct dirent *dp; ++ DIR *dirp; ++ int err; ++ char buf[MAXPATHLEN]; ++ ++ /* ++ * The readdirfunc declaration can't be prototyped, because it is ++ * assigned, below, to two functions which are prototyped in glob.h ++ * and dirent.h as taking pointers to differently typed opaque ++ * structures. ++ */ ++ struct dirent *(*readdirfunc)(); ++ ++ if (pathend > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend = EOS; ++ errno = 0; ++ ++ if ((dirp = g_opendir(pathbuf, pglob)) == NULL) { ++ /* TODO: don't call for ENOENT or ENOTDIR? */ ++ if (pglob->gl_errfunc) { ++ if (g_Ctoc(pathbuf, buf, sizeof(buf))) ++ return (GLOB_ABORTED); ++ if (pglob->gl_errfunc(buf, errno) || ++ pglob->gl_flags & GLOB_ERR) ++ return (GLOB_ABORTED); ++ } ++ return(0); ++ } ++ ++ err = 0; ++ ++ /* Search directory for matching names. 
*/ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ readdirfunc = pglob->gl_readdir; ++ else ++ readdirfunc = readdir; ++ while ((dp = (*readdirfunc)(dirp))) { ++ char *sc; ++ Char *dc; ++ wchar_t wc; ++ size_t clen; ++ mbstate_t mbs; ++ ++ /* Initial DOT must be matched literally. */ ++ if (dp->d_name[0] == DOT && *pattern != DOT) ++ continue; ++ memset(&mbs, 0, sizeof(mbs)); ++ dc = pathend; ++ sc = dp->d_name; ++ while (dc < pathend_last) { ++ clen = mbrtowc(&wc, sc, MB_LEN_MAX, &mbs); ++ if (clen == (size_t)-1 || clen == (size_t)-2) { ++ wc = *sc; ++ clen = 1; ++ memset(&mbs, 0, sizeof(mbs)); ++ } ++ if ((*dc++ = wc) == EOS) ++ break; ++ sc += clen; ++ } ++ if (!match(pathend, pattern, restpattern)) { ++ *pathend = EOS; ++ continue; ++ } ++ err = glob2(pathbuf, --dc, pathend_last, restpattern, ++ pglob, limit); ++ if (err) ++ break; ++ } ++ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ (*pglob->gl_closedir)(dirp); ++ else ++ closedir(dirp); ++ return(err); ++} ++ ++ ++/* ++ * Extend the gl_pathv member of a glob_t structure to accomodate a new item, ++ * add the new item, and update gl_pathc. ++ * ++ * This assumes the BSD realloc, which only copies the block when its size ++ * crosses a power-of-two boundary; for v7 realloc, this would cause quadratic ++ * behavior. ++ * ++ * Return 0 if new item added, error code if memory couldn't be allocated. ++ * ++ * Invariant of the glob_t structure: ++ * Either gl_pathc is zero and gl_pathv is NULL; or gl_pathc > 0 and ++ * gl_pathv points to (gl_offs + gl_pathc + 1) items. ++ */ ++static int ++globextend(const Char *path, glob_t *pglob, size_t *limit) ++{ ++ char **pathv; ++ size_t i, newsize, len; ++ char *copy; ++ const Char *p; ++ ++ if (*limit && pglob->gl_pathc > *limit) { ++ errno = 0; ++ return (GLOB_NOSPACE); ++ } ++ ++ newsize = sizeof(*pathv) * (2 + pglob->gl_pathc + pglob->gl_offs); ++ pathv = pglob->gl_pathv ? ++ realloc((char *)pglob->gl_pathv, newsize) : ++ malloc(newsize); ++ if (pathv == NULL) { ++ if (pglob->gl_pathv) { ++ free(pglob->gl_pathv); ++ pglob->gl_pathv = NULL; ++ } ++ return(GLOB_NOSPACE); ++ } ++ ++ if (pglob->gl_pathv == NULL && pglob->gl_offs > 0) { ++ /* first time around -- clear initial gl_offs items */ ++ pathv += pglob->gl_offs; ++ for (i = pglob->gl_offs + 1; --i > 0; ) ++ *--pathv = NULL; ++ } ++ pglob->gl_pathv = pathv; ++ ++ for (p = path; *p++;) ++ continue; ++ len = MB_CUR_MAX * (size_t)(p - path); /* XXX overallocation */ ++ if ((copy = malloc(len)) != NULL) { ++ if (g_Ctoc(path, copy, len)) { ++ free(copy); ++ return (GLOB_NOSPACE); ++ } ++ pathv[pglob->gl_offs + pglob->gl_pathc++] = copy; ++ } ++ pathv[pglob->gl_offs + pglob->gl_pathc] = NULL; ++ return(copy == NULL ? GLOB_NOSPACE : 0); ++} ++ ++/* ++ * pattern matching function for filenames. Each occurrence of the * ++ * pattern causes a recursion level. 
++ */ ++static int ++match(Char *name, Char *pat, Char *patend) ++{ ++ int ok, negate_range; ++ Char c, k; ++ ++ while (pat < patend) { ++ c = *pat++; ++ switch (c & M_MASK) { ++ case M_ALL: ++ if (pat == patend) ++ return(1); ++ do ++ if (match(name, pat, patend)) ++ return(1); ++ while (*name++ != EOS); ++ return(0); ++ case M_ONE: ++ if (*name++ == EOS) ++ return(0); ++ break; ++ case M_SET: ++ ok = 0; ++ if ((k = *name++) == EOS) ++ return(0); ++ if ((negate_range = ((*pat & M_MASK) == M_NOT)) != EOS) ++ ++pat; ++ while (((c = *pat++) & M_MASK) != M_END) ++ if ((*pat & M_MASK) == M_RNG) { ++ if (CHAR(c) <= CHAR(k) && CHAR(k) <= CHAR(pat[1])) ok = 1; ++ pat += 2; ++ } else if (c == k) ++ ok = 1; ++ if (ok == negate_range) ++ return(0); ++ break; ++ default: ++ if (*name++ != c) ++ return(0); ++ break; ++ } ++ } ++ return(*name == EOS); ++} ++ ++/* Free allocated data belonging to a glob_t structure. */ ++void ++globfree(glob_t *pglob) ++{ ++ size_t i; ++ char **pp; ++ ++ if (pglob->gl_pathv != NULL) { ++ pp = pglob->gl_pathv + pglob->gl_offs; ++ for (i = pglob->gl_pathc; i--; ++pp) ++ if (*pp) ++ free(*pp); ++ free(pglob->gl_pathv); ++ pglob->gl_pathv = NULL; ++ } ++} ++ ++static DIR * ++g_opendir(Char *str, glob_t *pglob) ++{ ++ char buf[MAXPATHLEN]; ++ ++ if (!*str) ++ strcpy(buf, "."); ++ else { ++ if (g_Ctoc(str, buf, sizeof(buf))) ++ return (NULL); ++ } ++ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_opendir)(buf)); ++ ++ return(opendir(buf)); ++} ++ ++static int ++g_lstat(Char *fn, struct stat *sb, glob_t *pglob) ++{ ++ char buf[MAXPATHLEN]; ++ ++ if (g_Ctoc(fn, buf, sizeof(buf))) { ++ errno = ENAMETOOLONG; ++ return (-1); ++ } ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_lstat)(buf, sb)); ++ return(lstat(buf, sb)); ++} ++ ++static int ++g_stat(Char *fn, struct stat *sb, glob_t *pglob) ++{ ++ char buf[MAXPATHLEN]; ++ ++ if (g_Ctoc(fn, buf, sizeof(buf))) { ++ errno = ENAMETOOLONG; ++ return (-1); ++ } ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_stat)(buf, sb)); ++ return(stat(buf, sb)); ++} ++ ++static const Char * ++g_strchr(const Char *str, wchar_t ch) ++{ ++ ++ do { ++ if (*str == ch) ++ return (str); ++ } while (*str++); ++ return (NULL); ++} ++ ++static int ++g_Ctoc(const Char *str, char *buf, size_t len) ++{ ++ mbstate_t mbs; ++ size_t clen; ++ ++ memset(&mbs, 0, sizeof(mbs)); ++ while (len >= MB_CUR_MAX) { ++ clen = wcrtomb(buf, *str, &mbs); ++ if (clen == (size_t)-1) ++ return (1); ++ if (*str == L'\0') ++ return (0); ++ str++; ++ buf += clen; ++ len -= clen; ++ } ++ return (1); ++} ++ ++#ifdef DEBUG ++static void ++qprintf(const char *str, Char *s) ++{ ++ Char *p; ++ ++ (void)printf("%s:\n", str); ++ for (p = s; *p; p++) ++ (void)printf("%c", CHAR(*p)); ++ (void)printf("\n"); ++ for (p = s; *p; p++) ++ (void)printf("%c", *p & M_PROTECT ? '"' : ' '); ++ (void)printf("\n"); ++ for (p = s; *p; p++) ++ (void)printf("%c", ismeta(*p) ? '_' : ' '); ++ (void)printf("\n"); ++} ++#endif +diff -Nur /tmp/x/glob.h libglob/glob.h +--- /tmp/x/glob.h 1969-12-31 19:00:00.000000000 -0500 ++++ libglob/glob.h 2017-08-19 15:22:18.367109399 -0400 +@@ -0,0 +1,102 @@ ++/* ++ * Copyright (c) 1989, 1993 ++ * The Regents of the University of California. All rights reserved. ++ * ++ * This code is derived from software contributed to Berkeley by ++ * Guido van Rossum. ++ * ++ * Redistribution and use in source and binary forms, with or without ++ * modification, are permitted provided that the following conditions ++ * are met: ++ * 1. 
Redistributions of source code must retain the above copyright
++ *    notice, this list of conditions and the following disclaimer.
++ * 2. Redistributions in binary form must reproduce the above copyright
++ *    notice, this list of conditions and the following disclaimer in the
++ *    documentation and/or other materials provided with the distribution.
++ * 3. Neither the name of the University nor the names of its contributors
++ *    may be used to endorse or promote products derived from this software
++ *    without specific prior written permission.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
++ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
++ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
++ * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
++ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
++ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
++ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
++ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
++ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
++ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ *
++ *	@(#)glob.h	8.1 (Berkeley) 6/2/93
++ * $FreeBSD$
++ */
++
++#ifndef _GLOB_H_
++#define	_GLOB_H_
++
++#include <sys/cdefs.h>
++#include <sys/types.h>
++#define ARG_MAX 6553
++
++#ifndef _SIZE_T_DECLARED
++typedef	__size_t	size_t;
++#define	_SIZE_T_DECLARED
++#endif
++
++struct stat;
++typedef struct {
++	size_t gl_pathc;	/* Count of total paths so far. */
++	size_t gl_matchc;	/* Count of paths matching pattern. */
++	size_t gl_offs;		/* Reserved at beginning of gl_pathv. */
++	int gl_flags;		/* Copy of flags parameter to glob. */
++	char **gl_pathv;	/* List of paths matching pattern. */
++	/* Copy of errfunc parameter to glob. */
++	int (*gl_errfunc)(const char *, int);
++
++	/*
++	 * Alternate filesystem access methods for glob; replacement
++	 * versions of closedir(3), readdir(3), opendir(3), stat(2)
++	 * and lstat(2).
++	 */
++	void (*gl_closedir)(void *);
++	struct dirent *(*gl_readdir)(void *);
++	void *(*gl_opendir)(const char *);
++	int (*gl_lstat)(const char *, struct stat *);
++	int (*gl_stat)(const char *, struct stat *);
++} glob_t;
++
++/* Believed to have been introduced in 1003.2-1992 */
++#define	GLOB_APPEND	0x0001	/* Append to output from previous call. */
++#define	GLOB_DOOFFS	0x0002	/* Use gl_offs. */
++#define	GLOB_ERR	0x0004	/* Return on error. */
++#define	GLOB_MARK	0x0008	/* Append / to matching directories. */
++#define	GLOB_NOCHECK	0x0010	/* Return pattern itself if nothing matches. */
++#define	GLOB_NOSORT	0x0020	/* Don't sort. */
++#define	GLOB_NOESCAPE	0x2000	/* Disable backslash escaping. */
++
++/* Error values returned by glob(3) */
++#define	GLOB_NOSPACE	(-1)	/* Malloc call failed. */
++#define	GLOB_ABORTED	(-2)	/* Unignored error. */
++#define	GLOB_NOMATCH	(-3)	/* No match and GLOB_NOCHECK was not set. */
++#define	GLOB_NOSYS	(-4)	/* Obsolete: source compatibility only. */
++
++#define	GLOB_ALTDIRFUNC	0x0040	/* Use alternately specified directory funcs. */
++#define	GLOB_BRACE	0x0080	/* Expand braces ala csh. */
++#define	GLOB_MAGCHAR	0x0100	/* Pattern had globbing characters. */
++#define	GLOB_NOMAGIC	0x0200	/* GLOB_NOCHECK without magic chars (csh). */
++#define	GLOB_QUOTE	0x0400	/* Quote special chars with \. */
++#define	GLOB_TILDE	0x0800	/* Expand tilde names from the passwd file. */
++#define	GLOB_LIMIT	0x1000	/* limit number of returned paths */
++
++/* source compatibility, these are the old names */
++#define GLOB_MAXPATH	GLOB_LIMIT
++#define GLOB_ABEND	GLOB_ABORTED
++
++__BEGIN_DECLS
++int	glob(const char *, int, int (*)(const char *, int), glob_t *);
++void	globfree(glob_t *);
++__END_DECLS
++
++#endif /* !_GLOB_H_ */
diff --git a/p4a/pythonforandroidold/recipes/libiconv/__init__.py b/p4a/pythonforandroidold/recipes/libiconv/__init__.py
new file mode 100644
index 0000000..4a64669
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libiconv/__init__.py
@@ -0,0 +1,34 @@
+import os
+from pythonforandroid.toolchain import shprint, current_directory
+from pythonforandroid.recipe import Recipe
+from multiprocessing import cpu_count
+import sh
+
+
+class LibIconvRecipe(Recipe):
+
+    version = '1.15'
+
+    url = 'https://ftp.gnu.org/pub/gnu/libiconv/libiconv-{version}.tar.gz'
+
+    patches = ['libiconv-1.15-no-gets.patch']
+
+    def should_build(self, arch):
+        return not os.path.exists(
+            os.path.join(self.ctx.get_libs_dir(arch.arch), 'libiconv.so'))
+
+    def build_arch(self, arch):
+        super(LibIconvRecipe, self).build_arch(arch)
+        env = self.get_recipe_env(arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+            shprint(
+                sh.Command('./configure'),
+                '--host=' + arch.toolchain_prefix,
+                '--prefix=' + self.ctx.get_python_install_dir(),
+                _env=env)
+            shprint(sh.make, '-j' + str(cpu_count()), _env=env)
+        libs = ['lib/.libs/libiconv.so']
+        self.install_libs(arch, *libs)
+
+
+recipe = LibIconvRecipe()
diff --git a/p4a/pythonforandroid/recipes/libiconv/libiconv-1.15-no-gets.patch b/p4a/pythonforandroidold/recipes/libiconv/libiconv-1.15-no-gets.patch
similarity index 100%
rename from p4a/pythonforandroid/recipes/libiconv/libiconv-1.15-no-gets.patch
rename to p4a/pythonforandroidold/recipes/libiconv/libiconv-1.15-no-gets.patch
diff --git a/p4a/pythonforandroidold/recipes/libmysqlclient/Linux.cmake b/p4a/pythonforandroidold/recipes/libmysqlclient/Linux.cmake
new file mode 100644
index 0000000..42cf069
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libmysqlclient/Linux.cmake
@@ -0,0 +1,2 @@
+include(${CMAKE_ROOT}/Modules/Platform/Linux.cmake)
+set(CMAKE_SHARED_LIBRARY_SONAME_C_FLAG "")
diff --git a/p4a/pythonforandroidold/recipes/libmysqlclient/__init__.py b/p4a/pythonforandroidold/recipes/libmysqlclient/__init__.py
new file mode 100644
index 0000000..9235ad4
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libmysqlclient/__init__.py
@@ -0,0 +1,67 @@
+from pythonforandroid.logger import shprint
+from pythonforandroid.recipe import Recipe
+from pythonforandroid.util import current_directory
+import sh
+from os.path import join
+
+
+class LibmysqlclientRecipe(Recipe):
+    name = 'libmysqlclient'
+    version = 'master'
+    url = 'https://github.com/0x-ff/libmysql-android/archive/{version}.zip'
+    # version = '5.5.47'
+    # url = 'http://dev.mysql.com/get/Downloads/MySQL-5.5/mysql-{version}.tar.gz'
+    #
+    # depends = ['ncurses']
+    #
+
+    # patches = ['add-custom-platform.patch']
+
+    patches = ['disable-soversion.patch']
+
+    def should_build(self, arch):
+        return not self.has_libs(arch, 'libmysql.so')
+
+    def build_arch(self, arch):
+        env = self.get_recipe_env(arch)
+        with current_directory(join(self.get_build_dir(arch.arch), 'libmysqlclient')):
+            shprint(sh.cp, '-t', '.', join(self.get_recipe_dir(), 'p4a.cmake'))
+            # shprint(sh.mkdir, 'Platform')
+            # shprint(sh.cp, '-t', 'Platform',
join(self.get_recipe_dir(), 'Linux.cmake')) + shprint(sh.rm, '-f', 'CMakeCache.txt') + shprint(sh.cmake, '-G', 'Unix Makefiles', + # '-DCMAKE_MODULE_PATH=' + join(self.get_build_dir(arch.arch), 'libmysqlclient'), + '-DCMAKE_INSTALL_PREFIX=./install', + '-DCMAKE_TOOLCHAIN_FILE=p4a.cmake', _env=env) + shprint(sh.make, _env=env) + + self.install_libs(arch, join('libmysql', 'libmysql.so')) + + # def get_recipe_env(self, arch=None): + # env = super(LibmysqlclientRecipe, self).get_recipe_env(arch) + # env['WITHOUT_SERVER'] = 'ON' + # ncurses = self.get_recipe('ncurses', self) + # # env['CFLAGS'] += ' -I' + join(ncurses.get_build_dir(arch.arch), + # # 'include') + # env['CURSES_LIBRARY'] = join(self.ctx.get_libs_dir(arch.arch), 'libncurses.so') + # env['CURSES_INCLUDE_PATH'] = join(ncurses.get_build_dir(arch.arch), + # 'include') + # return env + # + # def build_arch(self, arch): + # env = self.get_recipe_env(arch) + # with current_directory(self.get_build_dir(arch.arch)): + # # configure = sh.Command('./configure') + # # TODO: should add openssl as an optional dep and compile support + # # shprint(configure, '--enable-shared', '--enable-assembler', + # # '--enable-thread-safe-client', '--with-innodb', + # # '--without-server', _env=env) + # # shprint(sh.make, _env=env) + # shprint(sh.cmake, '.', '-DCURSES_LIBRARY=' + env['CURSES_LIBRARY'], + # '-DCURSES_INCLUDE_PATH=' + env['CURSES_INCLUDE_PATH'], _env=env) + # shprint(sh.make, _env=env) + # + # self.install_libs(arch, 'libmysqlclient.so') + + +recipe = LibmysqlclientRecipe() diff --git a/p4a/pythonforandroidold/recipes/libmysqlclient/add-custom-platform.patch b/p4a/pythonforandroidold/recipes/libmysqlclient/add-custom-platform.patch new file mode 100644 index 0000000..e76c69a --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libmysqlclient/add-custom-platform.patch @@ -0,0 +1,8 @@ +--- libmysqlclient/libmysqlclient/libmysql/CMakeLists.txt 2013-02-27 00:25:45.000000000 -0600 ++++ b/libmysqlclient/libmysql/CMakeLists.txt 2016-01-11 13:28:51.142356988 -0600 +@@ -152,3 +152,5 @@ + ${CMAKE_SOURCE_DIR}/libmysql/libmysqlclient_r${CMAKE_SHARED_LIBRARY_SUFFIX} + DESTINATION "lib") + ENDIF(WIN32) ++ ++LIST(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_PREFIX}") diff --git a/p4a/pythonforandroidold/recipes/libmysqlclient/disable-soname.patch b/p4a/pythonforandroidold/recipes/libmysqlclient/disable-soname.patch new file mode 100644 index 0000000..5a4dbf2 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libmysqlclient/disable-soname.patch @@ -0,0 +1,11 @@ +--- libmysqlclient/libmysqlclient/CMakeLists.txt 2013-02-27 00:25:45.000000000 -0600 ++++ b/libmysqlclient/CMakeLists.txt 2016-01-11 13:48:41.672323738 -0600 +@@ -24,6 +24,8 @@ + SET(CMAKE_BUILD_TYPE "Release") + ENDIF(NOT CMAKE_BUILD_TYPE) + ++SET(CMAKE_SHARED_LIBRARY_SONAME_C_FLAG "") ++ + # This reads user configuration, generated by configure.js. 
+ IF(WIN32 AND EXISTS ${CMAKE_SOURCE_DIR}/win/configure.data)
+   INCLUDE(${CMAKE_SOURCE_DIR}/win/configure.data)
diff --git a/p4a/pythonforandroidold/recipes/libmysqlclient/disable-soversion.patch b/p4a/pythonforandroidold/recipes/libmysqlclient/disable-soversion.patch
new file mode 100644
index 0000000..d6353de
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libmysqlclient/disable-soversion.patch
@@ -0,0 +1,12 @@
+--- libmysqlclient/libmysqlclient/libmysql/CMakeLists.txt	2013-02-27 00:25:45.000000000 -0600
++++ b/libmysqlclient/libmysql/CMakeLists.txt	2016-01-11 14:00:26.729332913 -0600
+@@ -97,9 +97,6 @@
+ ADD_LIBRARY(libmysql SHARED ${CLIENT_SOURCES} libmysql.def)
+ TARGET_LINK_LIBRARIES(libmysql ${CMAKE_THREAD_LIBS_INIT})
+ STRING(REGEX REPLACE "\\..+" "" LIBMYSQL_SOVERSION ${SHARED_LIB_VERSION})
+-SET_TARGET_PROPERTIES(libmysql
+-  PROPERTIES VERSION ${SHARED_LIB_VERSION}
+-  SOVERSION ${LIBMYSQL_SOVERSION})
+ IF(OPENSSL_LIBRARIES)
+   TARGET_LINK_LIBRARIES(libmysql ${OPENSSL_LIBRARIES} ${OPENSSL_LIBCRYPTO})
+ ENDIF(OPENSSL_LIBRARIES)
diff --git a/p4a/pythonforandroidold/recipes/libmysqlclient/p4a.cmake b/p4a/pythonforandroidold/recipes/libmysqlclient/p4a.cmake
new file mode 100644
index 0000000..9e4c343
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libmysqlclient/p4a.cmake
@@ -0,0 +1,3 @@
+SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM BOTH)
+SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
diff --git a/p4a/pythonforandroid/recipes/libnacl/__init__.py b/p4a/pythonforandroidold/recipes/libnacl/__init__.py
similarity index 100%
rename from p4a/pythonforandroid/recipes/libnacl/__init__.py
rename to p4a/pythonforandroidold/recipes/libnacl/__init__.py
diff --git a/p4a/pythonforandroidold/recipes/libogg/__init__.py b/p4a/pythonforandroidold/recipes/libogg/__init__.py
new file mode 100644
index 0000000..064189e
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libogg/__init__.py
@@ -0,0 +1,26 @@
+from pythonforandroid.recipe import NDKRecipe
+from pythonforandroid.toolchain import current_directory, shprint
+from os.path import join
+import sh
+
+
+class OggRecipe(NDKRecipe):
+    version = '1.3.3'
+    url = 'http://downloads.xiph.org/releases/ogg/libogg-{version}.tar.gz'
+
+    generated_libraries = ['libogg.so']
+
+    def build_arch(self, arch):
+        with current_directory(self.get_build_dir(arch.arch)):
+            env = self.get_recipe_env(arch)
+            flags = [
+                '--with-sysroot=' + self.ctx.ndk_platform,
+                '--host=' + arch.toolchain_prefix,
+            ]
+            configure = sh.Command('./configure')
+            shprint(configure, *flags, _env=env)
+            shprint(sh.make, _env=env)
+            self.install_libs(arch, join('src', '.libs', 'libogg.so'))
+
+
+recipe = OggRecipe()
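The libogg recipe above and the libpq recipe below share the same autotools cross-compile shape: run ./configure with a --host triplet, make, then copy the produced library into the per-arch libs dir. A condensed sketch of that shape, using a hypothetical recipe and library name that are not part of this diff:

    # Hypothetical minimal autotools recipe following the surrounding pattern.
    import sh
    from multiprocessing import cpu_count
    from os.path import join
    from pythonforandroid.recipe import Recipe
    from pythonforandroid.toolchain import current_directory, shprint


    class LibFooRecipe(Recipe):
        version = '1.0'
        url = 'https://example.com/libfoo-{version}.tar.gz'  # placeholder

        def build_arch(self, arch):
            env = self.get_recipe_env(arch)
            with current_directory(self.get_build_dir(arch.arch)):
                shprint(sh.Command('./configure'),
                        '--host=arm-linux-androideabi',  # cross-compile triplet
                        '--enable-shared', _env=env)
                shprint(sh.make, '-j', str(cpu_count()), _env=env)
                self.install_libs(arch, join('src', '.libs', 'libfoo.so'))
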
diff --git a/p4a/pythonforandroidold/recipes/libpq/__init__.py b/p4a/pythonforandroidold/recipes/libpq/__init__.py
new file mode 100644
index 0000000..45c296a
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libpq/__init__.py
@@ -0,0 +1,26 @@
+from pythonforandroid.toolchain import Recipe, current_directory, shprint
+import sh
+import os.path
+
+
+class LibpqRecipe(Recipe):
+    version = '9.5.3'
+    url = 'http://ftp.postgresql.org/pub/source/v{version}/postgresql-{version}.tar.bz2'
+    depends = []
+
+    def should_build(self, arch):
+        return not os.path.isfile('{}/libpq.a'.format(self.ctx.get_libs_dir(arch.arch)))
+
+    def build_arch(self, arch):
+        env = self.get_recipe_env(arch)
+
+        with current_directory(self.get_build_dir(arch.arch)):
+            configure = sh.Command('./configure')
+            shprint(configure, '--without-readline', '--host=arm-linux',
+                    _env=env)
+            shprint(sh.make, 'submake-libpq', _env=env)
+            shprint(sh.cp, '-a', 'src/interfaces/libpq/libpq.a',
+                    self.ctx.get_libs_dir(arch.arch))
+
+
+recipe = LibpqRecipe()
diff --git a/p4a/pythonforandroidold/recipes/librt/__init__.py b/p4a/pythonforandroidold/recipes/librt/__init__.py
new file mode 100644
index 0000000..9eb56b3
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/librt/__init__.py
@@ -0,0 +1,55 @@
+from os import makedirs, remove
+from os.path import exists, join
+import sh
+
+from pythonforandroid.recipe import Recipe
+from pythonforandroid.logger import shprint
+
+
+class LibRt(Recipe):
+    '''
+    This is a dummy recipe. We may need it because some recipes insert the
+    `-lrt` flag outside of our control, as is the case for:
+
+    - :class:`~pythonforandroid.recipes.gevent.GeventRecipe`
+    - :class:`~pythonforandroid.recipes.lxml.LXMLRecipe`
+
+    .. note:: librt doesn't exist on Android; it is integrated into
+        libc, so we create a symbolic link which we will remove when our
+        build finishes'''
+
+    @property
+    def libc_path(self):
+        return join(self.ctx.ndk_platform, 'usr', 'lib', 'libc')
+
+    def build_arch(self, arch):
+        # Create a temporary folder to add to link path with a fake librt.so:
+        fake_librt_temp_folder = join(
+            self.get_build_dir(arch.arch),
+            "p4a-librt-recipe-tempdir"
+        )
+        if not exists(fake_librt_temp_folder):
+            makedirs(fake_librt_temp_folder)
+
+        # Set symlinks, and make sure to update them on every build run:
+        if exists(join(fake_librt_temp_folder, "librt.so")):
+            remove(join(fake_librt_temp_folder, "librt.so"))
+        shprint(sh.ln, '-sf',
+                self.libc_path + '.so',
+                join(fake_librt_temp_folder, "librt.so"),
+                )
+        if exists(join(fake_librt_temp_folder, "librt.a")):
+            remove(join(fake_librt_temp_folder, "librt.a"))
+        shprint(sh.ln, '-sf',
+                self.libc_path + '.a',
+                join(fake_librt_temp_folder, "librt.a"),
+                )
+
+        # Add folder as -L link option for all recipes if not done yet:
+        if fake_librt_temp_folder not in arch.extra_global_link_paths:
+            arch.extra_global_link_paths.append(
+                fake_librt_temp_folder
+            )
+
+
+recipe = LibRt()
diff --git a/p4a/pythonforandroidold/recipes/libsecp256k1/__init__.py b/p4a/pythonforandroidold/recipes/libsecp256k1/__init__.py
new file mode 100644
index 0000000..a855257
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libsecp256k1/__init__.py
@@ -0,0 +1,32 @@
+from pythonforandroid.toolchain import shprint, current_directory
+from pythonforandroid.recipe import Recipe
+from multiprocessing import cpu_count
+from os.path import exists
+import sh
+
+
+class LibSecp256k1Recipe(Recipe):
+
+    url = 'https://github.com/bitcoin-core/secp256k1/archive/master.zip'
+
+    def build_arch(self, arch):
+        super(LibSecp256k1Recipe, self).build_arch(arch)
+        env = self.get_recipe_env(arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+            if not exists('configure'):
+                shprint(sh.Command('./autogen.sh'), _env=env)
+            shprint(
+                sh.Command('./configure'),
+                '--host=' + arch.toolchain_prefix,
+                '--prefix=' + self.ctx.get_python_install_dir(),
+                '--enable-shared',
+                '--enable-module-recovery',
+                '--enable-experimental',
+                '--enable-module-ecdh',
+                _env=env)
+            shprint(sh.make, '-j' + str(cpu_count()), _env=env)
+            libs = ['.libs/libsecp256k1.so']
+            self.install_libs(arch, *libs)
+
+
+recipe = LibSecp256k1Recipe()
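The librt recipe above boils down to aliasing librt to libc on the linker search path. A self-contained sketch of that trick, with illustrative paths that are not taken from this diff:

    # Satisfy a stray '-lrt' by symlinking librt to libc (paths hypothetical).
    import os
    fake_dir = '/tmp/p4a-librt-tempdir'
    libc = '/opt/android-ndk/platforms/android-21/arch-arm/usr/lib/libc'
    if not os.path.isdir(fake_dir):
        os.makedirs(fake_dir)
    for ext in ('.so', '.a'):
        link = os.path.join(fake_dir, 'librt' + ext)
        if os.path.lexists(link):
            os.remove(link)
        os.symlink(libc + ext, link)
    # Passing '-L' + fake_dir to the linker now resolves -lrt against libc.
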
diff --git a/p4a/pythonforandroidold/recipes/libshine/__init__.py b/p4a/pythonforandroidold/recipes/libshine/__init__.py
new file mode 100644
index 0000000..fe9b5b5
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libshine/__init__.py
@@ -0,0 +1,30 @@
+from pythonforandroid.toolchain import Recipe, current_directory, shprint
+from os.path import exists, join, realpath
+import sh
+
+
+class LibShineRecipe(Recipe):
+    version = 'c72aba9031bde18a0995e7c01c9b53f2e08a0e46'
+    url = 'https://github.com/toots/shine/archive/{version}.zip'
+
+    def should_build(self, arch):
+        build_dir = self.get_build_dir(arch.arch)
+        return not exists(join(build_dir, 'lib', 'libshine.a'))
+
+    def build_arch(self, arch):
+        with current_directory(self.get_build_dir(arch.arch)):
+            env = self.get_recipe_env(arch)
+            shprint(sh.Command('./bootstrap'))
+            configure = sh.Command('./configure')
+            shprint(configure,
+                    '--host=arm-linux',
+                    '--enable-pic',
+                    '--disable-shared',
+                    '--enable-static',
+                    '--prefix={}'.format(realpath('.')),
+                    _env=env)
+            shprint(sh.make, '-j4', _env=env)
+            shprint(sh.make, 'install', _env=env)
+
+
+recipe = LibShineRecipe()
diff --git a/p4a/pythonforandroidold/recipes/libsodium/__init__.py b/p4a/pythonforandroidold/recipes/libsodium/__init__.py
new file mode 100644
index 0000000..9911e36
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libsodium/__init__.py
@@ -0,0 +1,31 @@
+from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
+from os.path import exists, join
+import sh
+
+
+class LibsodiumRecipe(Recipe):
+    version = '1.0.16'
+    url = 'https://github.com/jedisct1/libsodium/releases/download/{version}/libsodium-{version}.tar.gz'
+    depends = []
+    patches = ['size_max_fix.patch']
+
+    def should_build(self, arch):
+        super(LibsodiumRecipe, self).should_build(arch)
+        return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libsodium.so'))
+
+    def build_arch(self, arch):
+        super(LibsodiumRecipe, self).build_arch(arch)
+        env = self.get_recipe_env(arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+            bash = sh.Command('bash')
+            shprint(bash, 'configure', '--disable-soname-versions', '--host=arm-linux-androideabi', '--enable-shared', _env=env)
+            shprint(sh.make, _env=env)
+            shutil.copyfile('src/libsodium/.libs/libsodium.so', join(self.ctx.get_libs_dir(arch.arch), 'libsodium.so'))
+
+    def get_recipe_env(self, arch):
+        env = super(LibsodiumRecipe, self).get_recipe_env(arch)
+        env['CFLAGS'] += ' -Os'
+        return env
+
+
+recipe = LibsodiumRecipe()
diff --git a/p4a/pythonforandroidold/recipes/libsodium/size_max_fix.patch b/p4a/pythonforandroidold/recipes/libsodium/size_max_fix.patch
new file mode 100644
index 0000000..c05477c
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libsodium/size_max_fix.patch
@@ -0,0 +1,12 @@
+diff -urN libsodium-1.0.16.ori/src/libsodium/include/sodium/export.h libsodium-1.0.16/src/libsodium/include/sodium/export.h
+--- libsodium-1.0.16.ori/src/libsodium/include/sodium/export.h	2017-12-12 00:03:07.000000000 +0100
++++ libsodium-1.0.16/src/libsodium/include/sodium/export.h	2018-10-31 09:46:06.051189444 +0100
+@@ -47,6 +47,8 @@
+ # endif
+ #endif
+ 
++#include <stdint.h>
++
+ #define SODIUM_MIN(A, B) ((A) < (B) ? (A) : (B))
+ #define SODIUM_SIZE_MAX SODIUM_MIN(UINT64_MAX, SIZE_MAX)
+ 
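The libtorrent recipe below derives the expected artefact names from the Python version in use. A standalone sketch of that expansion, assuming a hypothetical CPython 3.7.1 build:

    # How generated_libraries expands for an assumed Python 3.7.1 build.
    generated = ['boost_system', 'boost_python{py_version}', 'torrent_rasterbar']
    py_version = '3.7.1'[:3].replace('.', '')   # -> '37'
    libs = ['lib' + name.format(py_version=py_version) + '.so'
            for name in generated]
    # -> ['libboost_system.so', 'libboost_python37.so', 'libtorrent_rasterbar.so']
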
diff --git a/p4a/pythonforandroidold/recipes/libtorrent/__init__.py b/p4a/pythonforandroidold/recipes/libtorrent/__init__.py
new file mode 100644
index 0000000..c73bb02
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libtorrent/__init__.py
@@ -0,0 +1,138 @@
+from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
+from multiprocessing import cpu_count
+from os.path import join, basename
+from os import listdir, walk
+import sh
+
+# This recipe builds libtorrent with Python bindings.
+# It depends on Boost.Build and the source of several Boost libraries present
+# in BOOST_ROOT, all of which is provided by the boost recipe.
+
+
+def get_lib_from(search_directory, lib_extension='.so'):
+    '''Recursively scan a directory until a file with the given extension
+    is found, and return its path. The default extension to search for
+    is ``.so``.'''
+    for root, dirs, files in walk(search_directory):
+        for file in files:
+            if file.endswith(lib_extension):
+                print('get_lib_from: {}\n\t- {}'.format(
+                    search_directory, join(root, file)))
+                return join(root, file)
+    return None
+
+
+class LibtorrentRecipe(Recipe):
+    # Todo: make the recipe compatible with all p4a architectures
+    '''
+    .. note:: This recipe can only be built against API 21+ and the
+        armeabi-v7a arch
+
+    .. versionchanged:: 0.6.0
+        Rewrote the recipe to support clang builds and boost 1.68. The
+        following changes have been made:
+
+            - Bumped version number to 1.2.0
+            - Added python 3 compatibility
+            - New system to detect/copy the generated libraries
+    '''
+    version = '1_2_0'
+    url = 'https://github.com/arvidn/libtorrent/archive/libtorrent_{version}.tar.gz'
+
+    depends = ['boost']
+    opt_depends = ['openssl']
+    patches = ['disable-so-version.patch',
+               'use-soname-python.patch',
+               'setup-lib-name.patch']
+
+    # libtorrent.so is not included because it is not a system library
+    generated_libraries = [
+        'boost_system', 'boost_python{py_version}', 'torrent_rasterbar']
+
+    def should_build(self, arch):
+        python_version = self.ctx.python_recipe.version[:3].replace('.', '')
+        libs = ['lib' + lib_name.format(py_version=python_version) +
+                '.so' for lib_name in self.generated_libraries]
+        return not (self.has_libs(arch, *libs) and
+                    self.ctx.has_package('libtorrent', arch.arch))
+
+    def prebuild_arch(self, arch):
+        super(LibtorrentRecipe, self).prebuild_arch(arch)
+        if 'openssl' in recipe.ctx.recipe_build_order:
+            # Patch boost user-config.jam to use openssl
+            self.get_recipe('boost', self.ctx).apply_patch(
+                join(self.get_recipe_dir(), 'user-config-openssl.patch'), arch.arch)
+
+    def build_arch(self, arch):
+        super(LibtorrentRecipe, self).build_arch(arch)
+        env = self.get_recipe_env(arch)
+        env['PYTHON_HOST'] = self.ctx.hostpython
+
+        # Define build variables
+        build_dir = self.get_build_dir(arch.arch)
+        ctx_libs_dir = self.ctx.get_libs_dir(arch.arch)
+        encryption = 'openssl' if 'openssl' in recipe.ctx.recipe_build_order else 'built-in'
+        build_args = [
+            '-q',
+            # '-a',  # force build, useful to debug the build
+            '-j' + str(cpu_count()),
+            '--debug-configuration',  # so we know if our python is detected
+            # '--deprecated-functions=off',
+            'toolset=clang-arm',
+            'abi=aapcs',
+            'binary-format=elf',
+            'cxxflags=-std=c++11',
+            'target-os=android',
+            'threading=multi',
+            'link=shared',
+            'boost-link=shared',
+            'libtorrent-link=shared',
+            'runtime-link=shared',
+            'encryption={}'.format('on' if encryption == 'openssl' else 'off'),
+            'crypto=' + encryption
+        ]
+        crypto_folder = 'encryption-off'
+        if encryption == 'openssl':
+            crypto_folder = 'crypto-openssl'
+            build_args.extend(['openssl-lib=' + env['OPENSSL_BUILD_PATH'],
+                               'openssl-include=' + env['OPENSSL_INCLUDE']
+                               ])
+        build_args.append('release')
+
+        # Compile libtorrent with the boost libraries and python bindings
+        with current_directory(join(build_dir, 'bindings/python')):
+            b2 = sh.Command(join(env['BOOST_ROOT'], 'b2'))
+            shprint(b2, *build_args, _env=env)
+
+        # Copy only the boost shared libraries into the libs folder. Because
+        # boost builds two boost_python libraries, we restrict the search to
+        # the corresponding build path.
+        b2_build_dir = 'build/clang-linux-arm/release/{encryption}/' \
+                       'lt-visibility-hidden/'.format(encryption=crypto_folder)
+        boost_libs_dir = join(env['BOOST_BUILD_PATH'], 'bin.v2/libs')
+        for boost_lib in listdir(boost_libs_dir):
+            lib_path = get_lib_from(join(boost_libs_dir, boost_lib, b2_build_dir))
+            if lib_path:
+                lib_name = basename(lib_path)
+                shutil.copyfile(lib_path, join(ctx_libs_dir, lib_name))
+
+        # Copy the libtorrent shared libraries into the right places
+        system_libtorrent = get_lib_from(join(build_dir, 'bin'))
+        if system_libtorrent:
+            shutil.copyfile(system_libtorrent,
+                            join(ctx_libs_dir, 'libtorrent_rasterbar.so'))
+
+        python_libtorrent = get_lib_from(join(build_dir, 'bindings/python/bin'))
+        shutil.copyfile(python_libtorrent,
+                        join(self.ctx.get_site_packages_dir(arch.arch), 'libtorrent.so'))
+
+    def get_recipe_env(self, arch):
+        # Use the environment from the boost recipe, because we use the b2
+        # tool from boost
+        env = self.get_recipe('boost', self.ctx).get_recipe_env(arch)
+        if 'openssl' in recipe.ctx.recipe_build_order:
+            r = self.get_recipe('openssl', self.ctx)
+            env['OPENSSL_BUILD_PATH'] = r.get_build_dir(arch.arch)
+            env['OPENSSL_INCLUDE'] = join(r.get_build_dir(arch.arch), 'include')
+            env['OPENSSL_VERSION'] = r.version
+        return env
+
+
+recipe = LibtorrentRecipe()
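`get_lib_from` walks a tree and returns the first matching library, which is what lets the recipe above locate boost's output without hard-coding the deep `bin.v2` paths. A quick illustration of its behaviour against a throwaway directory (the layout is made up; it assumes `get_lib_from` from the recipe module is in scope):

```python
from os import makedirs
from os.path import join
from tempfile import mkdtemp

root = mkdtemp()
makedirs(join(root, 'bin.v2', 'libs', 'system'))
open(join(root, 'bin.v2', 'libs', 'system', 'libboost_system.so'), 'w').close()

# Walks the tree and returns the first *.so it encounters
print(get_lib_from(root))                      # .../libboost_system.so
print(get_lib_from(root, lib_extension='.a'))  # None: no static libs here
```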
diff --git a/p4a/pythonforandroidold/recipes/libtorrent/disable-so-version.patch b/p4a/pythonforandroidold/recipes/libtorrent/disable-so-version.patch
new file mode 100644
index 0000000..df0e320
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libtorrent/disable-so-version.patch
@@ -0,0 +1,10 @@
+--- libtorrent/Jamfile 2016-01-17 23:52:45.000000000 +0100
++++ libtorrent-patch/Jamfile 2016-02-09 13:37:57.499561750 +0100
+@@ -325,6 +325,7 @@
+         if $(type) = SHARED_LIB &&
+             ( ! ( [ $(property-set).get <target-os> ] in windows cygwin ) )
+         {
++            return "libtorrent_rasterbar.so" ; # linked by python bindings .so
+             name = $(name).$(VERSION) ;
+         }
+ 
diff --git a/p4a/pythonforandroidold/recipes/libtorrent/setup-lib-name.patch b/p4a/pythonforandroidold/recipes/libtorrent/setup-lib-name.patch
new file mode 100644
index 0000000..183705c
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libtorrent/setup-lib-name.patch
@@ -0,0 +1,20 @@
+--- libtorrent/bindings/python/setup.py.orig 2018-11-26 22:21:48.772142135 +0100
++++ libtorrent/bindings/python/setup.py 2018-11-26 22:23:23.092141235 +0100
+@@ -167,7 +167,7 @@
+     extra_compile = flags.parse(extra_cmd)
+ 
+     ext = [Extension(
+-        'libtorrent',
++        'libtorrent_rasterbar',
+         sources=sorted(source_list),
+         language='c++',
+         include_dirs=flags.include_dirs,
+@@ -178,7 +178,7 @@
+     ]
+ 
+     setup(
+-        name='python-libtorrent',
++        name='libtorrent',
+         version='1.2.0',
+         author='Arvid Norberg',
+         author_email='arvid@libtorrent.org',
diff --git a/p4a/pythonforandroidold/recipes/libtorrent/use-soname-python.patch b/p4a/pythonforandroidold/recipes/libtorrent/use-soname-python.patch
new file mode 100644
index 0000000..1456220
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libtorrent/use-soname-python.patch
@@ -0,0 +1,11 @@
+--- libtorrent/bindings/python/Jamfile.orig 2018-12-07 16:46:50.851838981 +0100
++++ libtorrent/bindings/python/Jamfile 2018-12-07 16:49:09.099837663 +0100
+@@ -113,7 +113,7 @@
+ 
+     if ( gcc in $(properties) )
+     {
+-        result += -Wl,-Bsymbolic ;
++        result += -Wl,-soname=libtorrent.so,-Bsymbolic ;
+     }
+ }
+ 
diff --git a/p4a/pythonforandroidold/recipes/libtorrent/user-config-openssl.patch b/p4a/pythonforandroidold/recipes/libtorrent/user-config-openssl.patch
new file mode 100644
index 0000000..6a54071
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libtorrent/user-config-openssl.patch
@@ -0,0 +1,21 @@
+--- boost/user-config.jam.orig 2018-12-07 14:16:45.911924859 +0100
++++ boost/user-config.jam 2018-12-07 14:20:16.243922853 +0100
+@@ -9,6 +9,8 @@
+ local PYTHON_INCLUDE = [ os.environ PYTHON_INCLUDE ] ;
+ local PYTHON_LINK_VERSION = [ os.environ PYTHON_LINK_VERSION ] ;
+ local PYTHON_MAJOR_MINOR = [ os.environ PYTHON_MAJOR_MINOR ] ;
++local OPENSSL_BUILD_PATH = [ os.environ OPENSSL_BUILD_PATH ] ;
++local OPENSSL_VERSION = [ os.environ OPENSSL_VERSION ] ;
+ 
+ #using clang : $(ARCH) : $(ANDROID_BINARIES_PATH)/clang++ :
+ #$(ANDROID_BINARIES_PATH)/llvm-ar
+@@ -56,6 +58,9 @@
+ -Wl,-z,relro
+ -Wl,-z,now
+ -lc++_shared
++-L$(OPENSSL_BUILD_PATH)
++-lcrypto$(OPENSSL_VERSION)
++-lssl$(OPENSSL_VERSION)
+ -L$(PYTHON_ROOT)
+ -lpython$(PYTHON_LINK_VERSION)
+ -Wl,-O1
diff --git a/p4a/pythonforandroidold/recipes/libtribler/__init__.py b/p4a/pythonforandroidold/recipes/libtribler/__init__.py
new file mode 100644
index 0000000..134ed9e
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libtribler/__init__.py
@@ -0,0 +1,29 @@
+from pythonforandroid.recipe import PythonRecipe
+
+"""
+Privacy with BitTorrent, resilient to shutdown
+
+http://www.tribler.org
+"""
+
+
+class LibTriblerRecipe(PythonRecipe):
+
+    version = 'devel'
+
+    url = 'git+https://github.com/Tribler/tribler.git'
+
+    depends = ['apsw', 'cryptography', 'ffmpeg', 'libsodium', 'libtorrent', 'm2crypto',
+               'netifaces', 'openssl', 'pil', 'pycrypto', 'pyleveldb', 'twisted',
+               ]
+
+    conflicts = ['python3']
+
+    python_depends = ['chardet', 'cherrypy', 'configobj', 'decorator', 'feedparser',
+                      'libnacl', 'pyasn1', 'requests', 'six',
+                      ]
+
+    site_packages_name = 'Tribler'
+
+
+recipe = LibTriblerRecipe()
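The libtribler recipe above illustrates the `PythonRecipe` split between compiled and pure-Python dependencies: packages in `depends` must each have their own recipe (they contain native code), while packages in `python_depends` are plain PyPI installs. A sketch of that pattern with a hypothetical project:

```python
# Sketch of the PythonRecipe pattern used by libtribler above. The project
# name, URL, and package lists below are placeholders, not from the diff.
from pythonforandroid.recipe import PythonRecipe


class MyAppLibRecipe(PythonRecipe):
    version = 'master'
    url = 'git+https://example.com/myapp/myapplib.git'  # placeholder

    # Recipes that must be built for the target arch first
    depends = ['openssl', 'cryptography']

    # Pure-Python requirements, installed into the dist's site-packages
    python_depends = ['six', 'requests']

    # Name of the package directory inside site-packages
    site_packages_name = 'myapplib'


recipe = MyAppLibRecipe()
```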
diff --git a/p4a/pythonforandroidold/recipes/libvorbis/__init__.py b/p4a/pythonforandroidold/recipes/libvorbis/__init__.py
new file mode 100644
index 0000000..87c7a44
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libvorbis/__init__.py
@@ -0,0 +1,37 @@
+from pythonforandroid.recipe import NDKRecipe
+from pythonforandroid.toolchain import current_directory, shprint
+from os.path import join
+import sh
+
+
+class VorbisRecipe(NDKRecipe):
+    version = '1.3.6'
+    url = 'http://downloads.xiph.org/releases/vorbis/libvorbis-{version}.tar.gz'
+    opt_depends = ['libogg']
+
+    generated_libraries = ['libvorbis.so', 'libvorbisfile.so', 'libvorbisenc.so']
+
+    def get_recipe_env(self, arch=None):
+        env = super(VorbisRecipe, self).get_recipe_env(arch)
+        ogg = self.get_recipe('libogg', self.ctx)
+        env['CFLAGS'] += ' -I{}'.format(join(ogg.get_build_dir(arch.arch), 'include'))
+        return env
+
+    def build_arch(self, arch):
+        with current_directory(self.get_build_dir(arch.arch)):
+            env = self.get_recipe_env(arch)
+            flags = [
+                '--with-sysroot=' + self.ctx.ndk_platform,
+                '--host=' + arch.toolchain_prefix,
+            ]
+            configure = sh.Command('./configure')
+            shprint(configure, *flags, _env=env)
+            shprint(sh.make, _env=env)
+            self.install_libs(
+                arch,
+                join('lib', '.libs', 'libvorbis.so'),
+                join('lib', '.libs', 'libvorbisfile.so'),
+                join('lib', '.libs', 'libvorbisenc.so'))
+
+
+recipe = VorbisRecipe()
diff --git a/p4a/pythonforandroidold/recipes/libx264/__init__.py b/p4a/pythonforandroidold/recipes/libx264/__init__.py
new file mode 100644
index 0000000..c139b4c
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libx264/__init__.py
@@ -0,0 +1,32 @@
+from pythonforandroid.toolchain import Recipe, current_directory, shprint
+from os.path import exists, join, realpath
+import sh
+
+
+class LibX264Recipe(Recipe):
+    version = 'x264-snapshot-20171218-2245-stable'  # using a mirror URL since FTP can't be used
+    url = 'http://mirror.yandex.ru/mirrors/ftp.videolan.org/x264/snapshots/{version}.tar.bz2'
+
+    def should_build(self, arch):
+        build_dir = self.get_build_dir(arch.arch)
+        return not exists(join(build_dir, 'lib', 'libx264.a'))
+
+    def build_arch(self, arch):
+        with current_directory(self.get_build_dir(arch.arch)):
+            env = self.get_recipe_env(arch)
+            configure = sh.Command('./configure')
+            shprint(configure,
+                    '--cross-prefix=arm-linux-androideabi-',
+                    '--host=arm-linux',
+                    '--disable-asm',
+                    '--disable-cli',
+                    '--enable-pic',
+                    '--disable-shared',
+                    '--enable-static',
+                    '--prefix={}'.format(realpath('.')),
+                    _env=env)
+            shprint(sh.make, '-j4', _env=env)
+            shprint(sh.make, 'install', _env=env)
+
+
+recipe = LibX264Recipe()
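Two details of the libvorbis recipe above are worth spelling out: it declares `libogg` in `opt_depends`, and it reaches into that recipe's build dir to extend `CFLAGS` with the ogg headers. That cross-recipe lookup through the shared context is the standard way one recipe consumes another's outputs; a condensed sketch, with hypothetical recipe names:

```python
from os.path import join

from pythonforandroid.recipe import Recipe


class ConsumerRecipe(Recipe):  # illustrative name
    opt_depends = ['producer']  # built first when present in the dist

    def get_recipe_env(self, arch=None):
        env = super(ConsumerRecipe, self).get_recipe_env(arch)
        # Look up the other recipe through the shared build context and
        # point the compiler at the headers it unpacked or built:
        producer = self.get_recipe('producer', self.ctx)
        env['CFLAGS'] += ' -I{}'.format(
            join(producer.get_build_dir(arch.arch), 'include'))
        return env
```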
diff --git a/p4a/pythonforandroidold/recipes/libxml2/__init__.py b/p4a/pythonforandroidold/recipes/libxml2/__init__.py
new file mode 100644
index 0000000..cdeaf88
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libxml2/__init__.py
@@ -0,0 +1,60 @@
+from pythonforandroid.recipe import Recipe
+from pythonforandroid.toolchain import shprint, shutil, current_directory
+from os.path import exists, join
+import sh
+
+
+class Libxml2Recipe(Recipe):
+    version = '2.9.8'
+    url = 'http://xmlsoft.org/sources/libxml2-{version}.tar.gz'
+    depends = []
+    patches = ['add-glob.c.patch']
+
+    def should_build(self, arch):
+        super(Libxml2Recipe, self).should_build(arch)
+        return not exists(
+            join(self.get_build_dir(arch.arch), '.libs', 'libxml2.a'))
+
+    def build_arch(self, arch):
+        super(Libxml2Recipe, self).build_arch(arch)
+        env = self.get_recipe_env(arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+
+            if not exists('configure'):
+                shprint(sh.Command('./autogen.sh'), _env=env)
+            shprint(sh.Command('autoreconf'), '-vif', _env=env)
+            build_arch = shprint(
+                sh.gcc, '-dumpmachine').stdout.decode('utf-8').split('\n')[0]
+            shprint(sh.Command('./configure'),
+                    '--build=' + build_arch,
+                    '--host=' + arch.command_prefix,
+                    '--target=' + arch.command_prefix,
+                    '--without-modules',
+                    '--without-legacy',
+                    '--without-history',
+                    '--without-debug',
+                    '--without-docbook',
+                    '--without-python',
+                    '--without-threads',
+                    '--without-iconv',
+                    '--without-lzma',
+                    '--disable-shared',
+                    '--enable-static',
+                    _env=env)
+
+            # Build only libxml2.la: building everything would pull in the
+            # glob dependency, which is a big headache
+            shprint(sh.make, "libxml2.la", _env=env)
+
+            shutil.copyfile('.libs/libxml2.a',
+                            join(self.ctx.libs_dir, 'libxml2.a'))
+
+    def get_recipe_env(self, arch):
+        env = super(Libxml2Recipe, self).get_recipe_env(arch)
+        env['CONFIG_SHELL'] = '/bin/bash'
+        env['SHELL'] = '/bin/bash'
+        env['CC'] += ' -I' + self.get_build_dir(arch.arch)
+        return env
+
+
+recipe = Libxml2Recipe()
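The libxml2 recipe pairs two tricks: the add-glob.c.patch below drops a BSD glob() implementation into the source tree (Android's libc historically lacked one), and `get_recipe_env` appends `-I<build dir>` to `CC` so the compiler finds the bundled glob.h. A condensed sketch of that env tweak (hypothetical recipe name, real technique from the recipe above):

```python
from pythonforandroid.recipe import Recipe


class BundledHeaderRecipe(Recipe):  # illustrative name

    def get_recipe_env(self, arch):
        env = super(BundledHeaderRecipe, self).get_recipe_env(arch)
        # Headers shipped inside the source tree (e.g. a patched-in glob.h)
        # are found before the NDK sysroot ones:
        env['CC'] += ' -I' + self.get_build_dir(arch.arch)
        return env
```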
diff --git a/p4a/pythonforandroidold/recipes/libxml2/add-glob.c.patch b/p4a/pythonforandroidold/recipes/libxml2/add-glob.c.patch
new file mode 100644
index 0000000..776c0c4
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libxml2/add-glob.c.patch
@@ -0,0 +1,1038 @@
+From c97da18834aa41637e3e550bccb70bd2dd0ca3b9 Mon Sep 17 00:00:00 2001
+From: Zachary Goldberg
+Date: Wed, 20 Apr 2016 21:21:52 -0700
+Subject: [PATCH] Add glob
+
+---
+ glob.c | 906 ++++++++++++++++++++++++++++++++
+ glob.h | 105 ++++
+ 2 files changed, 1011 insertions(+)
+ create mode 100644 glob.c
+ create mode 100644 glob.h
+
+diff --git a/glob.c b/glob.c
+new file mode 100644
+index 0000000..cec80ed
+--- /dev/null
++++ b/glob.c
+@@ -0,0 +1,906 @@
++/*
++ * Natanael Arndt, 2011: removed collate.h dependencies
++ * (my changes are trivial)
++ *
++ * Copyright (c) 1989, 1993
++ * The Regents of the University of California. All rights reserved.
++ *
++ * This code is derived from software contributed to Berkeley by
++ * Guido van Rossum.
++ *
++ * Redistribution and use in source and binary forms, with or without
++ * modification, are permitted provided that the following conditions
++ * are met:
++ * 1. Redistributions of source code must retain the above copyright
++ *    notice, this list of conditions and the following disclaimer.
++ * 2. Redistributions in binary form must reproduce the above copyright
++ *    notice, this list of conditions and the following disclaimer in the
++ *    documentation and/or other materials provided with the distribution.
++ * 4. Neither the name of the University nor the names of its contributors
++ *    may be used to endorse or promote products derived from this software
++ *    without specific prior written permission.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
++ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
++ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
++ * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
++ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
++ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
++ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
++ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
++ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
++ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ */
++
++#if defined(LIBC_SCCS) && !defined(lint)
++static char sccsid[] = "@(#)glob.c 8.3 (Berkeley) 10/13/93";
++#endif /* LIBC_SCCS and not lint */
++#include <sys/cdefs.h>
++__FBSDID("$FreeBSD$");
++
++/*
++ * glob(3) -- a superset of the one defined in POSIX 1003.2.
++ *
++ * The [!...] convention to negate a range is supported (SysV, Posix, ksh).
++ *
++ * Optional extra services, controlled by flags not defined by POSIX:
++ *
++ * GLOB_QUOTE:
++ * Escaping convention: \ inhibits any special meaning the following
++ * character might have (except \ at end of string is retained).
++ * GLOB_MAGCHAR:
++ * Set in gl_flags if pattern contained a globbing character.
++ * GLOB_NOMAGIC:
++ * Same as GLOB_NOCHECK, but it will only append pattern if it did
++ * not contain any magic characters. [Used in csh style globbing]
++ * GLOB_ALTDIRFUNC:
++ * Use alternately specified directory access functions.
++ * GLOB_TILDE:
++ * expand ~user/foo to the /home/dir/of/user/foo
++ * GLOB_BRACE:
++ * expand {1,2}{a,b} to 1a 1b 2a 2b
++ * gl_matchc:
++ * Number of matches in the current invocation of glob.
++ */
++
++/*
++ * Some notes on multibyte character support:
++ * 1. Patterns with illegal byte sequences match nothing - even if
++ *    GLOB_NOCHECK is specified.
++ * 2. Illegal byte sequences in filenames are handled by treating them as
++ *    single-byte characters with a value of the first byte of the sequence
++ *    cast to wchar_t.
++ * 3. State-dependent encodings are not currently supported.
++ */
++
++#include <sys/param.h>
++#include <sys/stat.h>
++
++#include <ctype.h>
++#include <dirent.h>
++#include <errno.h>
++#include <glob.h>
++#include <limits.h>
++#include <pwd.h>
++#include <stdint.h>
++#include <stdio.h>
++#include <stdlib.h>
++#include <string.h>
++#include <unistd.h>
++#include <wchar.h>
++
++#define DOLLAR '$'
++#define DOT '.'
++#define EOS '\0'
++#define LBRACKET '['
++#define NOT '!'
++#define QUESTION '?'
++#define QUOTE '\\' ++#define RANGE '-' ++#define RBRACKET ']' ++#define SEP '/' ++#define STAR '*' ++#define TILDE '~' ++#define UNDERSCORE '_' ++#define LBRACE '{' ++#define RBRACE '}' ++#define SLASH '/' ++#define COMMA ',' ++ ++#ifndef DEBUG ++ ++#define M_QUOTE 0x8000000000ULL ++#define M_PROTECT 0x4000000000ULL ++#define M_MASK 0xffffffffffULL ++#define M_CHAR 0x00ffffffffULL ++ ++typedef uint_fast64_t Char; ++ ++#else ++ ++#define M_QUOTE 0x80 ++#define M_PROTECT 0x40 ++#define M_MASK 0xff ++#define M_CHAR 0x7f ++ ++typedef char Char; ++ ++#endif ++ ++ ++#define CHAR(c) ((Char)((c)&M_CHAR)) ++#define META(c) ((Char)((c)|M_QUOTE)) ++#define M_ALL META('*') ++#define M_END META(']') ++#define M_NOT META('!') ++#define M_ONE META('?') ++#define M_RNG META('-') ++#define M_SET META('[') ++#define ismeta(c) (((c)&M_QUOTE) != 0) ++ ++ ++static int compare(const void *, const void *); ++static int g_Ctoc(const Char *, char *, size_t); ++static int g_lstat(Char *, struct stat *, glob_t *); ++static DIR *g_opendir(Char *, glob_t *); ++static const Char *g_strchr(const Char *, wchar_t); ++#ifdef notdef ++static Char *g_strcat(Char *, const Char *); ++#endif ++static int g_stat(Char *, struct stat *, glob_t *); ++static int glob0(const Char *, glob_t *, size_t *); ++static int glob1(Char *, glob_t *, size_t *); ++static int glob2(Char *, Char *, Char *, Char *, glob_t *, size_t *); ++static int glob3(Char *, Char *, Char *, Char *, Char *, glob_t *, size_t *); ++static int globextend(const Char *, glob_t *, size_t *); ++static const Char * ++ globtilde(const Char *, Char *, size_t, glob_t *); ++static int globexp1(const Char *, glob_t *, size_t *); ++static int globexp2(const Char *, const Char *, glob_t *, int *, size_t *); ++static int match(Char *, Char *, Char *); ++#ifdef DEBUG ++static void qprintf(const char *, Char *); ++#endif ++ ++int ++glob(const char *pattern, int flags, int (*errfunc)(const char *, int), glob_t *pglob) ++{ ++ const char *patnext; ++ size_t limit; ++ Char *bufnext, *bufend, patbuf[MAXPATHLEN], prot; ++ mbstate_t mbs; ++ wchar_t wc; ++ size_t clen; ++ ++ patnext = pattern; ++ if (!(flags & GLOB_APPEND)) { ++ pglob->gl_pathc = 0; ++ pglob->gl_pathv = NULL; ++ if (!(flags & GLOB_DOOFFS)) ++ pglob->gl_offs = 0; ++ } ++ if (flags & GLOB_LIMIT) { ++ limit = pglob->gl_matchc; ++ if (limit == 0) ++ limit = ARG_MAX; ++ } else ++ limit = 0; ++ pglob->gl_flags = flags & ~GLOB_MAGCHAR; ++ pglob->gl_errfunc = errfunc; ++ pglob->gl_matchc = 0; ++ ++ bufnext = patbuf; ++ bufend = bufnext + MAXPATHLEN - 1; ++ if (flags & GLOB_NOESCAPE) { ++ memset(&mbs, 0, sizeof(mbs)); ++ while (bufend - bufnext >= MB_CUR_MAX) { ++ clen = mbrtowc(&wc, patnext, MB_LEN_MAX, &mbs); ++ if (clen == (size_t)-1 || clen == (size_t)-2) ++ return (GLOB_NOMATCH); ++ else if (clen == 0) ++ break; ++ *bufnext++ = wc; ++ patnext += clen; ++ } ++ } else { ++ /* Protect the quoted characters. 
*/ ++ memset(&mbs, 0, sizeof(mbs)); ++ while (bufend - bufnext >= MB_CUR_MAX) { ++ if (*patnext == QUOTE) { ++ if (*++patnext == EOS) { ++ *bufnext++ = QUOTE | M_PROTECT; ++ continue; ++ } ++ prot = M_PROTECT; ++ } else ++ prot = 0; ++ clen = mbrtowc(&wc, patnext, MB_LEN_MAX, &mbs); ++ if (clen == (size_t)-1 || clen == (size_t)-2) ++ return (GLOB_NOMATCH); ++ else if (clen == 0) ++ break; ++ *bufnext++ = wc | prot; ++ patnext += clen; ++ } ++ } ++ *bufnext = EOS; ++ ++ if (flags & GLOB_BRACE) ++ return globexp1(patbuf, pglob, &limit); ++ else ++ return glob0(patbuf, pglob, &limit); ++} ++ ++/* ++ * Expand recursively a glob {} pattern. When there is no more expansion ++ * invoke the standard globbing routine to glob the rest of the magic ++ * characters ++ */ ++static int ++globexp1(const Char *pattern, glob_t *pglob, size_t *limit) ++{ ++ const Char* ptr = pattern; ++ int rv; ++ ++ /* Protect a single {}, for find(1), like csh */ ++ if (pattern[0] == LBRACE && pattern[1] == RBRACE && pattern[2] == EOS) ++ return glob0(pattern, pglob, limit); ++ ++ while ((ptr = g_strchr(ptr, LBRACE)) != NULL) ++ if (!globexp2(ptr, pattern, pglob, &rv, limit)) ++ return rv; ++ ++ return glob0(pattern, pglob, limit); ++} ++ ++ ++/* ++ * Recursive brace globbing helper. Tries to expand a single brace. ++ * If it succeeds then it invokes globexp1 with the new pattern. ++ * If it fails then it tries to glob the rest of the pattern and returns. ++ */ ++static int ++globexp2(const Char *ptr, const Char *pattern, glob_t *pglob, int *rv, size_t *limit) ++{ ++ int i; ++ Char *lm, *ls; ++ const Char *pe, *pm, *pm1, *pl; ++ Char patbuf[MAXPATHLEN]; ++ ++ /* copy part up to the brace */ ++ for (lm = patbuf, pm = pattern; pm != ptr; *lm++ = *pm++) ++ continue; ++ *lm = EOS; ++ ls = lm; ++ ++ /* Find the balanced brace */ ++ for (i = 0, pe = ++ptr; *pe; pe++) ++ if (*pe == LBRACKET) { ++ /* Ignore everything between [] */ ++ for (pm = pe++; *pe != RBRACKET && *pe != EOS; pe++) ++ continue; ++ if (*pe == EOS) { ++ /* ++ * We could not find a matching RBRACKET. ++ * Ignore and just look for RBRACE ++ */ ++ pe = pm; ++ } ++ } ++ else if (*pe == LBRACE) ++ i++; ++ else if (*pe == RBRACE) { ++ if (i == 0) ++ break; ++ i--; ++ } ++ ++ /* Non matching braces; just glob the pattern */ ++ if (i != 0 || *pe == EOS) { ++ *rv = glob0(patbuf, pglob, limit); ++ return 0; ++ } ++ ++ for (i = 0, pl = pm = ptr; pm <= pe; pm++) ++ switch (*pm) { ++ case LBRACKET: ++ /* Ignore everything between [] */ ++ for (pm1 = pm++; *pm != RBRACKET && *pm != EOS; pm++) ++ continue; ++ if (*pm == EOS) { ++ /* ++ * We could not find a matching RBRACKET. ++ * Ignore and just look for RBRACE ++ */ ++ pm = pm1; ++ } ++ break; ++ ++ case LBRACE: ++ i++; ++ break; ++ ++ case RBRACE: ++ if (i) { ++ i--; ++ break; ++ } ++ /* FALLTHROUGH */ ++ case COMMA: ++ if (i && *pm == COMMA) ++ break; ++ else { ++ /* Append the current string */ ++ for (lm = ls; (pl < pm); *lm++ = *pl++) ++ continue; ++ /* ++ * Append the rest of the pattern after the ++ * closing brace ++ */ ++ for (pl = pe + 1; (*lm++ = *pl++) != EOS;) ++ continue; ++ ++ /* Expand the current pattern */ ++#ifdef DEBUG ++ qprintf("globexp2:", patbuf); ++#endif ++ *rv = globexp1(patbuf, pglob, limit); ++ ++ /* move after the comma, to the next string */ ++ pl = pm + 1; ++ } ++ break; ++ ++ default: ++ break; ++ } ++ *rv = 0; ++ return 0; ++} ++ ++ ++ ++/* ++ * expand tilde from the passwd file. 
++ */ ++static const Char * ++globtilde(const Char *pattern, Char *patbuf, size_t patbuf_len, glob_t *pglob) ++{ ++ struct passwd *pwd; ++ char *h; ++ const Char *p; ++ Char *b, *eb; ++ ++ if (*pattern != TILDE || !(pglob->gl_flags & GLOB_TILDE)) ++ return pattern; ++ ++ /* ++ * Copy up to the end of the string or / ++ */ ++ eb = &patbuf[patbuf_len - 1]; ++ for (p = pattern + 1, h = (char *) patbuf; ++ h < (char *)eb && *p && *p != SLASH; *h++ = *p++) ++ continue; ++ ++ *h = EOS; ++ ++ if (((char *) patbuf)[0] == EOS) { ++ /* ++ * handle a plain ~ or ~/ by expanding $HOME first (iff ++ * we're not running setuid or setgid) and then trying ++ * the password file ++ */ ++ if (issetugid() != 0 || ++ (h = getenv("HOME")) == NULL) { ++ if (((h = getlogin()) != NULL && ++ (pwd = getpwnam(h)) != NULL) || ++ (pwd = getpwuid(getuid())) != NULL) ++ h = pwd->pw_dir; ++ else ++ return pattern; ++ } ++ } ++ else { ++ /* ++ * Expand a ~user ++ */ ++ if ((pwd = getpwnam((char*) patbuf)) == NULL) ++ return pattern; ++ else ++ h = pwd->pw_dir; ++ } ++ ++ /* Copy the home directory */ ++ for (b = patbuf; b < eb && *h; *b++ = *h++) ++ continue; ++ ++ /* Append the rest of the pattern */ ++ while (b < eb && (*b++ = *p++) != EOS) ++ continue; ++ *b = EOS; ++ ++ return patbuf; ++} ++ ++ ++/* ++ * The main glob() routine: compiles the pattern (optionally processing ++ * quotes), calls glob1() to do the real pattern matching, and finally ++ * sorts the list (unless unsorted operation is requested). Returns 0 ++ * if things went well, nonzero if errors occurred. ++ */ ++static int ++glob0(const Char *pattern, glob_t *pglob, size_t *limit) ++{ ++ const Char *qpatnext; ++ int err; ++ size_t oldpathc; ++ Char *bufnext, c, patbuf[MAXPATHLEN]; ++ ++ qpatnext = globtilde(pattern, patbuf, MAXPATHLEN, pglob); ++ oldpathc = pglob->gl_pathc; ++ bufnext = patbuf; ++ ++ /* We don't need to check for buffer overflow any more. */ ++ while ((c = *qpatnext++) != EOS) { ++ switch (c) { ++ case LBRACKET: ++ c = *qpatnext; ++ if (c == NOT) ++ ++qpatnext; ++ if (*qpatnext == EOS || ++ g_strchr(qpatnext+1, RBRACKET) == NULL) { ++ *bufnext++ = LBRACKET; ++ if (c == NOT) ++ --qpatnext; ++ break; ++ } ++ *bufnext++ = M_SET; ++ if (c == NOT) ++ *bufnext++ = M_NOT; ++ c = *qpatnext++; ++ do { ++ *bufnext++ = CHAR(c); ++ if (*qpatnext == RANGE && ++ (c = qpatnext[1]) != RBRACKET) { ++ *bufnext++ = M_RNG; ++ *bufnext++ = CHAR(c); ++ qpatnext += 2; ++ } ++ } while ((c = *qpatnext++) != RBRACKET); ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ *bufnext++ = M_END; ++ break; ++ case QUESTION: ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ *bufnext++ = M_ONE; ++ break; ++ case STAR: ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ /* collapse adjacent stars to one, ++ * to avoid exponential behavior ++ */ ++ if (bufnext == patbuf || bufnext[-1] != M_ALL) ++ *bufnext++ = M_ALL; ++ break; ++ default: ++ *bufnext++ = CHAR(c); ++ break; ++ } ++ } ++ *bufnext = EOS; ++#ifdef DEBUG ++ qprintf("glob0:", patbuf); ++#endif ++ ++ if ((err = glob1(patbuf, pglob, limit)) != 0) ++ return(err); ++ ++ /* ++ * If there was no match we are going to append the pattern ++ * if GLOB_NOCHECK was specified or if GLOB_NOMAGIC was specified ++ * and the pattern did not contain any magic characters ++ * GLOB_NOMAGIC is there just for compatibility with csh. 
++ */ ++ if (pglob->gl_pathc == oldpathc) { ++ if (((pglob->gl_flags & GLOB_NOCHECK) || ++ ((pglob->gl_flags & GLOB_NOMAGIC) && ++ !(pglob->gl_flags & GLOB_MAGCHAR)))) ++ return(globextend(pattern, pglob, limit)); ++ else ++ return(GLOB_NOMATCH); ++ } ++ if (!(pglob->gl_flags & GLOB_NOSORT)) ++ qsort(pglob->gl_pathv + pglob->gl_offs + oldpathc, ++ pglob->gl_pathc - oldpathc, sizeof(char *), compare); ++ return(0); ++} ++ ++static int ++compare(const void *p, const void *q) ++{ ++ return(strcmp(*(char **)p, *(char **)q)); ++} ++ ++static int ++glob1(Char *pattern, glob_t *pglob, size_t *limit) ++{ ++ Char pathbuf[MAXPATHLEN]; ++ ++ /* A null pathname is invalid -- POSIX 1003.1 sect. 2.4. */ ++ if (*pattern == EOS) ++ return(0); ++ return(glob2(pathbuf, pathbuf, pathbuf + MAXPATHLEN - 1, ++ pattern, pglob, limit)); ++} ++ ++/* ++ * The functions glob2 and glob3 are mutually recursive; there is one level ++ * of recursion for each segment in the pattern that contains one or more ++ * meta characters. ++ */ ++static int ++glob2(Char *pathbuf, Char *pathend, Char *pathend_last, Char *pattern, ++ glob_t *pglob, size_t *limit) ++{ ++ struct stat sb; ++ Char *p, *q; ++ int anymeta; ++ ++ /* ++ * Loop over pattern segments until end of pattern or until ++ * segment with meta character found. ++ */ ++ for (anymeta = 0;;) { ++ if (*pattern == EOS) { /* End of pattern? */ ++ *pathend = EOS; ++ if (g_lstat(pathbuf, &sb, pglob)) ++ return(0); ++ ++ if (((pglob->gl_flags & GLOB_MARK) && ++ pathend[-1] != SEP) && (S_ISDIR(sb.st_mode) ++ || (S_ISLNK(sb.st_mode) && ++ (g_stat(pathbuf, &sb, pglob) == 0) && ++ S_ISDIR(sb.st_mode)))) { ++ if (pathend + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend++ = SEP; ++ *pathend = EOS; ++ } ++ ++pglob->gl_matchc; ++ return(globextend(pathbuf, pglob, limit)); ++ } ++ ++ /* Find end of next segment, copy tentatively to pathend. */ ++ q = pathend; ++ p = pattern; ++ while (*p != EOS && *p != SEP) { ++ if (ismeta(*p)) ++ anymeta = 1; ++ if (q + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *q++ = *p++; ++ } ++ ++ if (!anymeta) { /* No expansion, do next segment. */ ++ pathend = q; ++ pattern = p; ++ while (*pattern == SEP) { ++ if (pathend + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend++ = *pattern++; ++ } ++ } else /* Need expansion, recurse. */ ++ return(glob3(pathbuf, pathend, pathend_last, pattern, p, ++ pglob, limit)); ++ } ++ /* NOTREACHED */ ++} ++ ++static int ++glob3(Char *pathbuf, Char *pathend, Char *pathend_last, ++ Char *pattern, Char *restpattern, ++ glob_t *pglob, size_t *limit) ++{ ++ struct dirent *dp; ++ DIR *dirp; ++ int err; ++ char buf[MAXPATHLEN]; ++ ++ /* ++ * The readdirfunc declaration can't be prototyped, because it is ++ * assigned, below, to two functions which are prototyped in glob.h ++ * and dirent.h as taking pointers to differently typed opaque ++ * structures. ++ */ ++ struct dirent *(*readdirfunc)(); ++ ++ if (pathend > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend = EOS; ++ errno = 0; ++ ++ if ((dirp = g_opendir(pathbuf, pglob)) == NULL) { ++ /* TODO: don't call for ENOENT or ENOTDIR? */ ++ if (pglob->gl_errfunc) { ++ if (g_Ctoc(pathbuf, buf, sizeof(buf))) ++ return (GLOB_ABORTED); ++ if (pglob->gl_errfunc(buf, errno) || ++ pglob->gl_flags & GLOB_ERR) ++ return (GLOB_ABORTED); ++ } ++ return(0); ++ } ++ ++ err = 0; ++ ++ /* Search directory for matching names. 
*/ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ readdirfunc = pglob->gl_readdir; ++ else ++ readdirfunc = readdir; ++ while ((dp = (*readdirfunc)(dirp))) { ++ char *sc; ++ Char *dc; ++ wchar_t wc; ++ size_t clen; ++ mbstate_t mbs; ++ ++ /* Initial DOT must be matched literally. */ ++ if (dp->d_name[0] == DOT && *pattern != DOT) ++ continue; ++ memset(&mbs, 0, sizeof(mbs)); ++ dc = pathend; ++ sc = dp->d_name; ++ while (dc < pathend_last) { ++ clen = mbrtowc(&wc, sc, MB_LEN_MAX, &mbs); ++ if (clen == (size_t)-1 || clen == (size_t)-2) { ++ wc = *sc; ++ clen = 1; ++ memset(&mbs, 0, sizeof(mbs)); ++ } ++ if ((*dc++ = wc) == EOS) ++ break; ++ sc += clen; ++ } ++ if (!match(pathend, pattern, restpattern)) { ++ *pathend = EOS; ++ continue; ++ } ++ err = glob2(pathbuf, --dc, pathend_last, restpattern, ++ pglob, limit); ++ if (err) ++ break; ++ } ++ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ (*pglob->gl_closedir)(dirp); ++ else ++ closedir(dirp); ++ return(err); ++} ++ ++ ++/* ++ * Extend the gl_pathv member of a glob_t structure to accomodate a new item, ++ * add the new item, and update gl_pathc. ++ * ++ * This assumes the BSD realloc, which only copies the block when its size ++ * crosses a power-of-two boundary; for v7 realloc, this would cause quadratic ++ * behavior. ++ * ++ * Return 0 if new item added, error code if memory couldn't be allocated. ++ * ++ * Invariant of the glob_t structure: ++ * Either gl_pathc is zero and gl_pathv is NULL; or gl_pathc > 0 and ++ * gl_pathv points to (gl_offs + gl_pathc + 1) items. ++ */ ++static int ++globextend(const Char *path, glob_t *pglob, size_t *limit) ++{ ++ char **pathv; ++ size_t i, newsize, len; ++ char *copy; ++ const Char *p; ++ ++ if (*limit && pglob->gl_pathc > *limit) { ++ errno = 0; ++ return (GLOB_NOSPACE); ++ } ++ ++ newsize = sizeof(*pathv) * (2 + pglob->gl_pathc + pglob->gl_offs); ++ pathv = pglob->gl_pathv ? ++ realloc((char *)pglob->gl_pathv, newsize) : ++ malloc(newsize); ++ if (pathv == NULL) { ++ if (pglob->gl_pathv) { ++ free(pglob->gl_pathv); ++ pglob->gl_pathv = NULL; ++ } ++ return(GLOB_NOSPACE); ++ } ++ ++ if (pglob->gl_pathv == NULL && pglob->gl_offs > 0) { ++ /* first time around -- clear initial gl_offs items */ ++ pathv += pglob->gl_offs; ++ for (i = pglob->gl_offs + 1; --i > 0; ) ++ *--pathv = NULL; ++ } ++ pglob->gl_pathv = pathv; ++ ++ for (p = path; *p++;) ++ continue; ++ len = MB_CUR_MAX * (size_t)(p - path); /* XXX overallocation */ ++ if ((copy = malloc(len)) != NULL) { ++ if (g_Ctoc(path, copy, len)) { ++ free(copy); ++ return (GLOB_NOSPACE); ++ } ++ pathv[pglob->gl_offs + pglob->gl_pathc++] = copy; ++ } ++ pathv[pglob->gl_offs + pglob->gl_pathc] = NULL; ++ return(copy == NULL ? GLOB_NOSPACE : 0); ++} ++ ++/* ++ * pattern matching function for filenames. Each occurrence of the * ++ * pattern causes a recursion level. 
++ */ ++static int ++match(Char *name, Char *pat, Char *patend) ++{ ++ int ok, negate_range; ++ Char c, k; ++ ++ while (pat < patend) { ++ c = *pat++; ++ switch (c & M_MASK) { ++ case M_ALL: ++ if (pat == patend) ++ return(1); ++ do ++ if (match(name, pat, patend)) ++ return(1); ++ while (*name++ != EOS); ++ return(0); ++ case M_ONE: ++ if (*name++ == EOS) ++ return(0); ++ break; ++ case M_SET: ++ ok = 0; ++ if ((k = *name++) == EOS) ++ return(0); ++ if ((negate_range = ((*pat & M_MASK) == M_NOT)) != EOS) ++ ++pat; ++ while (((c = *pat++) & M_MASK) != M_END) ++ if ((*pat & M_MASK) == M_RNG) { ++ if (CHAR(c) <= CHAR(k) && CHAR(k) <= CHAR(pat[1])) ok = 1; ++ pat += 2; ++ } else if (c == k) ++ ok = 1; ++ if (ok == negate_range) ++ return(0); ++ break; ++ default: ++ if (*name++ != c) ++ return(0); ++ break; ++ } ++ } ++ return(*name == EOS); ++} ++ ++/* Free allocated data belonging to a glob_t structure. */ ++void ++globfree(glob_t *pglob) ++{ ++ size_t i; ++ char **pp; ++ ++ if (pglob->gl_pathv != NULL) { ++ pp = pglob->gl_pathv + pglob->gl_offs; ++ for (i = pglob->gl_pathc; i--; ++pp) ++ if (*pp) ++ free(*pp); ++ free(pglob->gl_pathv); ++ pglob->gl_pathv = NULL; ++ } ++} ++ ++static DIR * ++g_opendir(Char *str, glob_t *pglob) ++{ ++ char buf[MAXPATHLEN]; ++ ++ if (!*str) ++ strcpy(buf, "."); ++ else { ++ if (g_Ctoc(str, buf, sizeof(buf))) ++ return (NULL); ++ } ++ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_opendir)(buf)); ++ ++ return(opendir(buf)); ++} ++ ++static int ++g_lstat(Char *fn, struct stat *sb, glob_t *pglob) ++{ ++ char buf[MAXPATHLEN]; ++ ++ if (g_Ctoc(fn, buf, sizeof(buf))) { ++ errno = ENAMETOOLONG; ++ return (-1); ++ } ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_lstat)(buf, sb)); ++ return(lstat(buf, sb)); ++} ++ ++static int ++g_stat(Char *fn, struct stat *sb, glob_t *pglob) ++{ ++ char buf[MAXPATHLEN]; ++ ++ if (g_Ctoc(fn, buf, sizeof(buf))) { ++ errno = ENAMETOOLONG; ++ return (-1); ++ } ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_stat)(buf, sb)); ++ return(stat(buf, sb)); ++} ++ ++static const Char * ++g_strchr(const Char *str, wchar_t ch) ++{ ++ ++ do { ++ if (*str == ch) ++ return (str); ++ } while (*str++); ++ return (NULL); ++} ++ ++static int ++g_Ctoc(const Char *str, char *buf, size_t len) ++{ ++ mbstate_t mbs; ++ size_t clen; ++ ++ memset(&mbs, 0, sizeof(mbs)); ++ while (len >= MB_CUR_MAX) { ++ clen = wcrtomb(buf, *str, &mbs); ++ if (clen == (size_t)-1) ++ return (1); ++ if (*str == L'\0') ++ return (0); ++ str++; ++ buf += clen; ++ len -= clen; ++ } ++ return (1); ++} ++ ++#ifdef DEBUG ++static void ++qprintf(const char *str, Char *s) ++{ ++ Char *p; ++ ++ (void)printf("%s:\n", str); ++ for (p = s; *p; p++) ++ (void)printf("%c", CHAR(*p)); ++ (void)printf("\n"); ++ for (p = s; *p; p++) ++ (void)printf("%c", *p & M_PROTECT ? '"' : ' '); ++ (void)printf("\n"); ++ for (p = s; *p; p++) ++ (void)printf("%c", ismeta(*p) ? '_' : ' '); ++ (void)printf("\n"); ++} ++#endif +diff --git a/glob.h b/glob.h +new file mode 100644 +index 0000000..351b6c4 +--- /dev/null ++++ b/glob.h +@@ -0,0 +1,105 @@ ++/* ++ * Copyright (c) 1989, 1993 ++ * The Regents of the University of California. All rights reserved. ++ * ++ * This code is derived from software contributed to Berkeley by ++ * Guido van Rossum. ++ * ++ * Redistribution and use in source and binary forms, with or without ++ * modification, are permitted provided that the following conditions ++ * are met: ++ * 1. 
Redistributions of source code must retain the above copyright
++ *    notice, this list of conditions and the following disclaimer.
++ * 2. Redistributions in binary form must reproduce the above copyright
++ *    notice, this list of conditions and the following disclaimer in the
++ *    documentation and/or other materials provided with the distribution.
++ * 3. Neither the name of the University nor the names of its contributors
++ *    may be used to endorse or promote products derived from this software
++ *    without specific prior written permission.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
++ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
++ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
++ * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
++ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
++ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
++ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
++ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
++ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
++ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ *
++ * @(#)glob.h 8.1 (Berkeley) 6/2/93
++ * $FreeBSD$
++ */
++
++#ifndef _GLOB_H_
++#define _GLOB_H_
++
++#include <sys/cdefs.h>
++#include <sys/types.h>
++
++#ifndef _SIZE_T_DECLARED
++typedef __size_t size_t;
++#define _SIZE_T_DECLARED
++#endif
++
++struct stat;
++typedef struct {
++    size_t gl_pathc;    /* Count of total paths so far. */
++    size_t gl_matchc;   /* Count of paths matching pattern. */
++    size_t gl_offs;     /* Reserved at beginning of gl_pathv. */
++    int gl_flags;       /* Copy of flags parameter to glob. */
++    char **gl_pathv;    /* List of paths matching pattern. */
++    /* Copy of errfunc parameter to glob. */
++    int (*gl_errfunc)(const char *, int);
++
++    /*
++     * Alternate filesystem access methods for glob; replacement
++     * versions of closedir(3), readdir(3), opendir(3), stat(2)
++     * and lstat(2).
++     */
++    void (*gl_closedir)(void *);
++    struct dirent *(*gl_readdir)(void *);
++    void *(*gl_opendir)(const char *);
++    int (*gl_lstat)(const char *, struct stat *);
++    int (*gl_stat)(const char *, struct stat *);
++} glob_t;
++
++#if __POSIX_VISIBLE >= 199209
++/* Believed to have been introduced in 1003.2-1992 */
++#define GLOB_APPEND 0x0001 /* Append to output from previous call. */
++#define GLOB_DOOFFS 0x0002 /* Use gl_offs. */
++#define GLOB_ERR 0x0004 /* Return on error. */
++#define GLOB_MARK 0x0008 /* Append / to matching directories. */
++#define GLOB_NOCHECK 0x0010 /* Return pattern itself if nothing matches. */
++#define GLOB_NOSORT 0x0020 /* Don't sort. */
++#define GLOB_NOESCAPE 0x2000 /* Disable backslash escaping. */
++
++/* Error values returned by glob(3) */
++#define GLOB_NOSPACE (-1) /* Malloc call failed. */
++#define GLOB_ABORTED (-2) /* Unignored error. */
++#define GLOB_NOMATCH (-3) /* No match and GLOB_NOCHECK was not set. */
++#define GLOB_NOSYS (-4) /* Obsolete: source comptability only. */
++#endif /* __POSIX_VISIBLE >= 199209 */
++
++#if __BSD_VISIBLE
++#define GLOB_ALTDIRFUNC 0x0040 /* Use alternately specified directory funcs. */
++#define GLOB_BRACE 0x0080 /* Expand braces ala csh. */
++#define GLOB_MAGCHAR 0x0100 /* Pattern had globbing characters. */
++#define GLOB_NOMAGIC 0x0200 /* GLOB_NOCHECK without magic chars (csh). */
++#define GLOB_QUOTE 0x0400 /* Quote special chars with \. */
++#define GLOB_TILDE 0x0800 /* Expand tilde names from the passwd file. */
++#define GLOB_LIMIT 0x1000 /* limit number of returned paths */
++
++/* source compatibility, these are the old names */
++#define GLOB_MAXPATH GLOB_LIMIT
++#define GLOB_ABEND GLOB_ABORTED
++#endif /* __BSD_VISIBLE */
++
++__BEGIN_DECLS
++int glob(const char *, int, int (*)(const char *, int), glob_t *);
++void globfree(glob_t *);
++__END_DECLS
++
++#endif /* !_GLOB_H_ */
+--
+1.9.1
+
diff --git a/p4a/pythonforandroidold/recipes/libxml2/glob.c b/p4a/pythonforandroidold/recipes/libxml2/glob.c
new file mode 100644
index 0000000..cec80ed
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/libxml2/glob.c
@@ -0,0 +1,906 @@
+/*
+ * Natanael Arndt, 2011: removed collate.h dependencies
+ * (my changes are trivial)
+ *
+ * Copyright (c) 1989, 1993
+ * The Regents of the University of California. All rights reserved.
+ *
+ * This code is derived from software contributed to Berkeley by
+ * Guido van Rossum.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ * 4. Neither the name of the University nor the names of its contributors
+ *    may be used to endorse or promote products derived from this software
+ *    without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#if defined(LIBC_SCCS) && !defined(lint)
+static char sccsid[] = "@(#)glob.c 8.3 (Berkeley) 10/13/93";
+#endif /* LIBC_SCCS and not lint */
+#include <sys/cdefs.h>
+__FBSDID("$FreeBSD$");
+
+/*
+ * glob(3) -- a superset of the one defined in POSIX 1003.2.
+ *
+ * The [!...] convention to negate a range is supported (SysV, Posix, ksh).
+ *
+ * Optional extra services, controlled by flags not defined by POSIX:
+ *
+ * GLOB_QUOTE:
+ * Escaping convention: \ inhibits any special meaning the following
+ * character might have (except \ at end of string is retained).
+ * GLOB_MAGCHAR:
+ * Set in gl_flags if pattern contained a globbing character.
+ * GLOB_NOMAGIC:
+ * Same as GLOB_NOCHECK, but it will only append pattern if it did
+ * not contain any magic characters. [Used in csh style globbing]
+ * GLOB_ALTDIRFUNC:
+ * Use alternately specified directory access functions.
+ * GLOB_TILDE:
+ * expand ~user/foo to the /home/dir/of/user/foo
+ * GLOB_BRACE:
+ * expand {1,2}{a,b} to 1a 1b 2a 2b
+ * gl_matchc:
+ * Number of matches in the current invocation of glob.
+ */
+
+/*
+ * Some notes on multibyte character support:
+ * 1. Patterns with illegal byte sequences match nothing - even if
+ *    GLOB_NOCHECK is specified.
+ * 2. Illegal byte sequences in filenames are handled by treating them as
+ *    single-byte characters with a value of the first byte of the sequence
+ *    cast to wchar_t.
+ * 3. State-dependent encodings are not currently supported.
+ */
+
+#include <sys/param.h>
+#include <sys/stat.h>
+
+#include <ctype.h>
+#include <dirent.h>
+#include <errno.h>
+#include <glob.h>
+#include <limits.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <wchar.h>
+
+#define DOLLAR '$'
+#define DOT '.'
+#define EOS '\0'
+#define LBRACKET '['
+#define NOT '!'
+#define QUESTION '?'
+#define QUOTE '\\'
+#define RANGE '-'
+#define RBRACKET ']'
+#define SEP '/'
+#define STAR '*'
+#define TILDE '~'
+#define UNDERSCORE '_'
+#define LBRACE '{'
+#define RBRACE '}'
+#define SLASH '/'
+#define COMMA ','
+
+#ifndef DEBUG
+
+#define M_QUOTE 0x8000000000ULL
+#define M_PROTECT 0x4000000000ULL
+#define M_MASK 0xffffffffffULL
+#define M_CHAR 0x00ffffffffULL
+
+typedef uint_fast64_t Char;
+
+#else
+
+#define M_QUOTE 0x80
+#define M_PROTECT 0x40
+#define M_MASK 0xff
+#define M_CHAR 0x7f
+
+typedef char Char;
+
+#endif
+
+
+#define CHAR(c) ((Char)((c)&M_CHAR))
+#define META(c) ((Char)((c)|M_QUOTE))
+#define M_ALL META('*')
+#define M_END META(']')
+#define M_NOT META('!')
+#define M_ONE META('?')
+#define M_RNG META('-')
+#define M_SET META('[')
+#define ismeta(c) (((c)&M_QUOTE) != 0)
+
+
+static int compare(const void *, const void *);
+static int g_Ctoc(const Char *, char *, size_t);
+static int g_lstat(Char *, struct stat *, glob_t *);
+static DIR *g_opendir(Char *, glob_t *);
+static const Char *g_strchr(const Char *, wchar_t);
+#ifdef notdef
+static Char *g_strcat(Char *, const Char *);
+#endif
+static int g_stat(Char *, struct stat *, glob_t *);
+static int glob0(const Char *, glob_t *, size_t *);
+static int glob1(Char *, glob_t *, size_t *);
+static int glob2(Char *, Char *, Char *, Char *, glob_t *, size_t *);
+static int glob3(Char *, Char *, Char *, Char *, Char *, glob_t *, size_t *);
+static int globextend(const Char *, glob_t *, size_t *);
+static const Char *
+ globtilde(const Char *, Char *, size_t, glob_t *);
+static int globexp1(const Char *, glob_t *, size_t *);
+static int globexp2(const Char *, const Char *, glob_t *, int *, size_t *);
+static int match(Char *, Char *, Char *);
+#ifdef DEBUG
+static void qprintf(const char *, Char *);
+#endif
+
+int
+glob(const char *pattern, int flags, int (*errfunc)(const char *, int), glob_t *pglob)
+{
+    const char *patnext;
+    size_t limit;
+    Char *bufnext, *bufend, patbuf[MAXPATHLEN], prot;
+    mbstate_t mbs;
+    wchar_t wc;
+    size_t clen;
+
+    patnext = pattern;
+    if (!(flags & GLOB_APPEND)) {
+        pglob->gl_pathc = 0;
+        pglob->gl_pathv = NULL;
+        if (!(flags & GLOB_DOOFFS))
+            pglob->gl_offs = 0;
+    }
+    if (flags & GLOB_LIMIT) {
+        limit = pglob->gl_matchc;
+        if (limit == 0)
+            limit = ARG_MAX;
+    } else
+        limit = 0;
+    pglob->gl_flags = flags & ~GLOB_MAGCHAR;
+    pglob->gl_errfunc = errfunc;
+    pglob->gl_matchc = 0;
+
+    bufnext = patbuf;
+    bufend = bufnext + MAXPATHLEN - 1;
+    if (flags & GLOB_NOESCAPE) {
+        memset(&mbs, 0, sizeof(mbs));
+        while (bufend - bufnext >= MB_CUR_MAX) {
+            clen = mbrtowc(&wc, patnext, MB_LEN_MAX, &mbs);
+            if (clen == (size_t)-1 || clen == (size_t)-2)
+                return (GLOB_NOMATCH);
+            else if
(clen == 0) + break; + *bufnext++ = wc; + patnext += clen; + } + } else { + /* Protect the quoted characters. */ + memset(&mbs, 0, sizeof(mbs)); + while (bufend - bufnext >= MB_CUR_MAX) { + if (*patnext == QUOTE) { + if (*++patnext == EOS) { + *bufnext++ = QUOTE | M_PROTECT; + continue; + } + prot = M_PROTECT; + } else + prot = 0; + clen = mbrtowc(&wc, patnext, MB_LEN_MAX, &mbs); + if (clen == (size_t)-1 || clen == (size_t)-2) + return (GLOB_NOMATCH); + else if (clen == 0) + break; + *bufnext++ = wc | prot; + patnext += clen; + } + } + *bufnext = EOS; + + if (flags & GLOB_BRACE) + return globexp1(patbuf, pglob, &limit); + else + return glob0(patbuf, pglob, &limit); +} + +/* + * Expand recursively a glob {} pattern. When there is no more expansion + * invoke the standard globbing routine to glob the rest of the magic + * characters + */ +static int +globexp1(const Char *pattern, glob_t *pglob, size_t *limit) +{ + const Char* ptr = pattern; + int rv; + + /* Protect a single {}, for find(1), like csh */ + if (pattern[0] == LBRACE && pattern[1] == RBRACE && pattern[2] == EOS) + return glob0(pattern, pglob, limit); + + while ((ptr = g_strchr(ptr, LBRACE)) != NULL) + if (!globexp2(ptr, pattern, pglob, &rv, limit)) + return rv; + + return glob0(pattern, pglob, limit); +} + + +/* + * Recursive brace globbing helper. Tries to expand a single brace. + * If it succeeds then it invokes globexp1 with the new pattern. + * If it fails then it tries to glob the rest of the pattern and returns. + */ +static int +globexp2(const Char *ptr, const Char *pattern, glob_t *pglob, int *rv, size_t *limit) +{ + int i; + Char *lm, *ls; + const Char *pe, *pm, *pm1, *pl; + Char patbuf[MAXPATHLEN]; + + /* copy part up to the brace */ + for (lm = patbuf, pm = pattern; pm != ptr; *lm++ = *pm++) + continue; + *lm = EOS; + ls = lm; + + /* Find the balanced brace */ + for (i = 0, pe = ++ptr; *pe; pe++) + if (*pe == LBRACKET) { + /* Ignore everything between [] */ + for (pm = pe++; *pe != RBRACKET && *pe != EOS; pe++) + continue; + if (*pe == EOS) { + /* + * We could not find a matching RBRACKET. + * Ignore and just look for RBRACE + */ + pe = pm; + } + } + else if (*pe == LBRACE) + i++; + else if (*pe == RBRACE) { + if (i == 0) + break; + i--; + } + + /* Non matching braces; just glob the pattern */ + if (i != 0 || *pe == EOS) { + *rv = glob0(patbuf, pglob, limit); + return 0; + } + + for (i = 0, pl = pm = ptr; pm <= pe; pm++) + switch (*pm) { + case LBRACKET: + /* Ignore everything between [] */ + for (pm1 = pm++; *pm != RBRACKET && *pm != EOS; pm++) + continue; + if (*pm == EOS) { + /* + * We could not find a matching RBRACKET. + * Ignore and just look for RBRACE + */ + pm = pm1; + } + break; + + case LBRACE: + i++; + break; + + case RBRACE: + if (i) { + i--; + break; + } + /* FALLTHROUGH */ + case COMMA: + if (i && *pm == COMMA) + break; + else { + /* Append the current string */ + for (lm = ls; (pl < pm); *lm++ = *pl++) + continue; + /* + * Append the rest of the pattern after the + * closing brace + */ + for (pl = pe + 1; (*lm++ = *pl++) != EOS;) + continue; + + /* Expand the current pattern */ +#ifdef DEBUG + qprintf("globexp2:", patbuf); +#endif + *rv = globexp1(patbuf, pglob, limit); + + /* move after the comma, to the next string */ + pl = pm + 1; + } + break; + + default: + break; + } + *rv = 0; + return 0; +} + + + +/* + * expand tilde from the passwd file. 
+ */ +static const Char * +globtilde(const Char *pattern, Char *patbuf, size_t patbuf_len, glob_t *pglob) +{ + struct passwd *pwd; + char *h; + const Char *p; + Char *b, *eb; + + if (*pattern != TILDE || !(pglob->gl_flags & GLOB_TILDE)) + return pattern; + + /* + * Copy up to the end of the string or / + */ + eb = &patbuf[patbuf_len - 1]; + for (p = pattern + 1, h = (char *) patbuf; + h < (char *)eb && *p && *p != SLASH; *h++ = *p++) + continue; + + *h = EOS; + + if (((char *) patbuf)[0] == EOS) { + /* + * handle a plain ~ or ~/ by expanding $HOME first (iff + * we're not running setuid or setgid) and then trying + * the password file + */ + if (issetugid() != 0 || + (h = getenv("HOME")) == NULL) { + if (((h = getlogin()) != NULL && + (pwd = getpwnam(h)) != NULL) || + (pwd = getpwuid(getuid())) != NULL) + h = pwd->pw_dir; + else + return pattern; + } + } + else { + /* + * Expand a ~user + */ + if ((pwd = getpwnam((char*) patbuf)) == NULL) + return pattern; + else + h = pwd->pw_dir; + } + + /* Copy the home directory */ + for (b = patbuf; b < eb && *h; *b++ = *h++) + continue; + + /* Append the rest of the pattern */ + while (b < eb && (*b++ = *p++) != EOS) + continue; + *b = EOS; + + return patbuf; +} + + +/* + * The main glob() routine: compiles the pattern (optionally processing + * quotes), calls glob1() to do the real pattern matching, and finally + * sorts the list (unless unsorted operation is requested). Returns 0 + * if things went well, nonzero if errors occurred. + */ +static int +glob0(const Char *pattern, glob_t *pglob, size_t *limit) +{ + const Char *qpatnext; + int err; + size_t oldpathc; + Char *bufnext, c, patbuf[MAXPATHLEN]; + + qpatnext = globtilde(pattern, patbuf, MAXPATHLEN, pglob); + oldpathc = pglob->gl_pathc; + bufnext = patbuf; + + /* We don't need to check for buffer overflow any more. */ + while ((c = *qpatnext++) != EOS) { + switch (c) { + case LBRACKET: + c = *qpatnext; + if (c == NOT) + ++qpatnext; + if (*qpatnext == EOS || + g_strchr(qpatnext+1, RBRACKET) == NULL) { + *bufnext++ = LBRACKET; + if (c == NOT) + --qpatnext; + break; + } + *bufnext++ = M_SET; + if (c == NOT) + *bufnext++ = M_NOT; + c = *qpatnext++; + do { + *bufnext++ = CHAR(c); + if (*qpatnext == RANGE && + (c = qpatnext[1]) != RBRACKET) { + *bufnext++ = M_RNG; + *bufnext++ = CHAR(c); + qpatnext += 2; + } + } while ((c = *qpatnext++) != RBRACKET); + pglob->gl_flags |= GLOB_MAGCHAR; + *bufnext++ = M_END; + break; + case QUESTION: + pglob->gl_flags |= GLOB_MAGCHAR; + *bufnext++ = M_ONE; + break; + case STAR: + pglob->gl_flags |= GLOB_MAGCHAR; + /* collapse adjacent stars to one, + * to avoid exponential behavior + */ + if (bufnext == patbuf || bufnext[-1] != M_ALL) + *bufnext++ = M_ALL; + break; + default: + *bufnext++ = CHAR(c); + break; + } + } + *bufnext = EOS; +#ifdef DEBUG + qprintf("glob0:", patbuf); +#endif + + if ((err = glob1(patbuf, pglob, limit)) != 0) + return(err); + + /* + * If there was no match we are going to append the pattern + * if GLOB_NOCHECK was specified or if GLOB_NOMAGIC was specified + * and the pattern did not contain any magic characters + * GLOB_NOMAGIC is there just for compatibility with csh. 
+ */ + if (pglob->gl_pathc == oldpathc) { + if (((pglob->gl_flags & GLOB_NOCHECK) || + ((pglob->gl_flags & GLOB_NOMAGIC) && + !(pglob->gl_flags & GLOB_MAGCHAR)))) + return(globextend(pattern, pglob, limit)); + else + return(GLOB_NOMATCH); + } + if (!(pglob->gl_flags & GLOB_NOSORT)) + qsort(pglob->gl_pathv + pglob->gl_offs + oldpathc, + pglob->gl_pathc - oldpathc, sizeof(char *), compare); + return(0); +} + +static int +compare(const void *p, const void *q) +{ + return(strcmp(*(char **)p, *(char **)q)); +} + +static int +glob1(Char *pattern, glob_t *pglob, size_t *limit) +{ + Char pathbuf[MAXPATHLEN]; + + /* A null pathname is invalid -- POSIX 1003.1 sect. 2.4. */ + if (*pattern == EOS) + return(0); + return(glob2(pathbuf, pathbuf, pathbuf + MAXPATHLEN - 1, + pattern, pglob, limit)); +} + +/* + * The functions glob2 and glob3 are mutually recursive; there is one level + * of recursion for each segment in the pattern that contains one or more + * meta characters. + */ +static int +glob2(Char *pathbuf, Char *pathend, Char *pathend_last, Char *pattern, + glob_t *pglob, size_t *limit) +{ + struct stat sb; + Char *p, *q; + int anymeta; + + /* + * Loop over pattern segments until end of pattern or until + * segment with meta character found. + */ + for (anymeta = 0;;) { + if (*pattern == EOS) { /* End of pattern? */ + *pathend = EOS; + if (g_lstat(pathbuf, &sb, pglob)) + return(0); + + if (((pglob->gl_flags & GLOB_MARK) && + pathend[-1] != SEP) && (S_ISDIR(sb.st_mode) + || (S_ISLNK(sb.st_mode) && + (g_stat(pathbuf, &sb, pglob) == 0) && + S_ISDIR(sb.st_mode)))) { + if (pathend + 1 > pathend_last) + return (GLOB_ABORTED); + *pathend++ = SEP; + *pathend = EOS; + } + ++pglob->gl_matchc; + return(globextend(pathbuf, pglob, limit)); + } + + /* Find end of next segment, copy tentatively to pathend. */ + q = pathend; + p = pattern; + while (*p != EOS && *p != SEP) { + if (ismeta(*p)) + anymeta = 1; + if (q + 1 > pathend_last) + return (GLOB_ABORTED); + *q++ = *p++; + } + + if (!anymeta) { /* No expansion, do next segment. */ + pathend = q; + pattern = p; + while (*pattern == SEP) { + if (pathend + 1 > pathend_last) + return (GLOB_ABORTED); + *pathend++ = *pattern++; + } + } else /* Need expansion, recurse. */ + return(glob3(pathbuf, pathend, pathend_last, pattern, p, + pglob, limit)); + } + /* NOTREACHED */ +} + +static int +glob3(Char *pathbuf, Char *pathend, Char *pathend_last, + Char *pattern, Char *restpattern, + glob_t *pglob, size_t *limit) +{ + struct dirent *dp; + DIR *dirp; + int err; + char buf[MAXPATHLEN]; + + /* + * The readdirfunc declaration can't be prototyped, because it is + * assigned, below, to two functions which are prototyped in glob.h + * and dirent.h as taking pointers to differently typed opaque + * structures. + */ + struct dirent *(*readdirfunc)(); + + if (pathend > pathend_last) + return (GLOB_ABORTED); + *pathend = EOS; + errno = 0; + + if ((dirp = g_opendir(pathbuf, pglob)) == NULL) { + /* TODO: don't call for ENOENT or ENOTDIR? */ + if (pglob->gl_errfunc) { + if (g_Ctoc(pathbuf, buf, sizeof(buf))) + return (GLOB_ABORTED); + if (pglob->gl_errfunc(buf, errno) || + pglob->gl_flags & GLOB_ERR) + return (GLOB_ABORTED); + } + return(0); + } + + err = 0; + + /* Search directory for matching names. */ + if (pglob->gl_flags & GLOB_ALTDIRFUNC) + readdirfunc = pglob->gl_readdir; + else + readdirfunc = readdir; + while ((dp = (*readdirfunc)(dirp))) { + char *sc; + Char *dc; + wchar_t wc; + size_t clen; + mbstate_t mbs; + + /* Initial DOT must be matched literally. 
*/
+ if (dp->d_name[0] == DOT && *pattern != DOT)
+ continue;
+ memset(&mbs, 0, sizeof(mbs));
+ dc = pathend;
+ sc = dp->d_name;
+ while (dc < pathend_last) {
+ clen = mbrtowc(&wc, sc, MB_LEN_MAX, &mbs);
+ if (clen == (size_t)-1 || clen == (size_t)-2) {
+ wc = *sc;
+ clen = 1;
+ memset(&mbs, 0, sizeof(mbs));
+ }
+ if ((*dc++ = wc) == EOS)
+ break;
+ sc += clen;
+ }
+ if (!match(pathend, pattern, restpattern)) {
+ *pathend = EOS;
+ continue;
+ }
+ err = glob2(pathbuf, --dc, pathend_last, restpattern,
+ pglob, limit);
+ if (err)
+ break;
+ }
+
+ if (pglob->gl_flags & GLOB_ALTDIRFUNC)
+ (*pglob->gl_closedir)(dirp);
+ else
+ closedir(dirp);
+ return(err);
+}
+
+
+/*
+ * Extend the gl_pathv member of a glob_t structure to accommodate a new item,
+ * add the new item, and update gl_pathc.
+ *
+ * This assumes the BSD realloc, which only copies the block when its size
+ * crosses a power-of-two boundary; for v7 realloc, this would cause quadratic
+ * behavior.
+ *
+ * Return 0 if new item added, error code if memory couldn't be allocated.
+ *
+ * Invariant of the glob_t structure:
+ * Either gl_pathc is zero and gl_pathv is NULL; or gl_pathc > 0 and
+ * gl_pathv points to (gl_offs + gl_pathc + 1) items.
+ */
+static int
+globextend(const Char *path, glob_t *pglob, size_t *limit)
+{
+ char **pathv;
+ size_t i, newsize, len;
+ char *copy;
+ const Char *p;
+
+ if (*limit && pglob->gl_pathc > *limit) {
+ errno = 0;
+ return (GLOB_NOSPACE);
+ }
+
+ newsize = sizeof(*pathv) * (2 + pglob->gl_pathc + pglob->gl_offs);
+ pathv = pglob->gl_pathv ?
+ realloc((char *)pglob->gl_pathv, newsize) :
+ malloc(newsize);
+ if (pathv == NULL) {
+ if (pglob->gl_pathv) {
+ free(pglob->gl_pathv);
+ pglob->gl_pathv = NULL;
+ }
+ return(GLOB_NOSPACE);
+ }
+
+ if (pglob->gl_pathv == NULL && pglob->gl_offs > 0) {
+ /* first time around -- clear initial gl_offs items */
+ pathv += pglob->gl_offs;
+ for (i = pglob->gl_offs + 1; --i > 0; )
+ *--pathv = NULL;
+ }
+ pglob->gl_pathv = pathv;
+
+ for (p = path; *p++;)
+ continue;
+ len = MB_CUR_MAX * (size_t)(p - path); /* XXX overallocation */
+ if ((copy = malloc(len)) != NULL) {
+ if (g_Ctoc(path, copy, len)) {
+ free(copy);
+ return (GLOB_NOSPACE);
+ }
+ pathv[pglob->gl_offs + pglob->gl_pathc++] = copy;
+ }
+ pathv[pglob->gl_offs + pglob->gl_pathc] = NULL;
+ return(copy == NULL ? GLOB_NOSPACE : 0);
+}
+
+/*
+ * pattern matching function for filenames. Each occurrence of the *
+ * pattern causes a recursion level.
+ */
+static int
+match(Char *name, Char *pat, Char *patend)
+{
+ int ok, negate_range;
+ Char c, k;
+
+ while (pat < patend) {
+ c = *pat++;
+ switch (c & M_MASK) {
+ case M_ALL:
+ if (pat == patend)
+ return(1);
+ do
+ if (match(name, pat, patend))
+ return(1);
+ while (*name++ != EOS);
+ return(0);
+ case M_ONE:
+ if (*name++ == EOS)
+ return(0);
+ break;
+ case M_SET:
+ ok = 0;
+ if ((k = *name++) == EOS)
+ return(0);
+ if ((negate_range = ((*pat & M_MASK) == M_NOT)) != EOS)
+ ++pat;
+ while (((c = *pat++) & M_MASK) != M_END)
+ if ((*pat & M_MASK) == M_RNG) {
+ if (CHAR(c) <= CHAR(k) && CHAR(k) <= CHAR(pat[1])) ok = 1;
+ pat += 2;
+ } else if (c == k)
+ ok = 1;
+ if (ok == negate_range)
+ return(0);
+ break;
+ default:
+ if (*name++ != c)
+ return(0);
+ break;
+ }
+ }
+ return(*name == EOS);
+}
+
+/* Free allocated data belonging to a glob_t structure.
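+ * Resetting gl_pathv to NULL below preserves the glob_t invariant and
+ * makes a second globfree() on the same structure a harmless no-op.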
*/ +void +globfree(glob_t *pglob) +{ + size_t i; + char **pp; + + if (pglob->gl_pathv != NULL) { + pp = pglob->gl_pathv + pglob->gl_offs; + for (i = pglob->gl_pathc; i--; ++pp) + if (*pp) + free(*pp); + free(pglob->gl_pathv); + pglob->gl_pathv = NULL; + } +} + +static DIR * +g_opendir(Char *str, glob_t *pglob) +{ + char buf[MAXPATHLEN]; + + if (!*str) + strcpy(buf, "."); + else { + if (g_Ctoc(str, buf, sizeof(buf))) + return (NULL); + } + + if (pglob->gl_flags & GLOB_ALTDIRFUNC) + return((*pglob->gl_opendir)(buf)); + + return(opendir(buf)); +} + +static int +g_lstat(Char *fn, struct stat *sb, glob_t *pglob) +{ + char buf[MAXPATHLEN]; + + if (g_Ctoc(fn, buf, sizeof(buf))) { + errno = ENAMETOOLONG; + return (-1); + } + if (pglob->gl_flags & GLOB_ALTDIRFUNC) + return((*pglob->gl_lstat)(buf, sb)); + return(lstat(buf, sb)); +} + +static int +g_stat(Char *fn, struct stat *sb, glob_t *pglob) +{ + char buf[MAXPATHLEN]; + + if (g_Ctoc(fn, buf, sizeof(buf))) { + errno = ENAMETOOLONG; + return (-1); + } + if (pglob->gl_flags & GLOB_ALTDIRFUNC) + return((*pglob->gl_stat)(buf, sb)); + return(stat(buf, sb)); +} + +static const Char * +g_strchr(const Char *str, wchar_t ch) +{ + + do { + if (*str == ch) + return (str); + } while (*str++); + return (NULL); +} + +static int +g_Ctoc(const Char *str, char *buf, size_t len) +{ + mbstate_t mbs; + size_t clen; + + memset(&mbs, 0, sizeof(mbs)); + while (len >= MB_CUR_MAX) { + clen = wcrtomb(buf, *str, &mbs); + if (clen == (size_t)-1) + return (1); + if (*str == L'\0') + return (0); + str++; + buf += clen; + len -= clen; + } + return (1); +} + +#ifdef DEBUG +static void +qprintf(const char *str, Char *s) +{ + Char *p; + + (void)printf("%s:\n", str); + for (p = s; *p; p++) + (void)printf("%c", CHAR(*p)); + (void)printf("\n"); + for (p = s; *p; p++) + (void)printf("%c", *p & M_PROTECT ? '"' : ' '); + (void)printf("\n"); + for (p = s; *p; p++) + (void)printf("%c", ismeta(*p) ? '_' : ' '); + (void)printf("\n"); +} +#endif diff --git a/p4a/pythonforandroidold/recipes/libxml2/glob.h b/p4a/pythonforandroidold/recipes/libxml2/glob.h new file mode 100644 index 0000000..351b6c4 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libxml2/glob.h @@ -0,0 +1,105 @@ +/* + * Copyright (c) 1989, 1993 + * The Regents of the University of California. All rights reserved. + * + * This code is derived from software contributed to Berkeley by + * Guido van Rossum. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the University nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * @(#)glob.h 8.1 (Berkeley) 6/2/93
+ * $FreeBSD$
+ */
+
+#ifndef _GLOB_H_
+#define _GLOB_H_
+
+#include <sys/cdefs.h>
+#include <sys/types.h>
+
+#ifndef _SIZE_T_DECLARED
+typedef __size_t size_t;
+#define _SIZE_T_DECLARED
+#endif
+
+struct stat;
+typedef struct {
+ size_t gl_pathc; /* Count of total paths so far. */
+ size_t gl_matchc; /* Count of paths matching pattern. */
+ size_t gl_offs; /* Reserved at beginning of gl_pathv. */
+ int gl_flags; /* Copy of flags parameter to glob. */
+ char **gl_pathv; /* List of paths matching pattern. */
+ /* Copy of errfunc parameter to glob. */
+ int (*gl_errfunc)(const char *, int);
+
+ /*
+ * Alternate filesystem access methods for glob; replacement
+ * versions of closedir(3), readdir(3), opendir(3), stat(2)
+ * and lstat(2).
+ */
+ void (*gl_closedir)(void *);
+ struct dirent *(*gl_readdir)(void *);
+ void *(*gl_opendir)(const char *);
+ int (*gl_lstat)(const char *, struct stat *);
+ int (*gl_stat)(const char *, struct stat *);
+} glob_t;
+
+#if __POSIX_VISIBLE >= 199209
+/* Believed to have been introduced in 1003.2-1992 */
+#define GLOB_APPEND 0x0001 /* Append to output from previous call. */
+#define GLOB_DOOFFS 0x0002 /* Use gl_offs. */
+#define GLOB_ERR 0x0004 /* Return on error. */
+#define GLOB_MARK 0x0008 /* Append / to matching directories. */
+#define GLOB_NOCHECK 0x0010 /* Return pattern itself if nothing matches. */
+#define GLOB_NOSORT 0x0020 /* Don't sort. */
+#define GLOB_NOESCAPE 0x2000 /* Disable backslash escaping. */
+
+/* Error values returned by glob(3) */
+#define GLOB_NOSPACE (-1) /* Malloc call failed. */
+#define GLOB_ABORTED (-2) /* Unignored error. */
+#define GLOB_NOMATCH (-3) /* No match and GLOB_NOCHECK was not set. */
+#define GLOB_NOSYS (-4) /* Obsolete: source compatibility only. */
+#endif /* __POSIX_VISIBLE >= 199209 */
+
+#if __BSD_VISIBLE
+#define GLOB_ALTDIRFUNC 0x0040 /* Use alternately specified directory funcs. */
+#define GLOB_BRACE 0x0080 /* Expand braces ala csh. */
+#define GLOB_MAGCHAR 0x0100 /* Pattern had globbing characters. */
+#define GLOB_NOMAGIC 0x0200 /* GLOB_NOCHECK without magic chars (csh). */
+#define GLOB_QUOTE 0x0400 /* Quote special chars with \. */
+#define GLOB_TILDE 0x0800 /* Expand tilde names from the passwd file.
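+ * For example "~alice/doc" is rewritten using alice's home directory
+ * from getpwnam(3), while a bare "~" tries $HOME first.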
*/ +#define GLOB_LIMIT 0x1000 /* limit number of returned paths */ + +/* source compatibility, these are the old names */ +#define GLOB_MAXPATH GLOB_LIMIT +#define GLOB_ABEND GLOB_ABORTED +#endif /* __BSD_VISIBLE */ + +__BEGIN_DECLS +int glob(const char *, int, int (*)(const char *, int), glob_t *); +void globfree(glob_t *); +__END_DECLS + +#endif /* !_GLOB_H_ */ diff --git a/p4a/pythonforandroidold/recipes/libxslt/__init__.py b/p4a/pythonforandroidold/recipes/libxslt/__init__.py new file mode 100644 index 0000000..076d6cc --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libxslt/__init__.py @@ -0,0 +1,74 @@ +from pythonforandroid.recipe import Recipe +from pythonforandroid.toolchain import shprint, shutil, current_directory +from os.path import exists, join +import sh + + +class LibxsltRecipe(Recipe): + version = '1.1.32' + url = 'http://xmlsoft.org/sources/libxslt-{version}.tar.gz' + depends = ['libxml2'] + patches = ['fix-dlopen.patch'] + + call_hostpython_via_targetpython = False + + def should_build(self, arch): + return not exists( + join(self.get_build_dir(arch.arch), + 'libxslt', '.libs', 'libxslt.a')) + + def build_arch(self, arch): + super(LibxsltRecipe, self).build_arch(arch) + env = self.get_recipe_env(arch) + build_dir = self.get_build_dir(arch.arch) + with current_directory(build_dir): + # If the build is done with /bin/sh things blow up, + # try really hard to use bash + libxml2_recipe = Recipe.get_recipe('libxml2', self.ctx) + libxml2_build_dir = libxml2_recipe.get_build_dir(arch.arch) + build_arch = shprint(sh.gcc, '-dumpmachine').stdout.decode( + 'utf-8').split('\n')[0] + + if not exists('configure'): + shprint(sh.Command('./autogen.sh'), _env=env) + shprint(sh.Command('autoreconf'), '-vif', _env=env) + shprint(sh.Command('./configure'), + '--build=' + build_arch, + '--host=' + arch.command_prefix, + '--target=' + arch.command_prefix, + '--without-plugins', + '--without-debug', + '--without-python', + '--without-crypto', + '--with-libxml-src=' + libxml2_build_dir, + '--disable-shared', + _env=env) + shprint(sh.make, "V=1", _env=env) + + shutil.copyfile('libxslt/.libs/libxslt.a', + join(self.ctx.libs_dir, 'libxslt.a')) + shutil.copyfile('libexslt/.libs/libexslt.a', + join(self.ctx.libs_dir, 'libexslt.a')) + + def get_recipe_env(self, arch): + env = super(LibxsltRecipe, self).get_recipe_env(arch) + env['CONFIG_SHELL'] = '/bin/bash' + env['SHELL'] = '/bin/bash' + + libxml2_recipe = Recipe.get_recipe('libxml2', self.ctx) + libxml2_build_dir = libxml2_recipe.get_build_dir(arch.arch) + libxml2_libs_dir = join(libxml2_build_dir, '.libs') + + env['CFLAGS'] = ' '.join([ + env['CFLAGS'], + '-I' + libxml2_build_dir, + '-I' + join(libxml2_build_dir, 'include', 'libxml'), + '-I' + self.get_build_dir(arch.arch), + ]) + env['LDFLAGS'] += ' -L' + libxml2_libs_dir + env['LIBS'] = '-lxml2 -lz -lm' + + return env + + +recipe = LibxsltRecipe() diff --git a/p4a/pythonforandroidold/recipes/libxslt/fix-dlopen.patch b/p4a/pythonforandroidold/recipes/libxslt/fix-dlopen.patch new file mode 100644 index 0000000..34d56b6 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libxslt/fix-dlopen.patch @@ -0,0 +1,11 @@ +--- libxslt-1.1.27.orig/python/libxsl.py 2012-09-04 16:26:23.000000000 +0200 ++++ libxslt-1.1.27/python/libxsl.py 2013-07-29 15:11:04.182227378 +0200 +@@ -4,7 +4,7 @@ + # loader to work in that mode if feasible + # + import sys +-if not hasattr(sys,'getdlopenflags'): ++if True: + import libxml2mod + import libxsltmod + import libxml2 diff --git 
a/p4a/pythonforandroidold/recipes/libzbar/__init__.py b/p4a/pythonforandroidold/recipes/libzbar/__init__.py new file mode 100644 index 0000000..43ae34c --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libzbar/__init__.py @@ -0,0 +1,57 @@ +import os +from pythonforandroid.toolchain import shprint, current_directory +from pythonforandroid.recipe import Recipe +from multiprocessing import cpu_count +import sh + + +class LibZBarRecipe(Recipe): + + version = '0.10' + + url = 'https://github.com/ZBar/ZBar/archive/{version}.zip' + + depends = ['libiconv'] + + patches = ["werror.patch"] + + def should_build(self, arch): + return not os.path.exists( + os.path.join(self.ctx.get_libs_dir(arch.arch), 'libzbar.so')) + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super(LibZBarRecipe, self).get_recipe_env(arch, with_flags_in_cc) + libiconv = self.get_recipe('libiconv', self.ctx) + libiconv_dir = libiconv.get_build_dir(arch.arch) + env['CFLAGS'] += ' -I' + os.path.join(libiconv_dir, 'include') + env['LIBS'] = env.get('LIBS', '') + ' -landroid -liconv' + return env + + def build_arch(self, arch): + super(LibZBarRecipe, self).build_arch(arch) + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + shprint(sh.Command('autoreconf'), '-vif', _env=env) + shprint( + sh.Command('./configure'), + '--host=' + arch.toolchain_prefix, + '--target=' + arch.toolchain_prefix, + '--prefix=' + self.ctx.get_python_install_dir(), + # Python bindings are compiled in a separated recipe + '--with-python=no', + '--with-gtk=no', + '--with-qt=no', + '--with-x=no', + '--with-jpeg=no', + '--with-imagemagick=no', + '--enable-pthread=no', + '--enable-video=no', + '--enable-shared=yes', + '--enable-static=no', + _env=env) + shprint(sh.make, '-j' + str(cpu_count()), _env=env) + libs = ['zbar/.libs/libzbar.so'] + self.install_libs(arch, *libs) + + +recipe = LibZBarRecipe() diff --git a/p4a/pythonforandroidold/recipes/libzbar/werror.patch b/p4a/pythonforandroidold/recipes/libzbar/werror.patch new file mode 100644 index 0000000..9fe5d36 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libzbar/werror.patch @@ -0,0 +1,13 @@ +diff --git a/configure.ac b/configure.ac +index 256aedb..727caba 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -3,7 +3,7 @@ AC_PREREQ([2.61]) + AC_INIT([zbar], [0.10], [spadix@users.sourceforge.net]) + AC_CONFIG_AUX_DIR(config) + AC_CONFIG_MACRO_DIR(config) +-AM_INIT_AUTOMAKE([1.10 -Wall -Werror foreign subdir-objects std-options dist-bzip2]) ++AM_INIT_AUTOMAKE([1.10 -Wall foreign subdir-objects std-options dist-bzip2]) + AC_CONFIG_HEADERS([include/config.h]) + AC_CONFIG_SRCDIR(zbar/scanner.c) + LT_PREREQ([2.2]) diff --git a/p4a/pythonforandroidold/recipes/libzmq/__init__.py b/p4a/pythonforandroidold/recipes/libzmq/__init__.py new file mode 100644 index 0000000..b33f3ac --- /dev/null +++ b/p4a/pythonforandroidold/recipes/libzmq/__init__.py @@ -0,0 +1,75 @@ +from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory +from pythonforandroid.util import ensure_dir +from os.path import exists, join +import sh + + +class LibZMQRecipe(Recipe): + version = '4.1.4' + url = 'http://download.zeromq.org/zeromq-{version}.tar.gz' + depends = [] + + def should_build(self, arch): + super(LibZMQRecipe, self).should_build(arch) + return True + return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libzmq.so')) + + def build_arch(self, arch): + super(LibZMQRecipe, self).build_arch(arch) + env = self.get_recipe_env(arch) + # + # 
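+ # (presumably kept for reference only: the configure call below
+ # passes '--with-libsodium=no', so this libsodium block stays disabled)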
libsodium_recipe = Recipe.get_recipe('libsodium', self.ctx) + # libsodium_dir = libsodium_recipe.get_build_dir(arch.arch) + # env['sodium_CFLAGS'] = '-I{}'.format(join( + # libsodium_dir, 'src')) + # env['sodium_LDLAGS'] = '-L{}'.format(join( + # libsodium_dir, 'src', 'libsodium', '.libs')) + + curdir = self.get_build_dir(arch.arch) + prefix = join(curdir, "install") + with current_directory(curdir): + bash = sh.Command('sh') + shprint( + bash, './configure', + '--host=arm-linux-androideabi', + '--without-documentation', + '--prefix={}'.format(prefix), + '--with-libsodium=no', + _env=env) + shprint(sh.make, _env=env) + shprint(sh.make, 'install', _env=env) + shutil.copyfile('.libs/libzmq.so', join( + self.ctx.get_libs_dir(arch.arch), 'libzmq.so')) + + bootstrap_obj_dir = join(self.ctx.bootstrap.build_dir, 'obj', 'local', arch.arch) + ensure_dir(bootstrap_obj_dir) + shutil.copyfile( + '{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}/libgnustl_shared.so'.format( + self.ctx.ndk_dir, self.ctx.toolchain_version, arch), + join(bootstrap_obj_dir, 'libgnustl_shared.so')) + + # Copy libgnustl_shared.so + with current_directory(self.get_build_dir(arch.arch)): + sh.cp( + "{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/libgnustl_shared.so".format(ctx=self.ctx, arch=arch), + self.ctx.get_libs_dir(arch.arch) + ) + + def get_recipe_env(self, arch): + # XXX should stl be configuration for the toolchain itself? + env = super(LibZMQRecipe, self).get_recipe_env(arch) + env['CFLAGS'] += ' -Os' + env['CXXFLAGS'] += ' -Os -fPIC -fvisibility=default' + env['CXXFLAGS'] += ' -I{}/sources/cxx-stl/gnu-libstdc++/{}/include'.format( + self.ctx.ndk_dir, self.ctx.toolchain_version) + env['CXXFLAGS'] += ' -I{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}/include'.format( + self.ctx.ndk_dir, self.ctx.toolchain_version, arch) + env['CXXFLAGS'] += ' -L{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}'.format( + self.ctx.ndk_dir, self.ctx.toolchain_version, arch) + env['CXXFLAGS'] += ' -lgnustl_shared' + env['LDFLAGS'] += ' -L{}/sources/cxx-stl/gnu-libstdc++/{}/libs/{}'.format( + self.ctx.ndk_dir, self.ctx.toolchain_version, arch) + return env + + +recipe = LibZMQRecipe() diff --git a/p4a/pythonforandroidold/recipes/lxml/__init__.py b/p4a/pythonforandroidold/recipes/lxml/__init__.py new file mode 100644 index 0000000..6d4b91c --- /dev/null +++ b/p4a/pythonforandroidold/recipes/lxml/__init__.py @@ -0,0 +1,66 @@ +from pythonforandroid.recipe import Recipe, CompiledComponentsPythonRecipe +from os.path import exists, join +from os import uname + + +class LXMLRecipe(CompiledComponentsPythonRecipe): + version = '4.2.5' + url = 'https://pypi.python.org/packages/source/l/lxml/lxml-{version}.tar.gz' # noqa + depends = ['librt', 'libxml2', 'libxslt', 'setuptools'] + name = 'lxml' + + call_hostpython_via_targetpython = False # Due to setuptools + + def should_build(self, arch): + super(LXMLRecipe, self).should_build(arch) + + py_ver = self.ctx.python_recipe.major_minor_version_string + build_platform = '{system}-{machine}'.format( + system=uname()[0], machine=uname()[-1]).lower() + build_dir = join(self.get_build_dir(arch.arch), 'build', + 'lib.' 
+ build_platform + '-' + py_ver, 'lxml') + py_libs = ['_elementpath.so', 'builder.so', 'etree.so', 'objectify.so'] + + return not all([exists(join(build_dir, lib)) for lib in py_libs]) + + def get_recipe_env(self, arch): + env = super(LXMLRecipe, self).get_recipe_env(arch) + + # libxslt flags + libxslt_recipe = Recipe.get_recipe('libxslt', self.ctx) + libxslt_build_dir = libxslt_recipe.get_build_dir(arch.arch) + + cflags = ' -I' + libxslt_build_dir + cflags += ' -I' + join(libxslt_build_dir, 'libxslt') + cflags += ' -I' + join(libxslt_build_dir, 'libexslt') + + env['LDFLAGS'] += ' -L' + join(libxslt_build_dir, 'libxslt', '.libs') + env['LDFLAGS'] += ' -L' + join(libxslt_build_dir, 'libexslt', '.libs') + env['LIBS'] = '-lxslt -lexslt' + + # libxml2 flags + libxml2_recipe = Recipe.get_recipe('libxml2', self.ctx) + libxml2_build_dir = libxml2_recipe.get_build_dir(arch.arch) + libxml2_libs_dir = join(libxml2_build_dir, '.libs') + + cflags += ' -I' + libxml2_build_dir + cflags += ' -I' + join(libxml2_build_dir, 'include') + cflags += ' -I' + join(libxml2_build_dir, 'include', 'libxml') + cflags += ' -I' + self.get_build_dir(arch.arch) + env['LDFLAGS'] += ' -L' + libxml2_libs_dir + env['LIBS'] += ' -lxml2' + + # android's ndk flags + ndk_lib_dir = join(self.ctx.ndk_platform, 'usr', 'lib') + ndk_include_dir = join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include') + cflags += ' -I' + ndk_include_dir + env['LDFLAGS'] += ' -L' + ndk_lib_dir + env['LIBS'] += ' -lz -lm -lc' + + if cflags not in env['CFLAGS']: + env['CFLAGS'] += cflags + + return env + + +recipe = LXMLRecipe() diff --git a/p4a/pythonforandroidold/recipes/m2crypto/__init__.py b/p4a/pythonforandroidold/recipes/m2crypto/__init__.py new file mode 100644 index 0000000..653eeca --- /dev/null +++ b/p4a/pythonforandroidold/recipes/m2crypto/__init__.py @@ -0,0 +1,40 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.toolchain import current_directory +from pythonforandroid.logger import shprint, info +import glob +import sh + + +class M2CryptoRecipe(CompiledComponentsPythonRecipe): + version = '0.30.1' + url = 'https://pypi.python.org/packages/source/M/M2Crypto/M2Crypto-{version}.tar.gz' + depends = ['openssl', 'setuptools'] + site_packages_name = 'M2Crypto' + call_hostpython_via_targetpython = False + + def build_compiled_components(self, arch): + info('Building compiled components in {}'.format(self.name)) + + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + # Build M2Crypto + hostpython = sh.Command(self.hostpython_location) + if self.install_in_hostpython: + shprint(hostpython, 'setup.py', 'clean', '--all', _env=env) + shprint(hostpython, 'setup.py', self.build_cmd, + '-p' + arch.arch, + '-c' + 'unix', + '-o' + env['OPENSSL_BUILD_PATH'], + '-L' + env['OPENSSL_BUILD_PATH'], + _env=env, *self.setup_extra_args) + build_dir = glob.glob('build/lib.*')[0] + shprint(sh.find, build_dir, '-name', '"*.o"', '-exec', + env['STRIP'], '{}', ';', _env=env) + + def get_recipe_env(self, arch): + env = super(M2CryptoRecipe, self).get_recipe_env(arch) + env['OPENSSL_BUILD_PATH'] = self.get_recipe('openssl', self.ctx).get_build_dir(arch.arch) + return env + + +recipe = M2CryptoRecipe() diff --git a/p4a/pythonforandroidold/recipes/msgpack-python/__init__.py b/p4a/pythonforandroidold/recipes/msgpack-python/__init__.py new file mode 100644 index 0000000..cdd024b --- /dev/null +++ b/p4a/pythonforandroidold/recipes/msgpack-python/__init__.py @@ -0,0 +1,11 @@ +from 
pythonforandroid.recipe import CythonRecipe + + +class MsgPackRecipe(CythonRecipe): + version = '0.4.7' + url = 'https://pypi.python.org/packages/source/m/msgpack-python/msgpack-python-{version}.tar.gz' + depends = ["setuptools"] + call_hostpython_via_targetpython = False + + +recipe = MsgPackRecipe() diff --git a/p4a/pythonforandroidold/recipes/mysqldb/__init__.py b/p4a/pythonforandroidold/recipes/mysqldb/__init__.py new file mode 100644 index 0000000..f084585 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/mysqldb/__init__.py @@ -0,0 +1,52 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from os.path import join + + +class MysqldbRecipe(CompiledComponentsPythonRecipe): + name = 'mysqldb' + version = '1.2.5' + url = 'https://pypi.python.org/packages/source/M/MySQL-python/MySQL-python-{version}.zip' + site_packages_name = 'MySQLdb' + + depends = ['setuptools', 'libmysqlclient'] + + patches = ['override-mysql-config.patch', + 'disable-zip.patch'] + + # call_hostpython_via_targetpython = False + + def convert_newlines(self, filename): + print('converting newlines in {}'.format(filename)) + with open(filename, 'rb') as f: + data = f.read() + with open(filename, 'wb') as f: + f.write(data.replace(b'\r\n', b'\n').replace(b'\r', b'\n')) + + def prebuild_arch(self, arch): + super(MysqldbRecipe, self).prebuild_arch(arch) + setupbase = join(self.get_build_dir(arch.arch), 'setup') + self.convert_newlines(setupbase + '.py') + self.convert_newlines(setupbase + '_posix.py') + + def get_recipe_env(self, arch=None): + env = super(MysqldbRecipe, self).get_recipe_env(arch) + + hostpython = self.get_recipe('hostpython2', self.ctx) + # TODO: fix hardcoded path + env['PYTHONPATH'] = (join(hostpython.get_build_dir(arch.arch), + 'build', 'lib.linux-x86_64-2.7') + + ':' + env.get('PYTHONPATH', '')) + + libmysql = self.get_recipe('libmysqlclient', self.ctx) + mydir = join(libmysql.get_build_dir(arch.arch), 'libmysqlclient') + # env['CFLAGS'] += ' -I' + join(mydir, 'include') + # env['LDFLAGS'] += ' -L' + join(mydir) + libdir = self.ctx.get_libs_dir(arch.arch) + env['MYSQL_libs'] = env['MYSQL_libs_r'] = '-L' + libdir + ' -lmysql' + env['MYSQL_cflags'] = env['MYSQL_include'] = '-I' + join(mydir, + 'include') + + return env + + +recipe = MysqldbRecipe() diff --git a/p4a/pythonforandroidold/recipes/mysqldb/disable-zip.patch b/p4a/pythonforandroidold/recipes/mysqldb/disable-zip.patch new file mode 100644 index 0000000..51f804e --- /dev/null +++ b/p4a/pythonforandroidold/recipes/mysqldb/disable-zip.patch @@ -0,0 +1,8 @@ +--- mysqldb/setup.py 2014-01-02 13:52:50.000000000 -0600 ++++ b/setup.py 2016-01-13 15:48:36.781216443 -0600 +@@ -18,4 +18,5 @@ + metadata['ext_modules'] = [ + setuptools.Extension(sources=['_mysql.c'], **options)] + metadata['long_description'] = metadata['long_description'].replace(r'\n', '') ++metadata['zip_safe'] = False + setuptools.setup(**metadata) diff --git a/p4a/pythonforandroidold/recipes/mysqldb/override-mysql-config.patch b/p4a/pythonforandroidold/recipes/mysqldb/override-mysql-config.patch new file mode 100644 index 0000000..195ebda --- /dev/null +++ b/p4a/pythonforandroidold/recipes/mysqldb/override-mysql-config.patch @@ -0,0 +1,21 @@ +--- mysqldb/setup_posix.py 2014-01-02 13:52:50.000000000 -0600 ++++ b/setup_posix.py 2016-01-13 15:48:18.732883429 -0600 +@@ -13,17 +13,7 @@ + return "-%s" % f + + def mysql_config(what): +- from os import popen +- +- f = popen("%s --%s" % (mysql_config.path, what)) +- data = f.read().strip().split() +- ret = f.close() +- if ret: 
+- if ret/256: +- data = [] +- if ret/256 > 1: +- raise EnvironmentError("%s not found" % (mysql_config.path,)) +- return data ++ return os.environ['MYSQL_' + what.replace('-', '_')].strip().split() + mysql_config.path = "mysql_config" + + def get_config(): diff --git a/p4a/pythonforandroidold/recipes/ndghttpsclient b/p4a/pythonforandroidold/recipes/ndghttpsclient new file mode 100644 index 0000000..35e996f --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ndghttpsclient @@ -0,0 +1,9 @@ +from pythonforandroid.recipe import PythonRecipe + +class NdgHttpsClientRecipe(PythonRecipe): + version = '0.4.0' + url = 'https://pypi.python.org/packages/source/n/ndg-httpsclient/ndg_httpsclient-{version}.tar.gz' + depends = ['python2', 'pyopenssl', 'cryptography'] + call_hostpython_via_targetpython = False + +recipe = NdgHttpsClientRecipe() diff --git a/p4a/pythonforandroidold/recipes/netifaces/__init__.py b/p4a/pythonforandroidold/recipes/netifaces/__init__.py new file mode 100644 index 0000000..8ad1382 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/netifaces/__init__.py @@ -0,0 +1,19 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class NetifacesRecipe(CompiledComponentsPythonRecipe): + + version = '0.10.9' + + url = 'https://files.pythonhosted.org/packages/source/n/netifaces/netifaces-{version}.tar.gz' + + depends = ['setuptools'] + + patches = ['fix-build.patch'] + + site_packages_name = 'netifaces' + + call_hostpython_via_targetpython = False + + +recipe = NetifacesRecipe() diff --git a/p4a/pythonforandroidold/recipes/netifaces/fix-build.patch b/p4a/pythonforandroidold/recipes/netifaces/fix-build.patch new file mode 100644 index 0000000..3404c4f --- /dev/null +++ b/p4a/pythonforandroidold/recipes/netifaces/fix-build.patch @@ -0,0 +1,11 @@ +--- netifaces/setup.py.orig 2018-05-02 09:45:09.000000000 +0200 ++++ netifaces/setup.py 2018-12-11 14:12:02.785808692 +0100 +@@ -55,7 +55,7 @@ + self.check_requirements() + build_ext.build_extensions(self) + +- def test_build(self, contents, link=True, execute=False, libraries=None, ++ def test_build(self, contents, link=False, execute=False, libraries=None, + include_dirs=None, library_dirs=None): + name = os.path.join(self.build_temp, 'conftest-%s.c' % self.conftestidx) + self.conftestidx += 1 diff --git a/p4a/pythonforandroidold/recipes/numpy/__init__.py b/p4a/pythonforandroidold/recipes/numpy/__init__.py new file mode 100644 index 0000000..6b6e6b3 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/numpy/__init__.py @@ -0,0 +1,58 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from multiprocessing import cpu_count +from os.path import join + + +class NumpyRecipe(CompiledComponentsPythonRecipe): + + version = '1.15.1' + url = 'https://pypi.python.org/packages/source/n/numpy/numpy-{version}.zip' + site_packages_name = 'numpy' + depends = [('python2', 'python3', 'python3crystax')] + + patches = [ + join('patches', 'fix-numpy.patch'), + join('patches', 'prevent_libs_check.patch'), + join('patches', 'ar.patch'), + join('patches', 'lib.patch'), + join('patches', 'python-fixes.patch') + ] + + def build_compiled_components(self, arch): + self.setup_extra_args = ['-j', str(cpu_count())] + super(NumpyRecipe, self).build_compiled_components(arch) + self.setup_extra_args = [] + + def rebuild_compiled_components(self, arch, env): + self.setup_extra_args = ['-j', str(cpu_count())] + super(NumpyRecipe, self).rebuild_compiled_components(arch, env) + self.setup_extra_args = [] + + def get_recipe_env(self, arch): + env 
= super(NumpyRecipe, self).get_recipe_env(arch) + + flags = " -L{} --sysroot={}".format( + join(self.ctx.ndk_platform, 'usr', 'lib'), + self.ctx.ndk_platform + ) + + py_ver = self.ctx.python_recipe.major_minor_version_string + py_inc_dir = self.ctx.python_recipe.include_root(arch.arch) + py_lib_dir = self.ctx.python_recipe.link_root(arch.arch) + if self.ctx.ndk == 'crystax': + src_dir = join(self.ctx.ndk_dir, 'sources') + flags += " -I{}".format(join(src_dir, 'crystax', 'include')) + flags += " -L{}".format(join(src_dir, 'crystax', 'libs', arch.arch)) + flags += ' -I{}'.format(py_inc_dir) + flags += ' -L{} -lpython{}'.format(py_lib_dir, py_ver) + if 'python3' in self.ctx.python_recipe.name: + flags += 'm' + + if flags not in env['CC']: + env['CC'] += flags + if flags not in env['LD']: + env['LD'] += flags + ' -shared' + return env + + +recipe = NumpyRecipe() diff --git a/p4a/pythonforandroid/recipes/numpy/patches/ar.patch b/p4a/pythonforandroidold/recipes/numpy/patches/ar.patch similarity index 100% rename from p4a/pythonforandroid/recipes/numpy/patches/ar.patch rename to p4a/pythonforandroidold/recipes/numpy/patches/ar.patch diff --git a/p4a/pythonforandroid/recipes/numpy/patches/fix-numpy.patch b/p4a/pythonforandroidold/recipes/numpy/patches/fix-numpy.patch similarity index 100% rename from p4a/pythonforandroid/recipes/numpy/patches/fix-numpy.patch rename to p4a/pythonforandroidold/recipes/numpy/patches/fix-numpy.patch diff --git a/p4a/pythonforandroid/recipes/numpy/patches/lib.patch b/p4a/pythonforandroidold/recipes/numpy/patches/lib.patch similarity index 100% rename from p4a/pythonforandroid/recipes/numpy/patches/lib.patch rename to p4a/pythonforandroidold/recipes/numpy/patches/lib.patch diff --git a/p4a/pythonforandroid/recipes/numpy/patches/prevent_libs_check.patch b/p4a/pythonforandroidold/recipes/numpy/patches/prevent_libs_check.patch similarity index 100% rename from p4a/pythonforandroid/recipes/numpy/patches/prevent_libs_check.patch rename to p4a/pythonforandroidold/recipes/numpy/patches/prevent_libs_check.patch diff --git a/p4a/pythonforandroid/recipes/numpy/patches/python-fixes.patch b/p4a/pythonforandroidold/recipes/numpy/patches/python-fixes.patch similarity index 100% rename from p4a/pythonforandroid/recipes/numpy/patches/python-fixes.patch rename to p4a/pythonforandroidold/recipes/numpy/patches/python-fixes.patch diff --git a/p4a/pythonforandroidold/recipes/omemo-backend-signal/__init__.py b/p4a/pythonforandroidold/recipes/omemo-backend-signal/__init__.py new file mode 100644 index 0000000..c87034c --- /dev/null +++ b/p4a/pythonforandroidold/recipes/omemo-backend-signal/__init__.py @@ -0,0 +1,22 @@ +from pythonforandroid.recipe import PythonRecipe + + +class OmemoBackendSignalRecipe(PythonRecipe): + name = 'omemo-backend-signal' + version = '0.2.2' + url = 'https://pypi.python.org/packages/source/o/omemo-backend-signal/omemo-backend-signal-{version}.tar.gz' + site_packages_name = 'omemo-backend-signal' + depends = [ + 'setuptools', + 'protobuf_cpp', + 'x3dh', + 'DoubleRatchet', + 'hkdf==0.0.3', + 'cryptography', + 'omemo', + ] + patches = ['wireformat.patch'] + call_hostpython_via_targetpython = False + + +recipe = OmemoBackendSignalRecipe() diff --git a/p4a/pythonforandroid/recipes/omemo-backend-signal/wireformat.patch b/p4a/pythonforandroidold/recipes/omemo-backend-signal/wireformat.patch similarity index 100% rename from p4a/pythonforandroid/recipes/omemo-backend-signal/wireformat.patch rename to p4a/pythonforandroidold/recipes/omemo-backend-signal/wireformat.patch diff 
--git a/p4a/pythonforandroidold/recipes/omemo/__init__.py b/p4a/pythonforandroidold/recipes/omemo/__init__.py new file mode 100644 index 0000000..a940105 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/omemo/__init__.py @@ -0,0 +1,17 @@ +from pythonforandroid.recipe import PythonRecipe + + +class OmemoRecipe(PythonRecipe): + name = 'omemo' + version = '0.10.3' + url = 'https://pypi.python.org/packages/source/O/OMEMO/OMEMO-{version}.tar.gz' + site_packages_name = 'omemo' + depends = [ + 'setuptools', + 'x3dh', + 'cryptography', + ] + call_hostpython_via_targetpython = False + + +recipe = OmemoRecipe() diff --git a/p4a/pythonforandroidold/recipes/openal/__init__.py b/p4a/pythonforandroidold/recipes/openal/__init__.py new file mode 100644 index 0000000..ad93065 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/openal/__init__.py @@ -0,0 +1,39 @@ +from pythonforandroid.recipe import NDKRecipe +from pythonforandroid.toolchain import current_directory, shprint +from os.path import join +import os +import sh + + +class OpenALRecipe(NDKRecipe): + version = '1.18.2' + url = 'https://github.com/kcat/openal-soft/archive/openal-soft-{version}.tar.gz' + + generated_libraries = ['libopenal.so'] + + def prebuild_arch(self, arch): + # we need to build native tools for host system architecture + with current_directory(join(self.get_build_dir(arch.arch), 'native-tools')): + shprint(sh.cmake, '.', _env=os.environ) + shprint(sh.make, _env=os.environ) + + def build_arch(self, arch): + with current_directory(self.get_build_dir(arch.arch)): + env = self.get_recipe_env(arch) + cmake_args = [ + '-DCMAKE_TOOLCHAIN_FILE={}'.format('XCompile-Android.txt'), + '-DHOST={}'.format(self.ctx.toolchain_prefix) + ] + if self.ctx.ndk == 'crystax': + # avoids a segfault in libcrystax when calling lrintf + cmake_args += ['-DHAVE_LRINTF=0'] + shprint( + sh.cmake, '.', + *cmake_args, + _env=env + ) + shprint(sh.make, _env=env) + self.install_libs(arch, 'libopenal.so') + + +recipe = OpenALRecipe() diff --git a/p4a/pythonforandroidold/recipes/opencv/__init__.py b/p4a/pythonforandroidold/recipes/opencv/__init__.py new file mode 100644 index 0000000..6932bc2 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/opencv/__init__.py @@ -0,0 +1,135 @@ +from os.path import join +import sh +from pythonforandroid.recipe import NDKRecipe +from pythonforandroid.toolchain import ( + current_directory, + shprint, +) +from multiprocessing import cpu_count + + +class OpenCVRecipe(NDKRecipe): + ''' + .. versionchanged:: 0.7.1 + rewrote recipe to support the python bindings (cv2.so) and enable the + build of most of the libraries of the opencv's package, so we can + process images, videos, objects, photos... 
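+
+ The recipe drives opencv's own CMake project with the NDK's
+ android.toolchain.cmake file and installs the generated cv2 module
+ directly into python's site-packages (see build_arch below).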
+ ''' + version = '4.0.1' + url = 'https://github.com/opencv/opencv/archive/{version}.zip' + depends = ['numpy'] + patches = ['patches/p4a_build.patch'] + generated_libraries = [ + 'libopencv_features2d.so', + 'libopencv_imgproc.so', + 'libopencv_stitching.so', + 'libopencv_calib3d.so', + 'libopencv_flann.so', + 'libopencv_ml.so', + 'libopencv_videoio.so', + 'libopencv_core.so', + 'libopencv_highgui.so', + 'libopencv_objdetect.so', + 'libopencv_video.so', + 'libopencv_dnn.so', + 'libopencv_imgcodecs.so', + 'libopencv_photo.so' + ] + + def get_lib_dir(self, arch): + return join(self.get_build_dir(arch.arch), 'build', 'lib', arch.arch) + + def get_recipe_env(self, arch): + env = super(OpenCVRecipe, self).get_recipe_env(arch) + env['ANDROID_NDK'] = self.ctx.ndk_dir + env['ANDROID_SDK'] = self.ctx.sdk_dir + return env + + def build_arch(self, arch): + build_dir = join(self.get_build_dir(arch.arch), 'build') + shprint(sh.mkdir, '-p', build_dir) + with current_directory(build_dir): + env = self.get_recipe_env(arch) + + python_major = self.ctx.python_recipe.version[0] + python_include_root = self.ctx.python_recipe.include_root(arch.arch) + python_site_packages = self.ctx.get_site_packages_dir() + python_link_root = self.ctx.python_recipe.link_root(arch.arch) + python_link_version = self.ctx.python_recipe.major_minor_version_string + if 'python3' in self.ctx.python_recipe.name: + python_link_version += 'm' + python_library = join(python_link_root, + 'libpython{}.so'.format(python_link_version)) + python_include_numpy = join(python_site_packages, + 'numpy', 'core', 'include') + + shprint(sh.cmake, + '-DP4A=ON', + '-DANDROID_ABI={}'.format(arch.arch), + '-DANDROID_STANDALONE_TOOLCHAIN={}'.format(self.ctx.ndk_dir), + '-DANDROID_NATIVE_API_LEVEL={}'.format(self.ctx.ndk_api), + '-DANDROID_EXECUTABLE={}/tools/android'.format(env['ANDROID_SDK']), + + '-DCMAKE_TOOLCHAIN_FILE={}'.format( + join(self.ctx.ndk_dir, 'build', 'cmake', + 'android.toolchain.cmake')), + # Make the linkage with our python library, otherwise we + # will get dlopen error when trying to import cv2's module. + '-DCMAKE_SHARED_LINKER_FLAGS=-L{path} -lpython{version}'.format( + path=python_link_root, + version=python_link_version), + + '-DBUILD_WITH_STANDALONE_TOOLCHAIN=ON', + # Force to build as shared libraries the cv2's dependant + # libs or we will not be able to link with our python + '-DBUILD_SHARED_LIBS=ON', + '-DBUILD_STATIC_LIBS=OFF', + + # Disable some opencv's features + '-DBUILD_opencv_java=OFF', + '-DBUILD_opencv_java_bindings_generator=OFF', + # '-DBUILD_opencv_highgui=OFF', + # '-DBUILD_opencv_imgproc=OFF', + # '-DBUILD_opencv_flann=OFF', + '-DBUILD_TESTS=OFF', + '-DBUILD_PERF_TESTS=OFF', + '-DENABLE_TESTING=OFF', + '-DBUILD_EXAMPLES=OFF', + '-DBUILD_ANDROID_EXAMPLES=OFF', + + # Force to only build our version of python + '-DBUILD_OPENCV_PYTHON{major}=ON'.format(major=python_major), + '-DBUILD_OPENCV_PYTHON{major}=OFF'.format( + major='2' if python_major == '3' else '3'), + + # Force to install the `cv2.so` library directly into + # python's site packages (otherwise the cv2's loader fails + # on finding the cv2.so library) + '-DOPENCV_SKIP_PYTHON_LOADER=ON', + '-DOPENCV_PYTHON{major}_INSTALL_PATH={site_packages}'.format( + major=python_major, site_packages=python_site_packages), + + # Define python's paths for: exe, lib, includes, numpy... 
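+ # (passing them explicitly stops opencv's cmake scripts from
+ # probing the build host and picking up the desktop python
+ # instead of the cross-compiled one)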
+ '-DPYTHON_DEFAULT_EXECUTABLE={}'.format(self.ctx.hostpython), + '-DPYTHON{major}_EXECUTABLE={host_python}'.format( + major=python_major, host_python=self.ctx.hostpython), + '-DPYTHON{major}_INCLUDE_PATH={include_path}'.format( + major=python_major, include_path=python_include_root), + '-DPYTHON{major}_LIBRARIES={python_lib}'.format( + major=python_major, python_lib=python_library), + '-DPYTHON{major}_NUMPY_INCLUDE_DIRS={numpy_include}'.format( + major=python_major, numpy_include=python_include_numpy), + '-DPYTHON{major}_PACKAGES_PATH={site_packages}'.format( + major=python_major, site_packages=python_site_packages), + + self.get_build_dir(arch.arch), + _env=env) + shprint(sh.make, '-j' + str(cpu_count()), 'opencv_python' + python_major) + # Install python bindings (cv2.so) + shprint(sh.cmake, '-DCOMPONENT=python', '-P', './cmake_install.cmake') + # Copy third party shared libs that we need in our final apk + sh.cp('-a', sh.glob('./lib/{}/lib*.so'.format(arch.arch)), + self.ctx.get_libs_dir(arch.arch)) + + +recipe = OpenCVRecipe() diff --git a/p4a/pythonforandroidold/recipes/opencv/patches/p4a_build.patch b/p4a/pythonforandroidold/recipes/opencv/patches/p4a_build.patch new file mode 100644 index 0000000..fd60c01 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/opencv/patches/p4a_build.patch @@ -0,0 +1,33 @@ +This patch allow that the opencv's build command correctly detects our version +of python, so we can successfully build the python bindings (cv2.so) +--- opencv-4.0.1/cmake/OpenCVDetectPython.cmake.orig 2018-12-22 08:03:30.000000000 +0100 ++++ opencv-4.0.1/cmake/OpenCVDetectPython.cmake 2019-01-31 11:33:10.896502978 +0100 +@@ -175,7 +175,7 @@ if(NOT ${found}) + endif() + endif() + +- if(NOT ANDROID AND NOT IOS) ++ if(P4A OR NOT ANDROID AND NOT IOS) + if(CMAKE_HOST_UNIX) + execute_process(COMMAND ${_executable} -c "from distutils.sysconfig import *; print(get_python_lib())" + RESULT_VARIABLE _cvpy_process +@@ -244,7 +244,7 @@ if(NOT ${found}) + OUTPUT_STRIP_TRAILING_WHITESPACE) + endif() + endif() +- endif(NOT ANDROID AND NOT IOS) ++ endif(P4A OR NOT ANDROID AND NOT IOS) + endif() + + # Export return values +--- opencv-4.0.1/modules/python/CMakeLists.txt.orig 2018-12-22 08:03:30.000000000 +0100 ++++ opencv-4.0.1/modules/python/CMakeLists.txt 2019-01-31 11:47:17.100494908 +0100 +@@ -3,7 +3,7 @@ + # ---------------------------------------------------------------------------- + if(DEFINED OPENCV_INITIAL_PASS) # OpenCV build + +-if(ANDROID OR APPLE_FRAMEWORK OR WINRT) ++if(ANDROID AND NOT P4A OR APPLE_FRAMEWORK OR WINRT) + ocv_module_disable_(python2) + ocv_module_disable_(python3) + return() diff --git a/p4a/pythonforandroidold/recipes/openssl/__init__.py b/p4a/pythonforandroidold/recipes/openssl/__init__.py new file mode 100644 index 0000000..3a9505f --- /dev/null +++ b/p4a/pythonforandroidold/recipes/openssl/__init__.py @@ -0,0 +1,173 @@ +from os.path import join + +from pythonforandroid.toolchain import Recipe, shprint, current_directory +import sh + + +class OpenSSLRecipe(Recipe): + ''' + The OpenSSL libraries for python-for-android. This recipe will generate the + following libraries as shared libraries (*.so): + + - crypto + - ssl + + The generated openssl libraries are versioned, where the version is the + recipe attribute :attr:`version` e.g.: ``libcrypto1.1.so``, + ``libssl1.1.so``...so...to link your recipe with the openssl libs, + remember to add the version at the end, e.g.: + ``-lcrypto1.1 -lssl1.1``. 
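+ The version suffix matters on android: the system ships its own
+ ``libcrypto.so``/``libssl.so`` and may already have loaded them into
+ the process, so unversioned copies of ours could clash with them.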
Or better, you could do it dynamically
+ using the methods: :meth:`include_flags`, :meth:`link_dirs_flags` and
+ :meth:`link_libs_flags`.
+
+ .. note:: the python2legacy version is too old to support openssl 1.1+, so
+ we must use version 1.0.x. Also python3crystax is not building
+ successfully with openssl libs 1.1+, so we use the legacy version as
+ we do with python2legacy.
+
+ .. warning:: This recipe is very sensitive because it is used for our core
+ recipes, the python recipes. The API used should match the one
+ used in our python build, otherwise we will be unable to build the
+ _ssl.so python module.
+
+ .. versionchanged:: 0.6.0
+
+ - The gcc compiler has been deprecated in favour of clang and libraries
+ updated to version 1.1.1 (LTS - supported until 11th September 2023)
+ - Added two new methods to make it easier to link with openssl:
+ :meth:`include_flags` and :meth:`link_flags`
+ - subclassed versioned_url
+ - Adapted method :meth:`select_build_arch` to API 21+
+ - Add ability to build a legacy version of the openssl libs when using
+ python2legacy or python3crystax.
+
+ '''
+
+ standard_version = '1.1'
+ '''the major minor version used to link our recipes'''
+ legacy_version = '1.0'
+ '''the major minor version used to link our recipes when using
+ python2legacy or python3crystax'''
+
+ standard_url_version = '1.1.1'
+ '''the version used to download our libraries'''
+ legacy_url_version = '1.0.2q'
+ '''the version used to download our libraries when using python2legacy or
+ python3crystax'''
+
+ url = 'https://www.openssl.org/source/openssl-{url_version}.tar.gz'
+
+ @property
+ def use_legacy(self):
+ if not self.ctx.recipe_build_order:
+ return False
+ return any([i for i in ('python2legacy', 'python3crystax') if
+ i in self.ctx.recipe_build_order])
+
+ @property
+ def version(self):
+ if self.use_legacy:
+ return self.legacy_version
+ return self.standard_version
+
+ @property
+ def url_version(self):
+ if self.use_legacy:
+ return self.legacy_url_version
+ return self.standard_url_version
+
+ @property
+ def versioned_url(self):
+ if self.url is None:
+ return None
+ return self.url.format(url_version=self.url_version)
+
+ def get_build_dir(self, arch):
+ return join(self.get_build_container_dir(arch), self.name + self.version)
+
+ def include_flags(self, arch):
+ '''Returns a string with the include folders'''
+ openssl_includes = join(self.get_build_dir(arch.arch), 'include')
+ return (' -I' + openssl_includes +
+ ' -I' + join(openssl_includes, 'internal') +
+ ' -I' + join(openssl_includes, 'openssl'))
+
+ def link_dirs_flags(self, arch):
+ '''Returns a string with the appropriate `-L` to link
+ with the openssl libs. This string is usually added to the environment
+ variable `LDFLAGS`'''
+ return ' -L' + self.get_build_dir(arch.arch)
+
+ def link_libs_flags(self):
+ '''Returns a string with the appropriate `-l` flags to link with
+ the openssl libs.
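+ (For the standard build that is ``-lcrypto1.1 -lssl1.1``.)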
This string is usually added to the environment + variable `LIBS`''' + return ' -lcrypto{version} -lssl{version}'.format(version=self.version) + + def link_flags(self, arch): + '''Returns a string with the flags to link with the openssl libraries + in the format: `-L -l`''' + return self.link_dirs_flags(arch) + self.link_libs_flags() + + def should_build(self, arch): + return not self.has_libs(arch, 'libssl' + self.version + '.so', + 'libcrypto' + self.version + '.so') + + def get_recipe_env(self, arch=None): + env = super(OpenSSLRecipe, self).get_recipe_env(arch, clang=not self.use_legacy) + env['OPENSSL_VERSION'] = self.version + env['MAKE'] = 'make' # This removes the '-j5', which isn't safe + if self.use_legacy: + env['CFLAGS'] += ' ' + env['LDFLAGS'] + env['CC'] += ' ' + env['LDFLAGS'] + else: + env['ANDROID_NDK'] = self.ctx.ndk_dir + return env + + def select_build_arch(self, arch): + aname = arch.arch + if 'arm64' in aname: + return 'android-arm64' if not self.use_legacy else 'linux-aarch64' + if 'v7a' in aname: + return 'android-arm' if not self.use_legacy else 'android-armv7' + if 'arm' in aname: + return 'android' + if 'x86_64' in aname: + return 'android-x86_64' + if 'x86' in aname: + return 'android-x86' + return 'linux-armv4' + + def build_arch(self, arch): + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + # sh fails with code 255 trying to execute ./Configure + # so instead we manually run perl passing in Configure + perl = sh.Command('perl') + buildarch = self.select_build_arch(arch) + # XXX if we don't have no-asm, using clang and ndk-15c, i got: + # crypto/aes/bsaes-armv7.S:1372:14: error: immediate operand must be in the range [0,4095] + # add r8, r6, #.LREVM0SR-.LM0 @ borrow r8 + # ^ + # crypto/aes/bsaes-armv7.S:1434:14: error: immediate operand must be in the range [0,4095] + # sub r6, r8, #.LREVM0SR-.LSR @ pass constants + config_args = ['shared', 'no-dso', 'no-asm'] + if self.use_legacy: + config_args.append('no-krb5') + config_args.append(buildarch) + if not self.use_legacy: + config_args.append('-D__ANDROID_API__={}'.format(self.ctx.ndk_api)) + shprint(perl, 'Configure', *config_args, _env=env) + self.apply_patch( + 'disable-sover{}.patch'.format( + '-legacy' if self.use_legacy else ''), arch.arch) + if self.use_legacy: + self.apply_patch('rename-shared-lib.patch', arch.arch) + + shprint(sh.make, 'build_libs', _env=env) + + self.install_libs(arch, 'libssl' + self.version + '.so', + 'libcrypto' + self.version + '.so') + + +recipe = OpenSSLRecipe() diff --git a/p4a/pythonforandroid/recipes/openssl/disable-sover-legacy.patch b/p4a/pythonforandroidold/recipes/openssl/disable-sover-legacy.patch similarity index 100% rename from p4a/pythonforandroid/recipes/openssl/disable-sover-legacy.patch rename to p4a/pythonforandroidold/recipes/openssl/disable-sover-legacy.patch diff --git a/p4a/pythonforandroidold/recipes/openssl/disable-sover.patch b/p4a/pythonforandroidold/recipes/openssl/disable-sover.patch new file mode 100644 index 0000000..d944483 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/openssl/disable-sover.patch @@ -0,0 +1,11 @@ +--- openssl/Makefile.orig 2018-10-20 22:49:40.418310423 +0200 ++++ openssl/Makefile 2018-10-20 22:50:23.347322403 +0200 +@@ -19,7 +19,7 @@ + SHLIB_MAJOR=1 + SHLIB_MINOR=1 + SHLIB_TARGET=linux-shared +-SHLIB_EXT=.so.$(SHLIB_VERSION_NUMBER) ++SHLIB_EXT=$(SHLIB_VERSION_NUMBER).so + SHLIB_EXT_SIMPLE=.so + SHLIB_EXT_IMPORT= + diff --git a/p4a/pythonforandroid/recipes/openssl/rename-shared-lib.patch 
b/p4a/pythonforandroidold/recipes/openssl/rename-shared-lib.patch similarity index 100% rename from p4a/pythonforandroid/recipes/openssl/rename-shared-lib.patch rename to p4a/pythonforandroidold/recipes/openssl/rename-shared-lib.patch diff --git a/p4a/pythonforandroid/recipes/pbkdf2/__init__.py b/p4a/pythonforandroidold/recipes/pbkdf2/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/pbkdf2/__init__.py rename to p4a/pythonforandroidold/recipes/pbkdf2/__init__.py diff --git a/p4a/pythonforandroidold/recipes/pil/__init__.py b/p4a/pythonforandroidold/recipes/pil/__init__.py new file mode 100644 index 0000000..f3ad2f4 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pil/__init__.py @@ -0,0 +1,79 @@ +from os.path import join, exists +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.toolchain import shprint +import sh + + +class PILRecipe(CompiledComponentsPythonRecipe): + name = 'pil' + version = '1.1.7' + url = 'http://effbot.org/downloads/Imaging-{version}.tar.gz' + depends = ['png', 'jpeg', 'setuptools'] + opt_depends = ['freetype'] + site_packages_name = 'PIL' + + patches = ['disable-tk.patch', + 'fix-directories.patch'] + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super(PILRecipe, self).get_recipe_env(arch, with_flags_in_cc) + + env['PYTHON_INCLUDE_ROOT'] = self.ctx.python_recipe.include_root(arch.arch) + env['PYTHON_LINK_ROOT'] = self.ctx.python_recipe.link_root(arch.arch) + + ndk_lib_dir = join(self.ctx.ndk_platform, 'usr', 'lib') + ndk_include_dir = join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include') + + png = self.get_recipe('png', self.ctx) + png_lib_dir = png.get_lib_dir(arch) + png_jni_dir = png.get_jni_dir(arch) + + jpeg = self.get_recipe('jpeg', self.ctx) + jpeg_inc_dir = jpeg_lib_dir = jpeg.get_build_dir(arch.arch) + + if 'freetype' in self.ctx.recipe_build_order: + freetype = self.get_recipe('freetype', self.ctx) + free_lib_dir = join(freetype.get_build_dir(arch.arch), 'objs', '.libs') + free_inc_dir = join(freetype.get_build_dir(arch.arch), 'include') + # hack freetype to be found by pil + freetype_link = join(free_inc_dir, 'freetype') + if not exists(freetype_link): + shprint(sh.ln, '-s', join(free_inc_dir), freetype_link) + + harfbuzz = self.get_recipe('harfbuzz', self.ctx) + harf_lib_dir = join(harfbuzz.get_build_dir(arch.arch), 'src', '.libs') + harf_inc_dir = harfbuzz.get_build_dir(arch.arch) + + env['FREETYPE_ROOT'] = '{}|{}'.format(free_lib_dir, free_inc_dir) + + env['JPEG_ROOT'] = '{}|{}'.format(jpeg_lib_dir, jpeg_inc_dir) + env['ZLIB_ROOT'] = '{}|{}'.format(ndk_lib_dir, ndk_include_dir) + + cflags = ' -std=c99' + cflags += ' -I{}'.format(png_jni_dir) + if 'freetype' in self.ctx.recipe_build_order: + cflags += ' -I{} -I{}'.format(harf_inc_dir, join(harf_inc_dir, 'src')) + cflags += ' -I{}'.format(free_inc_dir) + cflags += ' -I{}'.format(jpeg_inc_dir) + cflags += ' -I{}'.format(ndk_include_dir) + + py_v = self.ctx.python_recipe.major_minor_version_string + if py_v[0] == '3': + py_v += 'm' + + env['LIBS'] = ' -lpython{version} -lpng'.format(version=py_v) + if 'freetype' in self.ctx.recipe_build_order: + env['LIBS'] += ' -lfreetype -lharfbuzz' + env['LIBS'] += ' -ljpeg -lturbojpeg' + + env['LDFLAGS'] += ' -L{} -L{}'.format(env['PYTHON_LINK_ROOT'], png_lib_dir) + if 'freetype' in self.ctx.recipe_build_order: + env['LDFLAGS'] += ' -L{} -L{}'.format(harf_lib_dir, free_lib_dir) + env['LDFLAGS'] += ' -L{} -L{}'.format(jpeg_lib_dir, ndk_lib_dir) + + if cflags not in env['CFLAGS']: + 
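+ # (cflags may already be present if get_recipe_env() was
+ # called earlier for this arch, so only append them once)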
env['CFLAGS'] += cflags + return env + + +recipe = PILRecipe() diff --git a/p4a/pythonforandroid/recipes/pil/disable-tk.patch b/p4a/pythonforandroidold/recipes/pil/disable-tk.patch similarity index 100% rename from p4a/pythonforandroid/recipes/pil/disable-tk.patch rename to p4a/pythonforandroidold/recipes/pil/disable-tk.patch diff --git a/p4a/pythonforandroid/recipes/pil/fix-directories.patch b/p4a/pythonforandroidold/recipes/pil/fix-directories.patch similarity index 100% rename from p4a/pythonforandroid/recipes/pil/fix-directories.patch rename to p4a/pythonforandroidold/recipes/pil/fix-directories.patch diff --git a/p4a/pythonforandroidold/recipes/png/__init__.py b/p4a/pythonforandroidold/recipes/png/__init__.py new file mode 100644 index 0000000..5b69688 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/png/__init__.py @@ -0,0 +1,19 @@ +from pythonforandroid.recipe import NDKRecipe + + +class PngRecipe(NDKRecipe): + name = 'png' + # This version is the last `sha commit` published in the repo (it's more + # than one year old...) and it's for libpng version `1.6.29`. We set a + # commit for a version because the author of the github's repo never + # released/tagged it, despite He performed the necessary changes in + # master branch. + version = 'b43b4c6' + + # TODO: Try to move the repo to mainline + url = 'https://github.com/julienr/libpng-android/archive/{version}.zip' + + generated_libraries = ['libpng.a'] + + +recipe = PngRecipe() diff --git a/p4a/pythonforandroidold/recipes/preppy/__init__.py b/p4a/pythonforandroidold/recipes/preppy/__init__.py new file mode 100644 index 0000000..40afd68 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/preppy/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import PythonRecipe + + +class PreppyRecipe(PythonRecipe): + version = '27b7085' + url = 'https://bitbucket.org/rptlab/preppy/get/{version}.tar.gz' + depends = [] + patches = ['fix-setup.patch'] + call_hostpython_via_targetpython = False + + +recipe = PreppyRecipe() diff --git a/p4a/pythonforandroidold/recipes/preppy/fix-setup.patch b/p4a/pythonforandroidold/recipes/preppy/fix-setup.patch new file mode 100644 index 0000000..400614d --- /dev/null +++ b/p4a/pythonforandroidold/recipes/preppy/fix-setup.patch @@ -0,0 +1,44 @@ +--- a/setup.py 2017-11-20 13:53:42.000000000 +0000 ++++ b/setup.py 2017-11-20 14:00:44.862203526 +0000 +@@ -15,35 +15,6 @@ + + import preppy + version = preppy.VERSION +- scriptsPath=os.path.join(pkgDir,'build','scripts') +- +- def makeScript(modName): +- try: +- bat=sys.platform in ('win32','amd64') +- scriptPath=os.path.join(scriptsPath,modName+(bat and '.bat' or '')) +- exePath=sys.executable +- f = open(scriptPath,'w') +- try: +- if bat: +- text = '@echo off\nrem startup script for %s-%s\n"%s" -m "%s" %%*\n' % (modName,version,exePath,modName) +- else: +- text = '#!/bin/sh\n#startup script for %s-%s\nexec "%s" -m "%s" $*\n' % (modName,version,exePath,modName) +- f.write(text) +- finally: +- f.close() +- except: +- print('script for %s not created or erroneous' % modName) +- import traceback +- traceback.print_exc(file=sys.stdout) +- return None +- print('Created "%s"' % scriptPath) +- return scriptPath +- +- scripts = [] +- if not os.path.isdir(scriptsPath): os.makedirs(scriptsPath) +- scripts.extend(filter(None,[ +- makeScript('preppy'), +- ])) + + setup(name='preppy', + version=version, +@@ -52,5 +23,4 @@ + author_email='andy@reportlab.com', + url='http://bitbucket.org/rptlab/preppy', + py_modules=['preppy'], +- scripts=scripts, + ) diff --git 
a/p4a/pythonforandroidold/recipes/protobuf_cpp/__init__.py b/p4a/pythonforandroidold/recipes/protobuf_cpp/__init__.py
new file mode 100644
index 0000000..30ca030
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/protobuf_cpp/__init__.py
@@ -0,0 +1,146 @@
+from pythonforandroid.recipe import PythonRecipe
+from pythonforandroid.logger import shprint, info_notify
+from pythonforandroid.util import current_directory, shutil
+from os.path import exists, join
+import sh
+from multiprocessing import cpu_count
+from pythonforandroid.toolchain import info
+import sys
+import os
+
+
+class ProtobufCppRecipe(PythonRecipe):
+    name = 'protobuf_cpp'
+    version = '3.6.1'
+    url = 'https://github.com/google/protobuf/releases/download/v{version}/protobuf-python-{version}.tar.gz'
+    call_hostpython_via_targetpython = False
+    depends = ['cffi', 'setuptools']
+    site_packages_name = 'google/protobuf/pyext'
+    protoc_dir = None
+
+    def prebuild_arch(self, arch):
+        super(ProtobufCppRecipe, self).prebuild_arch(arch)
+
+        patch_mark = join(self.get_build_dir(arch.arch), '.protobuf-patched')
+        if self.ctx.python_recipe.name == 'python3' and not exists(patch_mark):
+            self.apply_patch('fix-python3-compatibility.patch', arch.arch)
+            shprint(sh.touch, patch_mark)
+
+        # During the build, the host needs to compile .proto files to .py,
+        # ideally with the same version as the protobuf runtime, or an
+        # older one. Because protoc is compiled for the target (i.e.
+        # Android), we need another binary that the host can run.
+        # To make this easier, we download a prebuilt protoc binary
+        # matching the host platform.
+
+        info_notify("Downloading protoc compiler for your platform")
+        url_prefix = "https://github.com/protocolbuffers/protobuf/releases/download/v{version}".format(version=self.version)
+        if sys.platform.startswith('linux'):
+            info_notify("GNU/Linux detected")
+            filename = "protoc-{version}-linux-x86_64.zip".format(version=self.version)
+        elif sys.platform.startswith('darwin'):
+            info_notify("Mac OS X detected")
+            filename = "protoc-{version}-osx-x86_64.zip".format(version=self.version)
+        else:
+            info_notify("Your platform is not supported, but the recipe can "
+                        "still be built if you have a valid protoc "
+                        "(<={version}) in your path".format(version=self.version))
+            return
+
+        protoc_url = join(url_prefix, filename)
+        self.protoc_dir = join(self.ctx.build_dir, "tools", "protoc")
+        if os.path.exists(join(self.protoc_dir, "bin", "protoc")):
+            info_notify("protoc found, no download needed")
+            return
+        try:
+            os.makedirs(self.protoc_dir)
+        except OSError as e:
+            # if the dir already exists (errno 17), we ignore the error
+            if e.errno != 17:
+                raise e
+        info_notify("Will download into {dest_dir}".format(dest_dir=self.protoc_dir))
+        self.download_file(protoc_url, join(self.protoc_dir, filename))
+        with current_directory(self.protoc_dir):
+            shprint(sh.unzip, join(self.protoc_dir, filename))
+
+    def build_arch(self, arch):
+        env = self.get_recipe_env(arch)
+
+        # Build libprotobuf.a
+        with current_directory(self.get_build_dir(arch.arch)):
+            env['HOSTARCH'] = 'arm-eabi'
+            env['BUILDARCH'] = shprint(sh.gcc, '-dumpmachine').stdout.decode('utf-8').split('\n')[0]
+
+            if not exists('configure'):
+                shprint(sh.Command('./autogen.sh'), _env=env)
+
+            shprint(sh.Command('./configure'),
+                    '--host={}'.format(env['HOSTARCH']),
+                    '--enable-shared',
+                    _env=env)
+
+            with current_directory(join(self.get_build_dir(arch.arch), 'src')):
+                shprint(sh.make, 'libprotobuf.la', '-j'+str(cpu_count()), _env=env)
+                shprint(sh.cp, '.libs/libprotobuf.a',
join(self.ctx.get_libs_dir(arch.arch), 'libprotobuf.a'))
+
+        # Copy the gnustl shared library (the STL runtime we link against)
+        shutil.copyfile(
+            self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version + '/libs/' + arch.arch + '/libgnustl_shared.so',
+            join(self.ctx.get_libs_dir(arch.arch), 'libgnustl_shared.so'))
+
+        # Build python bindings and _message.so
+        with current_directory(join(self.get_build_dir(arch.arch), 'python')):
+            hostpython = sh.Command(self.hostpython_location)
+            shprint(hostpython,
+                    'setup.py',
+                    'build_ext',
+                    '--cpp_implementation', _env=env)
+
+        # Install python bindings
+        self.install_python_package(arch)
+
+        # Create __init__.py which is missing (cf. https://github.com/protocolbuffers/protobuf/issues/1296
+        # and https://stackoverflow.com/questions/13862562/google-protocol-buffers-not-found-when-trying-to-freeze-python-app)
+        open(join(self.ctx.get_site_packages_dir(), 'google', '__init__.py'), 'a').close()
+
+    def install_python_package(self, arch):
+        env = self.get_recipe_env(arch)
+
+        info('Installing {} into site-packages'.format(self.name))
+
+        with current_directory(join(self.get_build_dir(arch.arch), 'python')):
+            hostpython = sh.Command(self.hostpython_location)
+
+            hpenv = env.copy()
+            shprint(hostpython, 'setup.py', 'install', '-O2',
+                    '--root={}'.format(self.ctx.get_python_install_dir()),
+                    '--install-lib=.',
+                    '--cpp_implementation',
+                    _env=hpenv, *self.setup_extra_args)
+
+    def get_recipe_env(self, arch):
+        env = super(ProtobufCppRecipe, self).get_recipe_env(arch)
+        if self.protoc_dir is not None:
+            # we need a protoc binary built for the host platform
+            env['PROTOC'] = join(self.protoc_dir, 'bin', 'protoc')
+        env['TARGET_OS'] = 'OS_ANDROID_CROSSCOMPILE'
+        env['CFLAGS'] += (
+            ' -I' + self.ctx.ndk_dir + '/platforms/android-' +
+            str(self.ctx.android_api) +
+            '/arch-' + arch.arch.replace('eabi', '') + '/usr/include' +
+            ' -I' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' +
+            self.ctx.toolchain_version + '/include' +
+            ' -I' + self.ctx.ndk_dir + '/sources/cxx-stl/gnu-libstdc++/' +
+            self.ctx.toolchain_version + '/libs/' + arch.arch + '/include')
+        env['CFLAGS'] += ' -std=gnu++11'
+        env['CXXFLAGS'] = env['CFLAGS']
+        env['CXXFLAGS'] += ' -frtti'
+        env['CXXFLAGS'] += ' -fexceptions'
+        env['LDFLAGS'] += (
+            ' -lgnustl_shared -landroid -llog' +
+            ' -L' + self.ctx.ndk_dir +
+            '/sources/cxx-stl/gnu-libstdc++/' + self.ctx.toolchain_version +
+            '/libs/' + arch.arch)
+        return env
+
+
+recipe = ProtobufCppRecipe()
diff --git a/p4a/pythonforandroidold/recipes/protobuf_cpp/fix-python3-compatibility.patch b/p4a/pythonforandroidold/recipes/protobuf_cpp/fix-python3-compatibility.patch
new file mode 100644
index 0000000..e77deba
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/protobuf_cpp/fix-python3-compatibility.patch
@@ -0,0 +1,91 @@
+From 539bc017a62f91bdf7c547b58948cb5a2f59d918 Mon Sep 17 00:00:00 2001
+From: Ben Webb
+Date: Thu, 12 Jul 2018 10:58:10 -0700
+Subject: [PATCH] Add Python 3.7 compatibility (#4862)
+
+Compilation of Python wrappers fails with Python 3.7 because
+the Python folks changed their C API such that
+PyUnicode_AsUTF8AndSize() now returns a const char* rather
+than a char*. Add a patch to work around. Relates #4086.
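+
+(Illustrative aside, not part of the upstream commit message: with
+Python <= 3.6, `char *s = PyUnicode_AsUTF8AndSize(ob, &size);` compiled
+cleanly; from 3.7 the function returns `const char *`, so the macros
+below wrap the call in `const_cast<char*>(...)` to keep assigning
+through the existing `char **` out-parameters.)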
+---
+ python/google/protobuf/pyext/descriptor.cc | 2 +-
+ python/google/protobuf/pyext/descriptor_containers.cc | 2 +-
+ python/google/protobuf/pyext/descriptor_pool.cc | 2 +-
+ python/google/protobuf/pyext/extension_dict.cc | 2 +-
+ python/google/protobuf/pyext/message.cc | 4 ++--
+ 5 files changed, 6 insertions(+), 6 deletions(-)
+
+diff --git a/python/google/protobuf/pyext/descriptor.cc b/python/google/protobuf/pyext/descriptor.cc
+index 8af0cb1289..19a1c38a62 100644
+--- a/python/google/protobuf/pyext/descriptor.cc
++++ b/python/google/protobuf/pyext/descriptor.cc
+@@ -56,7 +56,7 @@
+ #endif
+ #define PyString_AsStringAndSize(ob, charpp, sizep) \
+   (PyUnicode_Check(ob)? \
+-     ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
++     ((*(charpp) = const_cast<char*>(PyUnicode_AsUTF8AndSize(ob, (sizep)))) == NULL? -1: 0): \
+   PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
+ #endif
+
+diff --git a/python/google/protobuf/pyext/descriptor_containers.cc b/python/google/protobuf/pyext/descriptor_containers.cc
+index bc007f7efa..0153664f50 100644
+--- a/python/google/protobuf/pyext/descriptor_containers.cc
++++ b/python/google/protobuf/pyext/descriptor_containers.cc
+@@ -66,7 +66,7 @@
+ #endif
+ #define PyString_AsStringAndSize(ob, charpp, sizep) \
+   (PyUnicode_Check(ob)? \
+-     ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
++     ((*(charpp) = const_cast<char*>(PyUnicode_AsUTF8AndSize(ob, (sizep)))) == NULL? -1: 0): \
+   PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
+ #endif
+
+diff --git a/python/google/protobuf/pyext/descriptor_pool.cc b/python/google/protobuf/pyext/descriptor_pool.cc
+index 95882aeb35..962accc6e9 100644
+--- a/python/google/protobuf/pyext/descriptor_pool.cc
++++ b/python/google/protobuf/pyext/descriptor_pool.cc
+@@ -48,7 +48,7 @@
+ #endif
+ #define PyString_AsStringAndSize(ob, charpp, sizep) \
+   (PyUnicode_Check(ob)? \
+-     ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
++     ((*(charpp) = const_cast<char*>(PyUnicode_AsUTF8AndSize(ob, (sizep)))) == NULL? -1: 0): \
+   PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
+ #endif
+
+diff --git a/python/google/protobuf/pyext/extension_dict.cc b/python/google/protobuf/pyext/extension_dict.cc
+index 018b5c2c49..174c5470c2 100644
+--- a/python/google/protobuf/pyext/extension_dict.cc
++++ b/python/google/protobuf/pyext/extension_dict.cc
+@@ -53,7 +53,7 @@
+ #endif
+ #define PyString_AsStringAndSize(ob, charpp, sizep) \
+   (PyUnicode_Check(ob)? \
+-     ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
++     ((*(charpp) = const_cast<char*>(PyUnicode_AsUTF8AndSize(ob, (sizep)))) == NULL? -1: 0): \
+   PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
+ #endif
+
+diff --git a/python/google/protobuf/pyext/message.cc b/python/google/protobuf/pyext/message.cc
+index 5893533adf..31094b7e10 100644
+--- a/python/google/protobuf/pyext/message.cc
++++ b/python/google/protobuf/pyext/message.cc
+@@ -79,7 +79,7 @@
+   (PyUnicode_Check(ob)? PyUnicode_AsUTF8(ob): PyBytes_AsString(ob))
+ #define PyString_AsStringAndSize(ob, charpp, sizep) \
+   (PyUnicode_Check(ob)? \
+-     ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
++     ((*(charpp) = const_cast<char*>(PyUnicode_AsUTF8AndSize(ob, (sizep)))) == NULL?
-1: 0): \
+   PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
+ #endif
+ #endif
+@@ -1529,7 +1529,7 @@ PyObject* HasField(CMessage* self, PyObject* arg) {
+     return NULL;
+   }
+ #else
+-  field_name = PyUnicode_AsUTF8AndSize(arg, &size);
++  field_name = const_cast<char*>(PyUnicode_AsUTF8AndSize(arg, &size));
+   if (!field_name) {
+     return NULL;
+   }
diff --git a/p4a/pythonforandroidold/recipes/psycopg2/__init__.py b/p4a/pythonforandroidold/recipes/psycopg2/__init__.py
new file mode 100644
index 0000000..aaf5a33
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/psycopg2/__init__.py
@@ -0,0 +1,50 @@
+from pythonforandroid.recipe import PythonRecipe
+from pythonforandroid.toolchain import current_directory, shprint
+import sh
+
+
+class Psycopg2Recipe(PythonRecipe):
+    """
+    Requires the `libpq-dev` system dependency, e.g. for the `pg_config` binary.
+    If you get a `nl_langinfo` symbol runtime error, make sure you're running on
+    `ANDROID_API` (`ndk-api`) >= 26, see:
+    https://github.com/kivy/python-for-android/issues/1711#issuecomment-465747557
+    """
+    version = 'latest'
+    url = 'http://initd.org/psycopg/tarballs/psycopg2-{version}.tar.gz'
+    depends = ['libpq']
+    site_packages_name = 'psycopg2'
+    call_hostpython_via_targetpython = False
+
+    def prebuild_arch(self, arch):
+        libdir = self.ctx.get_libs_dir(arch.arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+            # pg_config_helper will return the system-installed libpq, but we
+            # need the one we just cross-compiled
+            shprint(sh.sed, '-i',
+                    "s|pg_config_helper.query(.libdir.)|'{}'|".format(libdir),
+                    'setup.py')
+
+    def get_recipe_env(self, arch):
+        env = super(Psycopg2Recipe, self).get_recipe_env(arch)
+        env['LDFLAGS'] = "{} -L{}".format(env['LDFLAGS'], self.ctx.get_libs_dir(arch.arch))
+        env['EXTRA_CFLAGS'] = "--host linux-armv"
+        return env
+
+    def install_python_package(self, arch, name=None, env=None, is_dir=True):
+        '''Automate the installation of a Python package (or a cython
+        package where the cython components are pre-built).'''
+        if env is None:
+            env = self.get_recipe_env(arch)
+
+        with current_directory(self.get_build_dir(arch.arch)):
+            hostpython = sh.Command(self.ctx.hostpython)
+
+            shprint(hostpython, 'setup.py', 'build_ext', '--static-libpq',
+                    _env=env)
+            shprint(hostpython, 'setup.py', 'install', '-O2',
+                    '--root={}'.format(self.ctx.get_python_install_dir()),
+                    '--install-lib=.', _env=env)
+
+
+recipe = Psycopg2Recipe()
diff --git a/p4a/pythonforandroidold/recipes/pyaml/__init__.py b/p4a/pythonforandroidold/recipes/pyaml/__init__.py
new file mode 100644
index 0000000..8440175
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyaml/__init__.py
@@ -0,0 +1,12 @@
+from pythonforandroid.recipe import PythonRecipe
+
+
+class PyamlRecipe(PythonRecipe):
+    version = "15.8.2"
+    url = 'https://pypi.python.org/packages/source/p/pyaml/pyaml-{version}.tar.gz'
+    depends = ["setuptools"]
+    site_packages_name = 'yaml'
+    call_hostpython_via_targetpython = False
+
+
+recipe = PyamlRecipe()
diff --git a/p4a/pythonforandroid/recipes/pyasn1/__init__.py b/p4a/pythonforandroidold/recipes/pyasn1/__init__.py
similarity index 100%
rename from p4a/pythonforandroid/recipes/pyasn1/__init__.py
rename to p4a/pythonforandroidold/recipes/pyasn1/__init__.py
diff --git a/p4a/pythonforandroidold/recipes/pycparser/__init__.py b/p4a/pythonforandroidold/recipes/pycparser/__init__.py
new file mode 100644
index 0000000..6c82cf8
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pycparser/__init__.py
@@ -0,0 +1,16 @@
+from pythonforandroid.recipe import PythonRecipe
+
+
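+# pycparser is a dependency of cffi, which needs it on the host at build
+# time; that is presumably why it is also installed into hostpython
+# (install_in_hostpython below).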
+class PycparserRecipe(PythonRecipe): + name = 'pycparser' + version = '2.14' + url = 'https://pypi.python.org/packages/source/p/pycparser/pycparser-{version}.tar.gz' + + depends = ['setuptools'] + + call_hostpython_via_targetpython = False + + install_in_hostpython = True + + +recipe = PycparserRecipe() diff --git a/p4a/pythonforandroidold/recipes/pycrypto/__init__.py b/p4a/pythonforandroidold/recipes/pycrypto/__init__.py new file mode 100644 index 0000000..e8bfab2 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pycrypto/__init__.py @@ -0,0 +1,44 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe +from pythonforandroid.toolchain import ( + current_directory, + info, + shprint, +) +import sh + + +class PyCryptoRecipe(CompiledComponentsPythonRecipe): + version = '2.7a1' + url = 'https://github.com/dlitz/pycrypto/archive/v{version}.zip' + depends = ['openssl', ('python2', 'python3')] + site_packages_name = 'Crypto' + call_hostpython_via_targetpython = False + patches = ['add_length.patch'] + + def get_recipe_env(self, arch=None, clang=True): + env = super(PyCryptoRecipe, self).get_recipe_env(arch) + openssl_recipe = Recipe.get_recipe('openssl', self.ctx) + env['CC'] = env['CC'] + openssl_recipe.include_flags(arch) + + env['LDFLAGS'] += ' -L{}'.format(self.ctx.get_libs_dir(arch.arch)) + env['LDFLAGS'] += ' -L{}'.format(self.ctx.libs_dir) + env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) + env['LIBS'] = env.get('LIBS', '') + openssl_recipe.link_libs_flags() + + env['EXTRA_CFLAGS'] = '--host linux-armv' + env['ac_cv_func_malloc_0_nonnull'] = 'yes' + return env + + def build_compiled_components(self, arch): + info('Configuring compiled components in {}'.format(self.name)) + + env = self.get_recipe_env(arch) + with current_directory(self.get_build_dir(arch.arch)): + configure = sh.Command('./configure') + shprint(configure, '--host=arm-eabi', + '--prefix={}'.format(self.ctx.get_python_install_dir()), + '--enable-shared', _env=env) + super(PyCryptoRecipe, self).build_compiled_components(arch) + + +recipe = PyCryptoRecipe() diff --git a/p4a/pythonforandroidold/recipes/pycrypto/add_length.patch b/p4a/pythonforandroidold/recipes/pycrypto/add_length.patch new file mode 100644 index 0000000..7bb9299 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pycrypto/add_length.patch @@ -0,0 +1,11 @@ +--- pycrypto-2.6.1/src/hash_SHA2_template.c.orig 2013-10-14 14:38:10.000000000 -0700 ++++ pycrypto-2.6.1/src/hash_SHA2_template.c 2014-05-19 10:15:51.000000000 -0700 +@@ -87,7 +87,7 @@ + * return 1 on success + * return 0 if the length overflows + */ +-int add_length(hash_state *hs, sha2_word_t inc) { ++static int add_length(hash_state *hs, sha2_word_t inc) { + sha2_word_t overflow_detector; + overflow_detector = hs->length_lower; + hs->length_lower += inc; diff --git a/p4a/pythonforandroidold/recipes/pycryptodome/__init__.py b/p4a/pythonforandroidold/recipes/pycryptodome/__init__.py new file mode 100644 index 0000000..9418600 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pycryptodome/__init__.py @@ -0,0 +1,10 @@ +from pythonforandroid.recipe import PythonRecipe + + +class PycryptodomeRecipe(PythonRecipe): + version = '3.6.3' + url = 'https://github.com/Legrandin/pycryptodome/archive/v{version}.tar.gz' + depends = ['setuptools', 'cffi'] + + +recipe = PycryptodomeRecipe() diff --git a/p4a/pythonforandroid/recipes/pyethereum/__init__.py b/p4a/pythonforandroidold/recipes/pyethereum/__init__.py similarity index 100% rename from 
p4a/pythonforandroid/recipes/pyethereum/__init__.py
rename to p4a/pythonforandroidold/recipes/pyethereum/__init__.py
diff --git a/p4a/pythonforandroid/recipes/pygame/Setup b/p4a/pythonforandroidold/recipes/pygame/Setup
similarity index 100%
rename from p4a/pythonforandroid/recipes/pygame/Setup
rename to p4a/pythonforandroidold/recipes/pygame/Setup
diff --git a/p4a/pythonforandroidold/recipes/pygame/__init__.py b/p4a/pythonforandroidold/recipes/pygame/__init__.py
new file mode 100644
index 0000000..981fa44
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pygame/__init__.py
@@ -0,0 +1,74 @@
+
+from pythonforandroid.recipe import Recipe
+from pythonforandroid.util import current_directory, ensure_dir
+from pythonforandroid.logger import debug, shprint, info, warning
+from os.path import join
+import sh
+import glob
+
+
+class PygameRecipe(Recipe):
+    name = 'pygame'
+    version = '1.9.1'
+    url = 'http://pygame.org/ftp/pygame-{version}release.tar.gz'
+
+    depends = ['python2legacy', 'sdl']
+    conflicts = ['sdl2']
+
+    patches = ['patches/fix-surface-access.patch',
+               'patches/fix-array-surface.patch',
+               'patches/fix-sdl-spam-log.patch']
+
+    def get_recipe_env(self, arch=None):
+        env = super(PygameRecipe, self).get_recipe_env(arch)
+        env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format(
+            self.ctx.get_libs_dir(arch.arch))
+        env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink')
+        env['LIBLINK'] = 'NOTNONE'
+        env['NDKPLATFORM'] = self.ctx.ndk_platform
+
+        # Every recipe uses its own liblink path; object files are collected
+        # and biglinked later
+        liblink_path = join(self.get_build_container_dir(arch.arch), 'objects_{}'.format(self.name))
+        env['LIBLINK_PATH'] = liblink_path
+        ensure_dir(liblink_path)
+        return env
+
+    def prebuild_arch(self, arch):
+        if self.is_patched(arch):
+            return
+        shprint(sh.cp, join(self.get_recipe_dir(), 'Setup'),
+                join(self.get_build_dir(arch.arch), 'Setup'))
+
+    def build_arch(self, arch):
+        env = self.get_recipe_env(arch)
+
+        env['CFLAGS'] = env['CFLAGS'] + ' -I{jni_path}/png -I{jni_path}/jpeg'.format(
+            jni_path=join(self.ctx.bootstrap.build_dir, 'jni'))
+        env['CFLAGS'] = env['CFLAGS'] + ' -I{jni_path}/sdl/include -I{jni_path}/sdl_mixer'.format(
+            jni_path=join(self.ctx.bootstrap.build_dir, 'jni'))
+        env['CFLAGS'] = env['CFLAGS'] + ' -I{jni_path}/sdl_ttf -I{jni_path}/sdl_image'.format(
+            jni_path=join(self.ctx.bootstrap.build_dir, 'jni'))
+        debug('pygame cflags: {}'.format(env['CFLAGS']))
+
+        env['LDFLAGS'] = env['LDFLAGS'] + ' -L{libs_path} -L{src_path}/obj/local/{arch} -lm -lz'.format(
+            libs_path=self.ctx.libs_dir, src_path=self.ctx.bootstrap.build_dir, arch=env['ARCH'])
+
+        env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink')
+
+        with current_directory(self.get_build_dir(arch.arch)):
+            info('hostpython is ' + self.ctx.hostpython)
+            hostpython = sh.Command(self.ctx.hostpython)
+            shprint(hostpython, 'setup.py', 'install', '-O2', _env=env,
+                    _tail=10, _critical=True)
+
+            info('strip is ' + env['STRIP'])
+            build_lib = glob.glob('./build/lib*')
+            assert len(build_lib) == 1
+            print('stripping pygame')
+            shprint(sh.find, build_lib[0], '-name', '*.o', '-exec',
+                    env['STRIP'], '{}', ';')
+
+        warning('Should remove pygame tests etc.
here, but skipping for now') + + +recipe = PygameRecipe() diff --git a/p4a/pythonforandroid/recipes/pygame/patches/fix-array-surface.patch b/p4a/pythonforandroidold/recipes/pygame/patches/fix-array-surface.patch similarity index 100% rename from p4a/pythonforandroid/recipes/pygame/patches/fix-array-surface.patch rename to p4a/pythonforandroidold/recipes/pygame/patches/fix-array-surface.patch diff --git a/p4a/pythonforandroid/recipes/pygame/patches/fix-sdl-spam-log.patch b/p4a/pythonforandroidold/recipes/pygame/patches/fix-sdl-spam-log.patch similarity index 100% rename from p4a/pythonforandroid/recipes/pygame/patches/fix-sdl-spam-log.patch rename to p4a/pythonforandroidold/recipes/pygame/patches/fix-sdl-spam-log.patch diff --git a/p4a/pythonforandroid/recipes/pygame/patches/fix-surface-access.patch b/p4a/pythonforandroidold/recipes/pygame/patches/fix-surface-access.patch similarity index 100% rename from p4a/pythonforandroid/recipes/pygame/patches/fix-surface-access.patch rename to p4a/pythonforandroidold/recipes/pygame/patches/fix-surface-access.patch diff --git a/p4a/pythonforandroid/recipes/pygame_bootstrap_components/__init__.py b/p4a/pythonforandroidold/recipes/pygame_bootstrap_components/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/pygame_bootstrap_components/__init__.py rename to p4a/pythonforandroidold/recipes/pygame_bootstrap_components/__init__.py diff --git a/p4a/pythonforandroid/recipes/pygame_bootstrap_components/jpeg-ndk15-plus.patch b/p4a/pythonforandroidold/recipes/pygame_bootstrap_components/jpeg-ndk15-plus.patch similarity index 100% rename from p4a/pythonforandroid/recipes/pygame_bootstrap_components/jpeg-ndk15-plus.patch rename to p4a/pythonforandroidold/recipes/pygame_bootstrap_components/jpeg-ndk15-plus.patch diff --git a/p4a/pythonforandroidold/recipes/pyicu/__init__.py b/p4a/pythonforandroidold/recipes/pyicu/__init__.py new file mode 100644 index 0000000..98ec7b7 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyicu/__init__.py @@ -0,0 +1,58 @@ +import os +import sh +from os.path import join +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.toolchain import shprint, info + + +class PyICURecipe(CompiledComponentsPythonRecipe): + version = '1.9.2' + url = 'https://pypi.python.org/packages/source/P/PyICU/PyICU-{version}.tar.gz' + depends = ["icu"] + patches = ['locale.patch', 'icu.patch'] + + def get_recipe_env(self, arch): + env = super(PyICURecipe, self).get_recipe_env(arch) + + icu_include = join( + self.ctx.get_python_install_dir(), "include", "icu") + + env["CC"] += " -I"+icu_include + + include = ( + " -I{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/include/" + " -I{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/" + "{arch}/include") + include = include.format(ndk=self.ctx.ndk_dir, + version=env["TOOLCHAIN_VERSION"], + arch=arch.arch) + env["CC"] += include + + lib = "{ndk}/sources/cxx-stl/gnu-libstdc++/{version}/libs/{arch}" + lib = lib.format(ndk=self.ctx.ndk_dir, + version=env["TOOLCHAIN_VERSION"], + arch=arch.arch) + env["LDFLAGS"] += " -lgnustl_shared -L"+lib + + build_dir = self.get_build_dir(arch.arch) + env["LDFLAGS"] += " -L"+build_dir + return env + + def build_arch(self, arch): + build_dir = self.get_build_dir(arch.arch) + + info("create links to icu libs") + lib_dir = join(self.ctx.get_python_install_dir(), "lib") + icu_libs = [f for f in os.listdir(lib_dir) if f.startswith("libicu")] + + for l in icu_libs: + raw = l.rsplit(".", 1)[0] + try: + shprint(sh.ln, "-s", 
join(lib_dir, l), join(build_dir, raw))
+            except Exception:
+                pass
+
+        super(PyICURecipe, self).build_arch(arch)
+
+
+recipe = PyICURecipe()
diff --git a/p4a/pythonforandroid/recipes/pyicu/icu.patch b/p4a/pythonforandroidold/recipes/pyicu/icu.patch
similarity index 100%
rename from p4a/pythonforandroid/recipes/pyicu/icu.patch
rename to p4a/pythonforandroidold/recipes/pyicu/icu.patch
diff --git a/p4a/pythonforandroidold/recipes/pyicu/locale.patch b/p4a/pythonforandroidold/recipes/pyicu/locale.patch
new file mode 100644
index 0000000..b291c30
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyicu/locale.patch
@@ -0,0 +1,12 @@
+diff -Naur locale.cpp locale1.cpp
+--- pyicu/locale.cpp	2015-04-29 07:32:39.000000000 +0200
++++ locale1.cpp	2016-05-12 17:13:08.990059346 +0200
+@@ -27,7 +27,7 @@
+ #if defined(_MSC_VER) || defined(__WIN32)
+ #include <windows.h>
+ #else
+-#include <langinfo.h>
++#include <locale.h>
+ #include
+ #include
+ #endif
diff --git a/p4a/pythonforandroidold/recipes/pyjnius/__init__.py b/p4a/pythonforandroidold/recipes/pyjnius/__init__.py
new file mode 100644
index 0000000..8aeac6c
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyjnius/__init__.py
@@ -0,0 +1,27 @@
+from pythonforandroid.recipe import CythonRecipe
+from pythonforandroid.toolchain import shprint, current_directory, info
+from pythonforandroid.patching import will_build
+import sh
+from os.path import join
+
+
+class PyjniusRecipe(CythonRecipe):
+    # "6553ad4" is one commit after the last release (1.2.0);
+    # it fixes method resolution, required for resolving requestPermissions()
+    version = '6553ad4'
+    url = 'https://github.com/kivy/pyjnius/archive/{version}.zip'
+    name = 'pyjnius'
+    depends = [('genericndkbuild', 'sdl2', 'sdl'), 'six']
+    site_packages_name = 'jnius'
+
+    patches = [('sdl2_jnienv_getter.patch', will_build('sdl2')),
+               ('genericndkbuild_jnienv_getter.patch', will_build('genericndkbuild'))]
+
+    def postbuild_arch(self, arch):
+        super(PyjniusRecipe, self).postbuild_arch(arch)
+        info('Copying pyjnius java class to classes build dir')
+        with current_directory(self.get_build_dir(arch.arch)):
+            shprint(sh.cp, '-a', join('jnius', 'src', 'org'), self.ctx.javaclass_dir)
+
+
+recipe = PyjniusRecipe()
diff --git a/p4a/pythonforandroidold/recipes/pyjnius/genericndkbuild_jnienv_getter.patch b/p4a/pythonforandroidold/recipes/pyjnius/genericndkbuild_jnienv_getter.patch
new file mode 100644
index 0000000..ff26994
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyjnius/genericndkbuild_jnienv_getter.patch
@@ -0,0 +1,25 @@
+diff --git a/jnius/jnius_jvm_android.pxi b/jnius/jnius_jvm_android.pxi
+index ac89fec..71daa43 100644
+--- a/jnius/jnius_jvm_android.pxi
++++ b/jnius/jnius_jvm_android.pxi
+@@ -1,5 +1,5 @@
+ # on android, rely on SDL to get the JNI env
+-cdef extern JNIEnv *SDL_ANDROID_GetJNIEnv()
++cdef extern JNIEnv *WebView_AndroidGetJNIEnv()
+ 
+ cdef JNIEnv *get_platform_jnienv():
+-    return SDL_ANDROID_GetJNIEnv()
++    return WebView_AndroidGetJNIEnv()
+diff --git a/setup.py b/setup.py
+index 740510f..0c8e55f 100644
+--- a/setup.py
++++ b/setup.py
+@@ -53,7 +53,7 @@ except ImportError:
+ 
+ if PLATFORM == 'android':
+     # for android, we use SDL...
+- LIBRARIES = ['sdl', 'log'] ++ LIBRARIES = ['main', 'log'] + LIBRARY_DIRS = ['libs/' + getenv('ARCH')] + elif PLATFORM == 'darwin': + import subprocess diff --git a/p4a/pythonforandroid/recipes/pyjnius/sdl2_jnienv_getter.patch b/p4a/pythonforandroidold/recipes/pyjnius/sdl2_jnienv_getter.patch similarity index 100% rename from p4a/pythonforandroid/recipes/pyjnius/sdl2_jnienv_getter.patch rename to p4a/pythonforandroidold/recipes/pyjnius/sdl2_jnienv_getter.patch diff --git a/p4a/pythonforandroidold/recipes/pyleveldb/__init__.py b/p4a/pythonforandroidold/recipes/pyleveldb/__init__.py new file mode 100644 index 0000000..6147709 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyleveldb/__init__.py @@ -0,0 +1,13 @@ +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe + + +class PyLevelDBRecipe(CppCompiledComponentsPythonRecipe): + version = '0.193' + url = 'https://pypi.python.org/packages/source/l/leveldb/leveldb-{version}.tar.gz' + depends = ['snappy', 'leveldb', ('hostpython2', 'hostpython3'), 'setuptools'] + patches = ['bindings-only.patch'] + call_hostpython_via_targetpython = False # Due to setuptools + site_packages_name = 'leveldb' + + +recipe = PyLevelDBRecipe() diff --git a/p4a/pythonforandroidold/recipes/pyleveldb/bindings-only.patch b/p4a/pythonforandroidold/recipes/pyleveldb/bindings-only.patch new file mode 100644 index 0000000..2899f4e --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyleveldb/bindings-only.patch @@ -0,0 +1,103 @@ +--- pyleveldb/setup.py 2014-03-28 02:51:24.000000000 +0100 ++++ pyleveldb-patch/setup.py 2016-03-02 11:52:13.780678586 +0100 +@@ -7,41 +7,22 @@ + # + # See LICENSE for details. + +-import glob +-import platform +-import sys +- + from setuptools import setup, Extension + +-system,node,release,version,machine,processor = platform.uname() +-common_flags = [ ++extra_compile_args = [ + '-I./leveldb/include', + '-I./leveldb', +- '-I./snappy', ++ '-I./leveldb/snappy', + '-I.', +- '-fno-builtin-memcmp', + '-O2', + '-fPIC', + '-DNDEBUG', + '-DSNAPPY', +-] +- +-if system == 'Darwin': +- extra_compile_args = common_flags + [ +- '-DOS_MACOSX', ++ '-Wall', + '-DLEVELDB_PLATFORM_POSIX', +- '-Wno-error=unused-command-line-argument-hard-error-in-future', +- ] +-elif system == 'Linux': +- extra_compile_args = common_flags + [ +- '-pthread', +- '-Wall', +- '-DOS_LINUX', +- '-DLEVELDB_PLATFORM_POSIX', +- ] +-else: +- print >>sys.stderr, "Don't know how to compile leveldb for %s!" 
% system +- sys.exit(0) ++ '-D_REENTRANT', ++ '-DOS_ANDROID', ++] + + setup( + name = 'leveldb', +@@ -75,52 +56,6 @@ + ext_modules = [ + Extension('leveldb', + sources = [ +- # snappy +- './snappy/snappy.cc', +- './snappy/snappy-stubs-internal.cc', +- './snappy/snappy-sinksource.cc', +- './snappy/snappy-c.cc', +- +- #leveldb +- 'leveldb/db/builder.cc', +- 'leveldb/db/c.cc', +- 'leveldb/db/db_impl.cc', +- 'leveldb/db/db_iter.cc', +- 'leveldb/db/dbformat.cc', +- 'leveldb/db/filename.cc', +- 'leveldb/db/log_reader.cc', +- 'leveldb/db/log_writer.cc', +- 'leveldb/db/memtable.cc', +- 'leveldb/db/repair.cc', +- 'leveldb/db/table_cache.cc', +- 'leveldb/db/version_edit.cc', +- 'leveldb/db/version_set.cc', +- 'leveldb/db/write_batch.cc', +- 'leveldb/table/block.cc', +- 'leveldb/table/block_builder.cc', +- 'leveldb/table/filter_block.cc', +- 'leveldb/table/format.cc', +- 'leveldb/table/iterator.cc', +- 'leveldb/table/merger.cc', +- 'leveldb/table/table.cc', +- 'leveldb/table/table_builder.cc', +- 'leveldb/table/two_level_iterator.cc', +- 'leveldb/util/arena.cc', +- 'leveldb/util/bloom.cc', +- 'leveldb/util/cache.cc', +- 'leveldb/util/coding.cc', +- 'leveldb/util/comparator.cc', +- 'leveldb/util/crc32c.cc', +- 'leveldb/util/env.cc', +- 'leveldb/util/env_posix.cc', +- 'leveldb/util/filter_policy.cc', +- 'leveldb/util/hash.cc', +- 'leveldb/util/histogram.cc', +- 'leveldb/util/logging.cc', +- 'leveldb/util/options.cc', +- 'leveldb/util/status.cc', +- 'leveldb/port/port_posix.cc', +- + # python stuff + 'leveldb_ext.cc', + 'leveldb_object.cc', diff --git a/p4a/pythonforandroidold/recipes/pymunk/__init__.py b/p4a/pythonforandroidold/recipes/pymunk/__init__.py new file mode 100644 index 0000000..b72b85b --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pymunk/__init__.py @@ -0,0 +1,22 @@ +from os.path import join +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class PymunkRecipe(CompiledComponentsPythonRecipe): + name = "pymunk" + version = '5.3.2' + url = 'https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip' + depends = ['cffi', 'setuptools'] + call_hostpython_via_targetpython = False + + def get_recipe_env(self, arch): + env = super(PymunkRecipe, self).get_recipe_env(arch) + env['PYTHON_ROOT'] = self.ctx.get_python_install_dir() + env['LDFLAGS'] += " -shared -llog" + env['LDFLAGS'] += ' -L{}'.format(join(self.ctx.ndk_platform, 'usr', 'lib')) + env['LDFLAGS'] += " --sysroot={}".format(self.ctx.ndk_platform) + env['LIBS'] = env.get('LIBS', '') + ' -landroid' + return env + + +recipe = PymunkRecipe() diff --git a/p4a/pythonforandroidold/recipes/pynacl/__init__.py b/p4a/pythonforandroidold/recipes/pynacl/__init__.py new file mode 100644 index 0000000..eb9ca2d --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pynacl/__init__.py @@ -0,0 +1,29 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +import os + + +class PyNaCLRecipe(CompiledComponentsPythonRecipe): + name = 'pynacl' + version = '1.3.0' + url = 'https://pypi.python.org/packages/source/P/PyNaCl/PyNaCl-{version}.tar.gz' + + depends = [('hostpython2', 'hostpython3'), 'six', 'setuptools', 'cffi', 'libsodium'] + call_hostpython_via_targetpython = False + + def get_recipe_env(self, arch): + env = super(PyNaCLRecipe, self).get_recipe_env(arch) + env['SODIUM_INSTALL'] = 'system' + + libsodium_build_dir = self.get_recipe( + 'libsodium', self.ctx).get_build_dir(arch.arch) + env['CFLAGS'] += ' -I{}'.format(os.path.join(libsodium_build_dir, + 'src/libsodium/include')) + env['LDFLAGS'] += ' 
-L{} -L{} -L{}'.format(
+            self.ctx.get_libs_dir(arch.arch),
+            self.ctx.libs_dir,
+            libsodium_build_dir)
+
+        return env
+
+
+recipe = PyNaCLRecipe()
diff --git a/p4a/pythonforandroidold/recipes/pyogg/__init__.py b/p4a/pythonforandroidold/recipes/pyogg/__init__.py
new file mode 100644
index 0000000..70ea435
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyogg/__init__.py
@@ -0,0 +1,14 @@
+from pythonforandroid.recipe import PythonRecipe
+from os.path import join
+
+
+class PyOggRecipe(PythonRecipe):
+    version = '0.6.4a1'
+    url = 'https://files.pythonhosted.org/packages/source/p/pyogg/PyOgg-{version}.tar.gz'
+    depends = ['libogg', 'libvorbis', 'setuptools']
+    patches = [join('patches', 'fix-find-lib.patch')]
+
+    call_hostpython_via_targetpython = False
+
+
+recipe = PyOggRecipe()
diff --git a/p4a/pythonforandroidold/recipes/pyogg/patches/fix-find-lib.patch b/p4a/pythonforandroidold/recipes/pyogg/patches/fix-find-lib.patch
new file mode 100644
index 0000000..0db7bfd
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyogg/patches/fix-find-lib.patch
@@ -0,0 +1,13 @@
+diff --git a/pyogg/library_loader.py b/pyogg/library_loader.py
+index c2ba36c..383331a 100644
+--- a/pyogg/library_loader.py
++++ b/pyogg/library_loader.py
+@@ -54,7 +54,7 @@ def load_other(name, paths = None):
+             except:
+                 pass
+     else:
+-        for path in [os.getcwd(), _here]:
++        for path in [os.path.join(os.environ['ANDROID_PRIVATE'], '..', 'lib')]:
+             for style in other_styles:
+                 candidate = os.path.join(path, style.format(name))
+                 if os.path.exists(candidate):
diff --git a/p4a/pythonforandroidold/recipes/pyopenal/__init__.py b/p4a/pythonforandroidold/recipes/pyopenal/__init__.py
new file mode 100644
index 0000000..c42cd09
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyopenal/__init__.py
@@ -0,0 +1,14 @@
+from pythonforandroid.recipe import PythonRecipe
+from os.path import join
+
+
+class PyOpenALRecipe(PythonRecipe):
+    version = '0.7.3a1'
+    url = 'https://files.pythonhosted.org/packages/source/p/pyopenal/PyOpenAL-{version}.tar.gz'
+    depends = ['openal', 'numpy', 'setuptools']
+    patches = [join('patches', 'fix-find-lib.patch')]
+
+    call_hostpython_via_targetpython = False
+
+
+recipe = PyOpenALRecipe()
diff --git a/p4a/pythonforandroidold/recipes/pyopenal/patches/fix-find-lib.patch b/p4a/pythonforandroidold/recipes/pyopenal/patches/fix-find-lib.patch
new file mode 100644
index 0000000..e798bd1
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyopenal/patches/fix-find-lib.patch
@@ -0,0 +1,13 @@
+diff --git a/openal/library_loader.py b/openal/library_loader.py
+index be2485c..e8c6cd2 100644
+--- a/openal/library_loader.py
++++ b/openal/library_loader.py
+@@ -56,7 +56,7 @@ class ExternalLibrary:
+             except:
+                 pass
+     else:
+-        for path in [os.getcwd(), _here]:
++        for path in [os.path.join(os.environ['ANDROID_PRIVATE'], '..', 'lib')]:
+             for style in ExternalLibrary.other_styles:
+                 candidate = os.path.join(path, style.format(name))
+                 if os.path.exists(candidate) and os.path.isfile(candidate):
diff --git a/p4a/pythonforandroidold/recipes/pyopenssl/__init__.py b/p4a/pythonforandroidold/recipes/pyopenssl/__init__.py
new file mode 100644
index 0000000..092a310
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/pyopenssl/__init__.py
@@ -0,0 +1,14 @@
+
+from pythonforandroid.recipe import PythonRecipe
+
+
+class PyOpenSSLRecipe(PythonRecipe):
+    version = '19.0.0'
+    url = 'https://pypi.python.org/packages/source/p/pyOpenSSL/pyOpenSSL-{version}.tar.gz'
+    depends = ['openssl', 'setuptools']
+    site_packages_name
= 'OpenSSL' + + call_hostpython_via_targetpython = False + + +recipe = PyOpenSSLRecipe() diff --git a/p4a/pythonforandroidold/recipes/pyopenssl/fix-dlfcn.patch b/p4a/pythonforandroidold/recipes/pyopenssl/fix-dlfcn.patch new file mode 100644 index 0000000..a19ba20 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyopenssl/fix-dlfcn.patch @@ -0,0 +1,22 @@ +--- pyOpenSSL-0.13.orig/OpenSSL/__init__.py 2011-09-02 17:46:13.000000000 +0200 ++++ pyOpenSSL-0.13/OpenSSL/__init__.py 2013-07-29 17:20:15.750079894 +0200 +@@ -12,6 +12,11 @@ + except AttributeError: + from OpenSSL import crypto + else: ++ # XXX android fix ++ # linux: RTLD_NOW (0x2) | RTLD_GLOBAL (0x100 / 256) ++ # android: RTLD_NOW (0x0) | RTLD_GLOBAL (0x2) ++ flags = 0x2 ++ ''' + try: + import DLFCN + except ImportError: +@@ -31,6 +36,7 @@ + else: + flags = DLFCN.RTLD_NOW | DLFCN.RTLD_GLOBAL + del DLFCN ++ ''' + + sys.setdlopenflags(flags) + from OpenSSL import crypto diff --git a/p4a/pythonforandroidold/recipes/pyproj/__init__.py b/p4a/pythonforandroidold/recipes/pyproj/__init__.py new file mode 100644 index 0000000..71b272d --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyproj/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.recipe import CythonRecipe + + +class PyProjRecipe(CythonRecipe): + version = '1.9.5.1' + url = 'https://github.com/jswhit/pyproj/archive/master.zip' + depends = ['setuptools'] + call_hostpython_via_targetpython = False + + +recipe = PyProjRecipe() diff --git a/p4a/pythonforandroidold/recipes/pyrxp/__init__.py b/p4a/pythonforandroidold/recipes/pyrxp/__init__.py new file mode 100644 index 0000000..09b1804 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyrxp/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class PyRXPURecipe(CompiledComponentsPythonRecipe): + version = '2a02cecc87b9' + url = 'https://bitbucket.org/rptlab/pyrxp/get/{version}.tar.gz' + depends = [] + patches = [] + + +recipe = PyRXPURecipe() diff --git a/p4a/pythonforandroidold/recipes/pysdl2/__init__.py b/p4a/pythonforandroidold/recipes/pysdl2/__init__.py new file mode 100644 index 0000000..e0df9dc --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pysdl2/__init__.py @@ -0,0 +1,12 @@ + +from pythonforandroid.recipe import PythonRecipe + + +class PySDL2Recipe(PythonRecipe): + version = '0.9.3' + url = 'https://bitbucket.org/marcusva/py-sdl2/downloads/PySDL2-{version}.tar.gz' + + depends = ['sdl2'] + + +recipe = PySDL2Recipe() diff --git a/p4a/pythonforandroidold/recipes/pysha3/__init__.py b/p4a/pythonforandroidold/recipes/pysha3/__init__.py new file mode 100644 index 0000000..35cfff8 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pysha3/__init__.py @@ -0,0 +1,30 @@ +import os +from pythonforandroid.recipe import PythonRecipe + + +# TODO: CompiledComponentsPythonRecipe +class Pysha3Recipe(PythonRecipe): + version = '1.0.2' + url = 'https://github.com/tiran/pysha3/archive/{version}.tar.gz' + depends = ['setuptools'] + call_hostpython_via_targetpython = False + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super(Pysha3Recipe, self).get_recipe_env(arch, with_flags_in_cc) + # CFLAGS may only be used to specify C compiler flags, for macro definitions use CPPFLAGS + env['CPPFLAGS'] = env['CFLAGS'] + if self.ctx.ndk == 'crystax': + env['CPPFLAGS'] += ' -I{}/sources/python/{}/include/python/'.format( + self.ctx.ndk_dir, self.ctx.python_recipe.version[0:3]) + env['CFLAGS'] = '' + # LDFLAGS may only be used to specify linker flags, for libraries use LIBS + 
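# (libraries must come after the object files on the link command line,
+        # which is why they go in LIBS rather than LDFLAGS here)
+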
env['LDFLAGS'] = env['LDFLAGS'].replace('-lm', '').replace('-lcrystax', '') + env['LDFLAGS'] += ' -L{}'.format(os.path.join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)) + env['LIBS'] = ' -lm' + if self.ctx.ndk == 'crystax': + env['LIBS'] += ' -lcrystax -lpython{}m'.format(self.ctx.python_recipe.version[0:3]) + env['LDSHARED'] += env['LIBS'] + return env + + +recipe = Pysha3Recipe() diff --git a/p4a/pythonforandroid/recipes/python2/__init__.py b/p4a/pythonforandroidold/recipes/python2/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/python2/__init__.py rename to p4a/pythonforandroidold/recipes/python2/__init__.py diff --git a/p4a/pythonforandroid/recipes/python2/patches/enable-openssl.patch b/p4a/pythonforandroidold/recipes/python2/patches/enable-openssl.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2/patches/enable-openssl.patch rename to p4a/pythonforandroidold/recipes/python2/patches/enable-openssl.patch diff --git a/p4a/pythonforandroid/recipes/python2/patches/fix-api-minor-than-21.patch b/p4a/pythonforandroidold/recipes/python2/patches/fix-api-minor-than-21.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2/patches/fix-api-minor-than-21.patch rename to p4a/pythonforandroidold/recipes/python2/patches/fix-api-minor-than-21.patch diff --git a/p4a/pythonforandroid/recipes/python2/patches/fix-filesystem-default-encoding.patch b/p4a/pythonforandroidold/recipes/python2/patches/fix-filesystem-default-encoding.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2/patches/fix-filesystem-default-encoding.patch rename to p4a/pythonforandroidold/recipes/python2/patches/fix-filesystem-default-encoding.patch diff --git a/p4a/pythonforandroid/recipes/python2/patches/fix-gethostbyaddr.patch b/p4a/pythonforandroidold/recipes/python2/patches/fix-gethostbyaddr.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2/patches/fix-gethostbyaddr.patch rename to p4a/pythonforandroidold/recipes/python2/patches/fix-gethostbyaddr.patch diff --git a/p4a/pythonforandroid/recipes/python2/patches/fix-missing-extensions.patch b/p4a/pythonforandroidold/recipes/python2/patches/fix-missing-extensions.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2/patches/fix-missing-extensions.patch rename to p4a/pythonforandroidold/recipes/python2/patches/fix-missing-extensions.patch diff --git a/p4a/pythonforandroid/recipes/python2/patches/fix-posix-declarations.patch b/p4a/pythonforandroidold/recipes/python2/patches/fix-posix-declarations.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2/patches/fix-posix-declarations.patch rename to p4a/pythonforandroidold/recipes/python2/patches/fix-posix-declarations.patch diff --git a/p4a/pythonforandroid/recipes/python2/patches/fix-pwd-gecos.patch b/p4a/pythonforandroidold/recipes/python2/patches/fix-pwd-gecos.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2/patches/fix-pwd-gecos.patch rename to p4a/pythonforandroidold/recipes/python2/patches/fix-pwd-gecos.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/Setup.local-ssl b/p4a/pythonforandroidold/recipes/python2legacy/Setup.local-ssl similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/Setup.local-ssl rename to p4a/pythonforandroidold/recipes/python2legacy/Setup.local-ssl diff --git a/p4a/pythonforandroid/recipes/python2legacy/__init__.py b/p4a/pythonforandroidold/recipes/python2legacy/__init__.py 
similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/__init__.py rename to p4a/pythonforandroidold/recipes/python2legacy/__init__.py diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/Python-2.7.2-ctypes-disable-wchar.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/Python-2.7.2-ctypes-disable-wchar.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/Python-2.7.2-ctypes-disable-wchar.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/Python-2.7.2-ctypes-disable-wchar.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch-backup b/p4a/pythonforandroidold/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch-backup similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch-backup rename to p4a/pythonforandroidold/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch-backup diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch-new b/p4a/pythonforandroidold/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch-new similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch-new rename to p4a/pythonforandroidold/recipes/python2legacy/patches/Python-2.7.2-xcompile.patch-new diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/_scproxy.py b/p4a/pythonforandroidold/recipes/python2legacy/patches/_scproxy.py similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/_scproxy.py rename to p4a/pythonforandroidold/recipes/python2legacy/patches/_scproxy.py diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/ctypes-find-library-updated.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/ctypes-find-library-updated.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/ctypes-find-library-updated.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/ctypes-find-library-updated.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/ctypes-find-library.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/ctypes-find-library.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/ctypes-find-library.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/ctypes-find-library.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/custom-loader.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/custom-loader.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/custom-loader.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/custom-loader.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/disable-modules.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/disable-modules.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/disable-modules.patch rename to 
p4a/pythonforandroidold/recipes/python2legacy/patches/disable-modules.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/disable-openpty.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/disable-openpty.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/disable-openpty.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/disable-openpty.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/enable-openssl.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/enable-openssl.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/enable-openssl.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/enable-openssl.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/enable-ssl.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/enable-ssl.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/enable-ssl.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/enable-ssl.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-configure-darwin.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-configure-darwin.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-configure-darwin.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-configure-darwin.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-distutils-darwin.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-distutils-darwin.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-distutils-darwin.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-distutils-darwin.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-dlfcn.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-dlfcn.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-dlfcn.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-dlfcn.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-dynamic-lookup.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-dynamic-lookup.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-dynamic-lookup.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-dynamic-lookup.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-filesystemdefaultencoding.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-filesystemdefaultencoding.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-filesystemdefaultencoding.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-filesystemdefaultencoding.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-ftime-removal.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-ftime-removal.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-ftime-removal.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-ftime-removal.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-gethostbyaddr.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-gethostbyaddr.patch similarity index 100% rename 
from p4a/pythonforandroid/recipes/python2legacy/patches/fix-gethostbyaddr.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-gethostbyaddr.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-locale.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-locale.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-locale.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-locale.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-remove-corefoundation.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-remove-corefoundation.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-remove-corefoundation.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-remove-corefoundation.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-setup-flags.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-setup-flags.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-setup-flags.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-setup-flags.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/fix-termios.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/fix-termios.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/fix-termios.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/fix-termios.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/parsetuple.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/parsetuple.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/parsetuple.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/parsetuple.patch diff --git a/p4a/pythonforandroid/recipes/python2legacy/patches/verbose-compilation.patch b/p4a/pythonforandroidold/recipes/python2legacy/patches/verbose-compilation.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python2legacy/patches/verbose-compilation.patch rename to p4a/pythonforandroidold/recipes/python2legacy/patches/verbose-compilation.patch diff --git a/p4a/pythonforandroidold/recipes/python3/__init__.py b/p4a/pythonforandroidold/recipes/python3/__init__.py new file mode 100644 index 0000000..c6d5ba5 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/python3/__init__.py @@ -0,0 +1,54 @@ +import sh +from pythonforandroid.python import GuestPythonRecipe +from pythonforandroid.recipe import Recipe + + +class Python3Recipe(GuestPythonRecipe): + ''' + The python3's recipe. + + .. note:: This recipe can be built only against API 21+. Also, in order to + build certain python modules, we need to add some extra recipes to our + build requirements: + + - ctypes: you must add the recipe for ``libffi``. + + .. 
versionchanged:: 0.6.0 + Refactored into class + :class:`~pythonforandroid.python.GuestPythonRecipe` + ''' + + version = '3.7.1' + url = 'https://www.python.org/ftp/python/{version}/Python-{version}.tgz' + name = 'python3' + + patches = ["patches/fix-ctypes-util-find-library.patch"] + + if sh.which('lld') is not None: + patches = patches + ["patches/remove-fix-cortex-a8.patch"] + + depends = ['hostpython3', 'sqlite3', 'openssl', 'libffi'] + conflicts = ['python3crystax', 'python2', 'python2legacy'] + + configure_args = ( + '--host={android_host}', + '--build={android_build}', + '--enable-shared', + '--disable-ipv6', + 'ac_cv_file__dev_ptmx=yes', + 'ac_cv_file__dev_ptc=no', + '--without-ensurepip', + 'ac_cv_little_endian_double=yes', + '--prefix={prefix}', + '--exec-prefix={exec_prefix}') + + def set_libs_flags(self, env, arch): + env = super(Python3Recipe, self).set_libs_flags(env, arch) + if 'openssl' in self.ctx.recipe_build_order: + recipe = Recipe.get_recipe('openssl', self.ctx) + self.configure_args += \ + ('--with-openssl=' + recipe.get_build_dir(arch.arch),) + return env + + +recipe = Python3Recipe() diff --git a/p4a/pythonforandroid/recipes/python3/patches/fix-ctypes-util-find-library.patch b/p4a/pythonforandroidold/recipes/python3/patches/fix-ctypes-util-find-library.patch similarity index 100% rename from p4a/pythonforandroid/recipes/python3/patches/fix-ctypes-util-find-library.patch rename to p4a/pythonforandroidold/recipes/python3/patches/fix-ctypes-util-find-library.patch diff --git a/p4a/pythonforandroidold/recipes/python3/patches/remove-fix-cortex-a8.patch b/p4a/pythonforandroidold/recipes/python3/patches/remove-fix-cortex-a8.patch new file mode 100644 index 0000000..5ddc3c4 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/python3/patches/remove-fix-cortex-a8.patch @@ -0,0 +1,14 @@ +This patch removes --fix-cortex-a8 from the linker flags in order to support linking +with lld, as lld does not support this flag (https://github.com/android-ndk/ndk/issues/766). +diff --git a/configure b/configure +--- a/configure ++++ b/configure +@@ -5671,7 +5671,7 @@ $as_echo_n "checking for the Android arm ABI... 
" >&6; } + $as_echo "$_arm_arch" >&6; } + if test "$_arm_arch" = 7; then + BASECFLAGS="${BASECFLAGS} -mfloat-abi=softfp -mfpu=vfpv3-d16" +- LDFLAGS="${LDFLAGS} -march=armv7-a -Wl,--fix-cortex-a8" ++ LDFLAGS="${LDFLAGS} -march=armv7-a" + fi + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: not Android" >&5 \ No newline at end of file diff --git a/p4a/pythonforandroid/recipes/python3crystax/__init__.py b/p4a/pythonforandroidold/recipes/python3crystax/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/python3crystax/__init__.py rename to p4a/pythonforandroidold/recipes/python3crystax/__init__.py diff --git a/p4a/pythonforandroidold/recipes/pytz/__init__.py b/p4a/pythonforandroidold/recipes/pytz/__init__.py new file mode 100644 index 0000000..12133bc --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pytz/__init__.py @@ -0,0 +1,15 @@ +from pythonforandroid.recipe import PythonRecipe + + +class PytzRecipe(PythonRecipe): + name = 'pytz' + version = '2015.7' + url = 'https://pypi.python.org/packages/source/p/pytz/pytz-{version}.tar.bz2' + + depends = [] + + call_hostpython_via_targetpython = False + install_in_hostpython = True + + +recipe = PytzRecipe() diff --git a/p4a/pythonforandroidold/recipes/pyusb/__init__.py b/p4a/pythonforandroidold/recipes/pyusb/__init__.py new file mode 100644 index 0000000..0a0fbc7 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyusb/__init__.py @@ -0,0 +1,14 @@ +from pythonforandroid.recipe import PythonRecipe + + +class PyusbRecipe(PythonRecipe): + name = 'pyusb' + version = '1.0.0b1' + url = 'https://pypi.python.org/packages/source/p/pyusb/pyusb-{version}.tar.gz' + depends = [] + site_packages_name = 'usb' + + patches = ['fix-android.patch'] + + +recipe = PyusbRecipe() diff --git a/p4a/pythonforandroidold/recipes/pyusb/fix-android.patch b/p4a/pythonforandroidold/recipes/pyusb/fix-android.patch new file mode 100644 index 0000000..a384584 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyusb/fix-android.patch @@ -0,0 +1,40 @@ +--- pyusb-1.0.0b1.orig/usb/backend/libusb1.py 2013-10-21 12:56:10.000000000 -0500 ++++ pyusb-1.0.0b1/usb/backend/libusb1.py 2014-12-08 16:49:07.141514148 -0600 +@@ -265,13 +265,7 @@ + + def _load_library(): + if sys.platform != 'cygwin': +- candidates = ('usb-1.0', 'libusb-1.0', 'usb') +- for candidate in candidates: +- if sys.platform == 'win32': +- candidate = candidate + '.dll' +- +- libname = ctypes.util.find_library(candidate) +- if libname is not None: break ++ libname = '/system/lib/libusb1.0.so' + else: + # corner cases + # cygwin predefines library names with 'cyg' instead of 'lib' +@@ -672,16 +666,21 @@ + + # implementation of libusb 1.0 backend + class _LibUSB(usb.backend.IBackend): ++ ++ ran_init = False ++ + @methodtrace(_logger) + def __init__(self, lib): + usb.backend.IBackend.__init__(self) + self.lib = lib + self.ctx = c_void_p() + _check(self.lib.libusb_init(byref(self.ctx))) ++ self.ran_init = True + + @methodtrace(_logger) + def __del__(self): +- self.lib.libusb_exit(self.ctx) ++ if self.ran_init is True: ++ self.lib.libusb_exit(self.ctx) + + + @methodtrace(_logger) diff --git a/p4a/pythonforandroid/recipes/pyyaml/__init__.py b/p4a/pythonforandroidold/recipes/pyyaml/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/pyyaml/__init__.py rename to p4a/pythonforandroidold/recipes/pyyaml/__init__.py diff --git a/p4a/pythonforandroidold/recipes/pyzbar/__init__.py b/p4a/pythonforandroidold/recipes/pyzbar/__init__.py new file mode 100644 index 0000000..ccfcd9b --- 
/dev/null +++ b/p4a/pythonforandroidold/recipes/pyzbar/__init__.py @@ -0,0 +1,26 @@ +from os.path import join +from pythonforandroid.recipe import PythonRecipe + + +class PyZBarRecipe(PythonRecipe): + + version = '0.1.7' + + url = 'https://github.com/NaturalHistoryMuseum/pyzbar/archive/v{version}.tar.gz' # noqa + + call_hostpython_via_targetpython = False + + depends = ['setuptools', 'libzbar'] + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super(PyZBarRecipe, self).get_recipe_env(arch, with_flags_in_cc) + libzbar = self.get_recipe('libzbar', self.ctx) + libzbar_dir = libzbar.get_build_dir(arch.arch) + env['PYTHON_ROOT'] = self.ctx.get_python_install_dir() + env['CFLAGS'] += ' -I' + join(libzbar_dir, 'include') + env['LDFLAGS'] += ' -L' + join(libzbar_dir, 'zbar', '.libs') + env['LIBS'] = env.get('LIBS', '') + ' -landroid -lzbar' + return env + + +recipe = PyZBarRecipe() diff --git a/p4a/pythonforandroidold/recipes/pyzmq/__init__.py b/p4a/pythonforandroidold/recipes/pyzmq/__init__.py new file mode 100644 index 0000000..5f9614d --- /dev/null +++ b/p4a/pythonforandroidold/recipes/pyzmq/__init__.py @@ -0,0 +1,59 @@ +# coding=utf-8 + +from pythonforandroid.recipe import CythonRecipe, Recipe +from os.path import join +from pythonforandroid.util import current_directory +import sh +from pythonforandroid.logger import shprint +import glob + + +class PyZMQRecipe(CythonRecipe): + name = 'pyzmq' + version = 'master' + url = 'https://github.com/zeromq/pyzmq/archive/{version}.zip' + site_packages_name = 'zmq' + depends = ['libzmq'] + cython_args = ['-Izmq/utils', + '-Izmq/backend/cython', + '-Izmq/devices'] + + def get_recipe_env(self, arch=None): + env = super(PyZMQRecipe, self).get_recipe_env(arch) + # TODO: fix hardcoded path + # This is required to prevent an issue with the _io.so import.
+ # hostpython = self.get_recipe('hostpython2', self.ctx) + # env['PYTHONPATH'] = ( + # join(hostpython.get_build_dir(arch.arch), 'build', + # 'lib.linux-x86_64-2.7') + ':' + env.get('PYTHONPATH', '') + # ) + # env["LDSHARED"] = env["CC"] + ' -shared' + return env + + def build_cython_components(self, arch): + libzmq_recipe = Recipe.get_recipe('libzmq', self.ctx) + libzmq_prefix = join(libzmq_recipe.get_build_dir(arch.arch), "install") + self.setup_extra_args = ["--zmq={}".format(libzmq_prefix)] + self.build_cmd = "configure" + + env = self.get_recipe_env(arch) + setup_cfg = join(self.get_build_dir(arch.arch), "setup.cfg") + # Open in text mode: the template below is a str, and writing it to a + # binary-mode file would raise a TypeError under Python 3. + with open(setup_cfg, "w") as fd: + fd.write(""" +[global] +zmq_prefix = {} +skip_check_zmq = True +""".format(libzmq_prefix)) + + return super(PyZMQRecipe, self).build_cython_components(arch) + + # NOTE: the block below follows the early return and is unreachable; it + # is the previous manual configure/build flow, kept only for reference. + with current_directory(self.get_build_dir(arch.arch)): + hostpython = sh.Command(self.hostpython_location) + shprint(hostpython, 'setup.py', 'configure', '-v', _env=env) + shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env) + build_dir = glob.glob('build/lib.*')[0] + shprint(sh.find, build_dir, '-name', '"*.o"', '-exec', + env['STRIP'], '{}', ';', _env=env) + + +recipe = PyZMQRecipe() diff --git a/p4a/pythonforandroidold/recipes/regex/__init__.py b/p4a/pythonforandroidold/recipes/regex/__init__.py new file mode 100644 index 0000000..9533905 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/regex/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class RegexRecipe(CompiledComponentsPythonRecipe): + name = 'regex' + version = '2017.07.28' + url = 'https://pypi.python.org/packages/d1/23/5fa829706ee1d4452552eb32e0bfc1039553e01f50a8754c6f7152e85c1b/regex-{version}.tar.gz' + + depends = ['setuptools'] + + +recipe = RegexRecipe()
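A note on the pyzmq hunk above: rather than passing paths on the command line, the recipe writes a setup.cfg next to pyzmq's setup.py and then delegates to the base CythonRecipe. pyzmq's own build reads zmq_prefix from the [global] section, and skip_check_zmq stops it from attempting a libzmq version check that cannot work against a cross-compiled library. A self-contained sketch of that mechanism (the prefix path below is illustrative, not a real build path):

    # Recreate the setup.cfg that the pyzmq recipe generates before building.
    libzmq_prefix = "/tmp/p4a-build/libzmq/arm64-v8a/install"  # illustrative

    with open("setup.cfg", "w") as fd:  # text mode, as in the fixed recipe
        fd.write(
            "[global]\n"
            "zmq_prefix = {}\n"
            "skip_check_zmq = True\n".format(libzmq_prefix)
        )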
diff --git a/p4a/pythonforandroidold/recipes/reportlab/__init__.py b/p4a/pythonforandroidold/recipes/reportlab/__init__.py new file mode 100644 index 0000000..d5e8001 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/reportlab/__init__.py @@ -0,0 +1,55 @@ +import os +import sh +from pythonforandroid.recipe import CompiledComponentsPythonRecipe +from pythonforandroid.util import (current_directory, ensure_dir) +from pythonforandroid.logger import (info, shprint) + + +class ReportLabRecipe(CompiledComponentsPythonRecipe): + version = 'c088826211ca' + url = 'https://bitbucket.org/rptlab/reportlab/get/{version}.tar.gz' + depends = ['freetype'] + call_hostpython_via_targetpython = False + + def prebuild_arch(self, arch): + if not self.is_patched(arch): + super(ReportLabRecipe, self).prebuild_arch(arch) + recipe_dir = self.get_build_dir(arch.arch) + + # Some versions of reportlab ship with a GPL-licensed font. + # Remove it, since this is problematic in .apks unless the + # entire app is GPL: + font_dir = os.path.join(recipe_dir, + "src", "reportlab", "fonts") + if os.path.exists(font_dir): + for font_file in os.listdir(font_dir): + if font_file.lower().startswith('darkgarden'): + os.remove(os.path.join(font_dir, font_file)) + + # Apply patches: + self.apply_patch('patches/fix-setup.patch', arch.arch) + shprint(sh.touch, os.path.join(recipe_dir, '.patched')) + ft = self.get_recipe('freetype', self.ctx) + ft_dir = ft.get_build_dir(arch.arch) + ft_lib_dir = os.environ.get('_FT_LIB_', os.path.join(ft_dir, 'objs', '.libs')) + ft_inc_dir = os.environ.get('_FT_INC_', os.path.join(ft_dir, 'include')) + tmp_dir = os.path.normpath(os.path.join(recipe_dir, "..", "..", "tmp")) + info('reportlab recipe: recipe_dir={}'.format(recipe_dir)) + info('reportlab recipe: tmp_dir={}'.format(tmp_dir)) + info('reportlab recipe: ft_dir={}'.format(ft_dir)) + info('reportlab recipe: ft_lib_dir={}'.format(ft_lib_dir)) + info('reportlab recipe: ft_inc_dir={}'.format(ft_inc_dir)) + with current_directory(recipe_dir): + ensure_dir(tmp_dir) + pfbfile = os.path.join(tmp_dir, "pfbfer-20070710.zip") + if not os.path.isfile(pfbfile): + sh.wget("http://www.reportlab.com/ftp/pfbfer-20070710.zip", "-O", pfbfile) + sh.unzip("-u", "-d", os.path.join(recipe_dir, "src", "reportlab", "fonts"), pfbfile) + if os.path.isfile("setup.py"): + with open('setup.py', 'r') as f: + text = f.read().replace('_FT_LIB_', ft_lib_dir).replace('_FT_INC_', ft_inc_dir) + with open('setup.py', 'w') as f: + f.write(text) + + +recipe = ReportLabRecipe() diff --git a/p4a/pythonforandroidold/recipes/reportlab/patches/fix-setup.patch b/p4a/pythonforandroidold/recipes/reportlab/patches/fix-setup.patch new file mode 100644 index 0000000..eae3c1e --- /dev/null +++ b/p4a/pythonforandroidold/recipes/reportlab/patches/fix-setup.patch @@ -0,0 +1,89 @@ +diff -r 9ecdf084933c setup.py +--- a/setup.py Wed May 13 14:09:03 2015 +0100 ++++ b/setup.py Fri May 22 10:14:29 2015 +0100 +@@ -14,8 +14,8 @@ + #no-download-t1-files=yes + #ignore-system-libart=yes + # if used on command line the config values are not used +-dlt1 = not specialOption('--no-download-t1-files') +-isla = specialOption('--ignore-system-libart') ++dlt1 = False ++isla = True + + try: + import configparser +@@ -121,39 +121,6 @@ + else: + P.insert(x, d) + +-class inc_lib_dirs: +- L = None +- I = None +- def __call__(self): +- if self.L is None: +- L = [] +- I = [] +- if platform == "cygwin": +- aDir(L, os.path.join("/usr/lib", "python%s" % sys.version[:3], "config")) +- elif platform == "darwin": +- # attempt to make sure we pick freetype2 over other versions +- aDir(I, "/sw/include/freetype2") +- aDir(I, "/sw/lib/freetype2/include") +- # fink installation directories +- aDir(L, "/sw/lib") +- aDir(I, "/sw/include") +- # darwin ports installation directories +- aDir(L, "/opt/local/lib") +- aDir(I, "/opt/local/include") +- aDir(I, "/usr/local/include") +- aDir(L, "/usr/local/lib") +- aDir(I, "/usr/include") +- aDir(L, "/usr/lib") +- aDir(I, "/usr/include/freetype2") +- prefix = sysconfig.get_config_var("prefix") +- if prefix: +- aDir(L, pjoin(prefix, "lib")) +- aDir(I, pjoin(prefix, "include")) +- self.L=L +- self.I=I +- return self.I,self.L +-inc_lib_dirs=inc_lib_dirs() +- + def getVersionFromCCode(fn): + import re + tag = re.search(r'^#define\s+VERSION\s+"([^"]*)"',open(fn,'r').read(),re.M) +@@ -244,11 +211,7 @@ + ] + + def get_fonts(PACKAGE_DIR, reportlab_files): +- import sys, os, os.path, zipfile, io +- if isPy3: +- import
urllib.request as ureq +- else: +- import urllib2 as ureq ++ import os, os.path + rl_dir = PACKAGE_DIR['reportlab'] + if not [x for x in reportlab_files if not os.path.isfile(pjoin(rl_dir,x))]: + infoline("Standard T1 font curves already downloaded") +@@ -257,6 +220,11 @@ + infoline('not downloading T1 font curve files') + return + try: ++ if isPy3: ++ import urllib.request as ureq ++ else: ++ import urllib2 as ureq ++ import zipfile, io + infoline("Downloading standard T1 font curves") + + remotehandle = ureq.urlopen("http://www.reportlab.com/ftp/pfbfer-20070710.zip") +@@ -448,7 +416,8 @@ + FT_LIB_DIR=[FT_LIB_DIR] if FT_LIB_DIR else [] + FT_INC_DIR=config('FREETYPE_PATHS','inc') + FT_INC_DIR=[FT_INC_DIR] if FT_INC_DIR else [] +- I,L=inc_lib_dirs() ++ I=["_FT_INC_"] ++ L=["_FT_LIB_"] + ftv = None + for d in I: + if isfile(pjoin(d, "ft2build.h")): diff --git a/p4a/pythonforandroid/recipes/requests/__init__.py b/p4a/pythonforandroidold/recipes/requests/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/requests/__init__.py rename to p4a/pythonforandroidold/recipes/requests/__init__.py diff --git a/p4a/pythonforandroidold/recipes/ruamel.yaml/__init__.py b/p4a/pythonforandroidold/recipes/ruamel.yaml/__init__.py new file mode 100644 index 0000000..5965afa --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ruamel.yaml/__init__.py @@ -0,0 +1,13 @@ +from pythonforandroid.recipe import PythonRecipe + + +class RuamelYamlRecipe(PythonRecipe): + version = '0.15.77' + url = 'https://pypi.python.org/packages/source/r/ruamel.yaml/ruamel.yaml-{version}.tar.gz' + depends = ['setuptools'] + site_packages_name = 'ruamel' + call_hostpython_via_targetpython = False + patches = ['disable-pip-req.patch'] + + +recipe = RuamelYamlRecipe() diff --git a/p4a/pythonforandroidold/recipes/ruamel.yaml/disable-pip-req.patch b/p4a/pythonforandroidold/recipes/ruamel.yaml/disable-pip-req.patch new file mode 100644 index 0000000..b033774 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ruamel.yaml/disable-pip-req.patch @@ -0,0 +1,11 @@ +--- setup.py 2018-11-11 18:27:31.936424140 +0100 ++++ b/setup.py 2018-11-11 18:28:19.873507071 +0100 +@@ -396,7 +396,7 @@ + sys.exit(0) + if not os.environ.get('RUAMEL_NO_PIP_INSTALL_CHECK', False): + print('error: you have to install with "pip install ."') +- sys.exit(1) ++ # sys.exit(1) + # If you only support an extension module on Linux, Windows thinks it + # is pure. That way you would get pure python .whl files that take + # precedence for downloading on Linux over source with compilable C code diff --git a/p4a/pythonforandroidold/recipes/scrypt/__init__.py b/p4a/pythonforandroidold/recipes/scrypt/__init__.py new file mode 100644 index 0000000..26b8048 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/scrypt/__init__.py @@ -0,0 +1,26 @@ +from pythonforandroid.recipe import CythonRecipe + + +class ScryptRecipe(CythonRecipe): + + version = '0.8.6' + url = 'https://bitbucket.org/mhallin/py-scrypt/get/v{version}.zip' + depends = ['setuptools', 'openssl'] + call_hostpython_via_targetpython = False + patches = ["remove_librt.patch"] + + def get_recipe_env(self, arch, with_flags_in_cc=True): + """ + Adds openssl recipe to include and library path. 
+ """ + env = super(ScryptRecipe, self).get_recipe_env(arch, with_flags_in_cc) + openssl_recipe = self.get_recipe('openssl', self.ctx) + env['CFLAGS'] += openssl_recipe.include_flags(arch) + env['LDFLAGS'] += ' -L{}'.format(self.ctx.get_libs_dir(arch.arch)) + env['LDFLAGS'] += ' -L{}'.format(self.ctx.libs_dir) + env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) + env['LIBS'] = env.get('LIBS', '') + openssl_recipe.link_libs_flags() + return env + + +recipe = ScryptRecipe() diff --git a/p4a/pythonforandroidold/recipes/scrypt/remove_librt.patch b/p4a/pythonforandroidold/recipes/scrypt/remove_librt.patch new file mode 100644 index 0000000..270bab2 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/scrypt/remove_librt.patch @@ -0,0 +1,20 @@ +--- a/setup.py 2018-05-06 23:25:08.757522119 +0200 ++++ b/setup.py 2018-05-06 23:25:30.269797365 +0200 +@@ -15,7 +15,6 @@ + + if sys.platform.startswith('linux'): + define_macros = [('HAVE_CLOCK_GETTIME', '1'), +- ('HAVE_LIBRT', '1'), + ('HAVE_POSIX_MEMALIGN', '1'), + ('HAVE_STRUCT_SYSINFO', '1'), + ('HAVE_STRUCT_SYSINFO_MEM_UNIT', '1'), +@@ -23,8 +22,7 @@ + ('HAVE_SYSINFO', '1'), + ('HAVE_SYS_SYSINFO_H', '1'), + ('_FILE_OFFSET_BITS', '64')] +- libraries = ['crypto', 'rt'] +- includes = ['/usr/local/include', '/usr/include'] ++ libraries = ['crypto'] + CFLAGS.append('-O2') + elif sys.platform.startswith('win32'): + define_macros = [('inline', '__inline')] diff --git a/p4a/pythonforandroid/recipes/sdl/__init__.py b/p4a/pythonforandroidold/recipes/sdl/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/sdl/__init__.py rename to p4a/pythonforandroidold/recipes/sdl/__init__.py diff --git a/p4a/pythonforandroidold/recipes/sdl2/__init__.py b/p4a/pythonforandroidold/recipes/sdl2/__init__.py new file mode 100644 index 0000000..bbfadc2 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sdl2/__init__.py @@ -0,0 +1,29 @@ +from pythonforandroid.recipe import BootstrapNDKRecipe +from pythonforandroid.toolchain import current_directory, shprint +import sh + + +class LibSDL2Recipe(BootstrapNDKRecipe): + version = "2.0.9" + url = "https://www.libsdl.org/release/SDL2-{version}.tar.gz" + md5sum = 'f2ecfba915c54f7200f504d8b48a5dfe' + + dir_name = 'SDL' + + depends = ['sdl2_image', 'sdl2_mixer', 'sdl2_ttf'] + conflicts = ['sdl', 'pygame', 'pygame_bootstrap_components'] + + def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=True): + env = super(LibSDL2Recipe, self).get_recipe_env( + arch=arch, with_flags_in_cc=with_flags_in_cc, with_python=with_python) + env['APP_ALLOW_MISSING_DEPS'] = 'true' + return env + + def build_arch(self, arch): + env = self.get_recipe_env(arch) + + with current_directory(self.get_jni_dir()): + shprint(sh.ndk_build, "V=1", _env=env) + + +recipe = LibSDL2Recipe() diff --git a/p4a/pythonforandroidold/recipes/sdl2_image/__init__.py b/p4a/pythonforandroidold/recipes/sdl2_image/__init__.py new file mode 100644 index 0000000..920b3ae --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sdl2_image/__init__.py @@ -0,0 +1,14 @@ +from pythonforandroid.recipe import BootstrapNDKRecipe + + +class LibSDL2Image(BootstrapNDKRecipe): + version = '2.0.4' + url = 'https://www.libsdl.org/projects/SDL_image/release/SDL2_image-{version}.tar.gz' + dir_name = 'SDL2_image' + + patches = ['toggle_jpg_png_webp.patch', + 'extra_cflags.patch', + ] + + +recipe = LibSDL2Image() diff --git a/p4a/pythonforandroid/recipes/sdl2_image/add_ndk_platform_include_dir.patch 
b/p4a/pythonforandroidold/recipes/sdl2_image/add_ndk_platform_include_dir.patch similarity index 100% rename from p4a/pythonforandroid/recipes/sdl2_image/add_ndk_platform_include_dir.patch rename to p4a/pythonforandroidold/recipes/sdl2_image/add_ndk_platform_include_dir.patch diff --git a/p4a/pythonforandroid/recipes/sdl2_image/extra_cflags.patch b/p4a/pythonforandroidold/recipes/sdl2_image/extra_cflags.patch similarity index 100% rename from p4a/pythonforandroid/recipes/sdl2_image/extra_cflags.patch rename to p4a/pythonforandroidold/recipes/sdl2_image/extra_cflags.patch diff --git a/p4a/pythonforandroid/recipes/sdl2_image/toggle_jpg_png_webp.patch b/p4a/pythonforandroidold/recipes/sdl2_image/toggle_jpg_png_webp.patch similarity index 100% rename from p4a/pythonforandroid/recipes/sdl2_image/toggle_jpg_png_webp.patch rename to p4a/pythonforandroidold/recipes/sdl2_image/toggle_jpg_png_webp.patch diff --git a/p4a/pythonforandroidold/recipes/sdl2_mixer/__init__.py b/p4a/pythonforandroidold/recipes/sdl2_mixer/__init__.py new file mode 100644 index 0000000..1a8e0a9 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sdl2_mixer/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import BootstrapNDKRecipe + + +class LibSDL2Mixer(BootstrapNDKRecipe): + version = '2.0.1' + url = 'https://www.libsdl.org/projects/SDL_mixer/release/SDL2_mixer-{version}.tar.gz' + dir_name = 'SDL2_mixer' + + patches = ['toggle_modplug_mikmod_smpeg_ogg.patch'] + + +recipe = LibSDL2Mixer() diff --git a/p4a/pythonforandroid/recipes/sdl2_mixer/toggle_modplug_mikmod_smpeg_ogg.patch b/p4a/pythonforandroidold/recipes/sdl2_mixer/toggle_modplug_mikmod_smpeg_ogg.patch similarity index 100% rename from p4a/pythonforandroid/recipes/sdl2_mixer/toggle_modplug_mikmod_smpeg_ogg.patch rename to p4a/pythonforandroidold/recipes/sdl2_mixer/toggle_modplug_mikmod_smpeg_ogg.patch diff --git a/p4a/pythonforandroidold/recipes/sdl2_ttf/__init__.py b/p4a/pythonforandroidold/recipes/sdl2_ttf/__init__.py new file mode 100644 index 0000000..2d0a629 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sdl2_ttf/__init__.py @@ -0,0 +1,10 @@ +from pythonforandroid.recipe import BootstrapNDKRecipe + + +class LibSDL2TTF(BootstrapNDKRecipe): + version = '2.0.14' + url = 'https://www.libsdl.org/projects/SDL_ttf/release/SDL2_ttf-{version}.tar.gz' + dir_name = 'SDL2_ttf' + + +recipe = LibSDL2TTF() diff --git a/p4a/pythonforandroidold/recipes/secp256k1/__init__.py b/p4a/pythonforandroidold/recipes/secp256k1/__init__.py new file mode 100644 index 0000000..8898031 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/secp256k1/__init__.py @@ -0,0 +1,30 @@ +import os +from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe + + +class Secp256k1Recipe(CppCompiledComponentsPythonRecipe): + + version = '0.13.2.4' + url = 'https://github.com/ludbb/secp256k1-py/archive/{version}.tar.gz' + + call_hostpython_via_targetpython = False + + depends = [ + 'openssl', ('hostpython3', 'hostpython2', 'hostpython3crystax'), + ('python2', 'python3', 'python3crystax'), 'setuptools', + 'libffi', 'cffi', 'libsecp256k1'] + + patches = [ + "cross_compile.patch", "drop_setup_requires.patch", + "pkg-config.patch", "find_lib.patch", "no-download.patch"] + + def get_recipe_env(self, arch=None): + env = super(Secp256k1Recipe, self).get_recipe_env(arch) + libsecp256k1 = self.get_recipe('libsecp256k1', self.ctx) + libsecp256k1_dir = libsecp256k1.get_build_dir(arch.arch) + env['CFLAGS'] += ' -I' + os.path.join(libsecp256k1_dir, 'include') + env['LDFLAGS'] += ' -L{} 
-lsecp256k1'.format(libsecp256k1_dir) + return env + + +recipe = Secp256k1Recipe() diff --git a/p4a/pythonforandroidold/recipes/secp256k1/cross_compile.patch b/p4a/pythonforandroidold/recipes/secp256k1/cross_compile.patch new file mode 100644 index 0000000..bfef228 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/secp256k1/cross_compile.patch @@ -0,0 +1,12 @@ +diff --git a/setup.py b/setup.py +index bba4bce..b86b369 100644 +--- a/setup.py ++++ b/setup.py +@@ -191,6 +192,7 @@ class build_clib(_build_clib): + "--disable-dependency-tracking", + "--with-pic", + "--enable-module-recovery", ++ "--host=%s" % os.environ['TOOLCHAIN_PREFIX'], + "--prefix", + os.path.abspath(self.build_clib), + ] diff --git a/p4a/pythonforandroidold/recipes/secp256k1/drop_setup_requires.patch b/p4a/pythonforandroidold/recipes/secp256k1/drop_setup_requires.patch new file mode 100644 index 0000000..3be0293 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/secp256k1/drop_setup_requires.patch @@ -0,0 +1,12 @@ +diff --git a/setup.py b/setup.py +index bba4bce..bfffbbc 100644 +--- a/setup.py ++++ b/setup.py +@@ -263,7 +263,6 @@ setup( + author_email='lud@tutanota.com', + license='MIT', + +- setup_requires=['cffi>=1.3.0', 'pytest-runner==2.6.2'], + install_requires=['cffi>=1.3.0'], + tests_require=['pytest==2.8.7'], + diff --git a/p4a/pythonforandroidold/recipes/secp256k1/find_lib.patch b/p4a/pythonforandroidold/recipes/secp256k1/find_lib.patch new file mode 100644 index 0000000..87997d5 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/secp256k1/find_lib.patch @@ -0,0 +1,13 @@ +diff --git a/setup_support.py b/setup_support.py +index 68a2a7f..b84f420 100644 +--- a/setup_support.py ++++ b/setup_support.py +@@ -68,6 +68,8 @@ def build_flags(library, type_, path): + + + def _find_lib(): ++ # we're picking up the recipe one ++ return True + from cffi import FFI + ffi = FFI() + try: diff --git a/p4a/pythonforandroidold/recipes/secp256k1/no-download.patch b/p4a/pythonforandroidold/recipes/secp256k1/no-download.patch new file mode 100644 index 0000000..e905a39 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/secp256k1/no-download.patch @@ -0,0 +1,13 @@ +diff --git a/setup.py b/setup.py +index bba4bce..5ea0228 100644 +--- a/setup.py ++++ b/setup.py +@@ -55,6 +55,8 @@ except OSError: + + + def download_library(command): ++ # we will use the custom libsecp256k1 recipe ++ return + if command.dry_run: + return + libdir = absolute("libsecp256k1") diff --git a/p4a/pythonforandroidold/recipes/secp256k1/pkg-config.patch b/p4a/pythonforandroidold/recipes/secp256k1/pkg-config.patch new file mode 100644 index 0000000..bb1e344 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/secp256k1/pkg-config.patch @@ -0,0 +1,28 @@ +diff --git a/setup.py b/setup.py +index bba4bce..609481c 100644 +--- a/setup.py ++++ b/setup.py +@@ -48,10 +48,7 @@ if [int(i) for i in setuptools_version.split('.')] < [3, 3]: + try: + subprocess.check_call(['pkg-config', '--version']) + except OSError: +- raise SystemExit( +- "'pkg-config' is required to install this package. " +- "Please see the README for details." 
+- ) ++ pass + + + def download_library(command): +diff --git a/setup_support.py b/setup_support.py +index 68a2a7f..ccbafac 100644 +--- a/setup_support.py ++++ b/setup_support.py +@@ -40,6 +40,7 @@ def absolute(*paths): + + def build_flags(library, type_, path): + """Return separated build flags from pkg-config output""" ++ return [] + + pkg_config_path = [path] + if "PKG_CONFIG_PATH" in os.environ: diff --git a/p4a/pythonforandroidold/recipes/setuptools/__init__.py b/p4a/pythonforandroidold/recipes/setuptools/__init__.py new file mode 100644 index 0000000..6a4b650 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/setuptools/__init__.py @@ -0,0 +1,15 @@ +from pythonforandroid.recipe import PythonRecipe + + +class SetuptoolsRecipe(PythonRecipe): + version = '40.0.0' + url = 'https://pypi.python.org/packages/source/s/setuptools/setuptools-{version}.zip' + call_hostpython_via_targetpython = False + install_in_hostpython = True + depends = [('python2', 'python2legacy', 'python3', 'python3crystax')] + # This recipe seems to influence the dependency graph: if it is removed, + # the python2legacy recipe fails to resolve the dependency order when + # using the sdl2 bootstrap, so be careful about removing this line. + + +recipe = SetuptoolsRecipe() diff --git a/p4a/pythonforandroidold/recipes/shapely/__init__.py b/p4a/pythonforandroidold/recipes/shapely/__init__.py new file mode 100644 index 0000000..e0b0937 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/shapely/__init__.py @@ -0,0 +1,22 @@ +from pythonforandroid.recipe import Recipe, CythonRecipe + + +class ShapelyRecipe(CythonRecipe): + version = '1.5' + url = 'https://github.com/Toblerity/Shapely/archive/master.zip' + depends = ['setuptools', 'libgeos'] + call_hostpython_via_targetpython = False + + patches = ['setup.patch'] # Patch to force setup to fail when the C extension fails to build + + # setup_extra_args = ['sdist'] # Don't force Cython + + def get_recipe_env(self, arch, with_flags_in_cc=True): + """Add the libgeos headers to the include path.""" + env = super(ShapelyRecipe, self).get_recipe_env(arch, with_flags_in_cc) + libgeos_dir = Recipe.get_recipe('libgeos', self.ctx).get_build_dir(arch.arch) + env['CFLAGS'] += " -I{}/dist/include".format(libgeos_dir) + return env + + +recipe = ShapelyRecipe() diff --git a/p4a/pythonforandroidold/recipes/shapely/setup.patch b/p4a/pythonforandroidold/recipes/shapely/setup.patch new file mode 100644 index 0000000..9523f35 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/shapely/setup.patch @@ -0,0 +1,12 @@ +*** shapely/setup.py 2016-06-29 11:29:49.000000000 -0400 +--- b/setup.py 2016-07-09 01:51:37.759670990 -0400 +*************** +*** 359,364 **** +--- 359,365 ---- + construct_build_ext(existing_build_ext) + setup(ext_modules=ext_modules, **setup_args) + except BuildFailed as ex: ++ raise # Force python only build to fail + BUILD_EXT_WARNING = "The C extension could not be compiled, " \ "speedups are not enabled." + log.warn(ex)
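The setup.patch just above exists because shapely's setup.py catches BuildFailed and retries as a pure-Python build; on Android that would silently produce a package without its compiled speedups, or mask a broken cross-compile entirely. A runnable sketch of the control flow the patch enforces (BuildFailed stands in for the class defined in shapely's setup.py):

    class BuildFailed(Exception):
        pass

    def build_c_extension():
        # simulate the cross-compiler rejecting the build
        raise BuildFailed("clang: error: unsupported option")

    try:
        build_c_extension()
    except BuildFailed:
        # unpatched: log a warning and retry without ext_modules;
        # patched: re-raise so python-for-android sees the failure
        raise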
diff --git a/p4a/pythonforandroid/recipes/simple-crypt/__init__.py b/p4a/pythonforandroidold/recipes/simple-crypt/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/simple-crypt/__init__.py rename to p4a/pythonforandroidold/recipes/simple-crypt/__init__.py diff --git a/p4a/pythonforandroidold/recipes/six/__init__.py b/p4a/pythonforandroidold/recipes/six/__init__.py new file mode 100644 index 0000000..91adc6c --- /dev/null +++ b/p4a/pythonforandroidold/recipes/six/__init__.py @@ -0,0 +1,14 @@ + +from pythonforandroid.recipe import PythonRecipe + + +class SixRecipe(PythonRecipe): + version = '1.9.0' + url = 'https://pypi.python.org/packages/source/s/six/six-{version}.tar.gz' + depends = [('python2', 'python2legacy', 'python3', 'python3crystax')] + # This recipe seems to influence the dependency graph: if it is removed, + # the python2legacy recipe fails to resolve the dependency order when + # using the pygame bootstrap, so be careful about removing this line. + + +recipe = SixRecipe() diff --git a/p4a/pythonforandroidold/recipes/snappy/__init__.py b/p4a/pythonforandroidold/recipes/snappy/__init__.py new file mode 100644 index 0000000..4ca61a2 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/snappy/__init__.py @@ -0,0 +1,13 @@ +from pythonforandroid.toolchain import Recipe + + +class SnappyRecipe(Recipe): + version = '1.1.3' + url = 'https://github.com/google/snappy/releases/download/{version}/snappy-{version}.tar.gz' + + def should_build(self, arch): + # Only downloaded so the leveldb recipe can use the sources + return False + + +recipe = SnappyRecipe() diff --git a/p4a/pythonforandroidold/recipes/spine/__init__.py b/p4a/pythonforandroidold/recipes/spine/__init__.py new file mode 100644 index 0000000..009a919 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/spine/__init__.py @@ -0,0 +1,14 @@ +from pythonforandroid.recipe import CythonRecipe + + +class SpineCython(CythonRecipe): + + version = '0.5.1' + url = 'https://github.com/tileworks/spine-cython/archive/{version}.zip' + name = 'spine' + depends = ['setuptools'] + site_packages_name = 'spine' + call_hostpython_via_targetpython = False + + +recipe = SpineCython() diff --git a/p4a/pythonforandroidold/recipes/sqlalchemy/__init__.py b/p4a/pythonforandroidold/recipes/sqlalchemy/__init__.py new file mode 100644 index 0000000..974667a --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sqlalchemy/__init__.py @@ -0,0 +1,14 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class SQLAlchemyRecipe(CompiledComponentsPythonRecipe): + name = 'sqlalchemy' + version = '1.0.9' + url = 'https://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-{version}.tar.gz' + + depends = ['setuptools'] + + patches = ['zipsafe.patch'] + + +recipe = SQLAlchemyRecipe() diff --git a/p4a/pythonforandroidold/recipes/sqlalchemy/zipsafe.patch b/p4a/pythonforandroidold/recipes/sqlalchemy/zipsafe.patch new file mode 100644 index 0000000..1820d09 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sqlalchemy/zipsafe.patch @@ -0,0 +1,12 @@ +diff --git a/setup.py b/setup.py +index 09b524c..1e65772 100644 +--- a/setup.py ++++ b/setup.py +@@ -125,6 +125,7 @@ def run_setup(with_cext): + setup(name="SQLAlchemy", + version=VERSION, + description="Database Abstraction Library", ++ zip_safe=False, + author="Mike Bayer", + author_email="mike_mp@zzzcomputing.com", + url="http://www.sqlalchemy.org",
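For context on zipsafe.patch above: with zip_safe left unset, setuptools may decide to install the package as a zipped egg, and the import system cannot load compiled extension modules (.so files) out of a zip archive, so the C speedups would break at runtime on the device. The line the patch adds corresponds to this setup() argument (a minimal sketch, not the full upstream setup.py):

    from setuptools import setup

    setup(
        name="example-package-with-c-extensions",  # placeholder metadata
        version="0.0.0",
        zip_safe=False,  # force an unpacked install so .so modules can load
    )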
diff --git a/p4a/pythonforandroidold/recipes/sqlite3/Android.mk b/p4a/pythonforandroidold/recipes/sqlite3/Android.mk new file mode 100644 index 0000000..f52bc46 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sqlite3/Android.mk @@ -0,0 +1,11 @@ +LOCAL_PATH := $(call my-dir)/.. + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := sqlite3.c + +LOCAL_MODULE := sqlite3 + +LOCAL_CFLAGS := -DSQLITE_ENABLE_FTS4 -D_FILE_OFFSET_BITS=32 + +include $(BUILD_SHARED_LIBRARY) diff --git a/p4a/pythonforandroidold/recipes/sqlite3/__init__.py b/p4a/pythonforandroidold/recipes/sqlite3/__init__.py new file mode 100644 index 0000000..cfdcb0f --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sqlite3/__init__.py @@ -0,0 +1,35 @@ +from pythonforandroid.recipe import NDKRecipe +from pythonforandroid.toolchain import shutil +from os.path import join +import sh + + +class Sqlite3Recipe(NDKRecipe): + version = '3.15.1' + # Don't forget to change the URL when changing the version + url = 'https://www.sqlite.org/2016/sqlite-amalgamation-3150100.zip' + generated_libraries = ['sqlite3'] + + def should_build(self, arch): + return not self.has_libs(arch, 'libsqlite3.so') + + def prebuild_arch(self, arch): + super(Sqlite3Recipe, self).prebuild_arch(arch) + # Copy the Android make file + sh.mkdir('-p', join(self.get_build_dir(arch.arch), 'jni')) + shutil.copyfile(join(self.get_recipe_dir(), 'Android.mk'), + join(self.get_build_dir(arch.arch), 'jni/Android.mk')) + + def build_arch(self, arch, *extra_args): + super(Sqlite3Recipe, self).build_arch(arch) + # Copy the shared library + shutil.copyfile(join(self.get_build_dir(arch.arch), 'libs', arch.arch, 'libsqlite3.so'), + join(self.ctx.get_libs_dir(arch.arch), 'libsqlite3.so')) + + def get_recipe_env(self, arch): + env = super(Sqlite3Recipe, self).get_recipe_env(arch) + env['NDK_PROJECT_PATH'] = self.get_build_dir(arch.arch) + return env + + +recipe = Sqlite3Recipe() diff --git a/p4a/pythonforandroidold/recipes/storm/__init__.py b/p4a/pythonforandroidold/recipes/storm/__init__.py new file mode 100644 index 0000000..6b64465 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/storm/__init__.py @@ -0,0 +1,22 @@ +from pythonforandroid.recipe import PythonRecipe, current_directory, shprint +import sh + + +class StormRecipe(PythonRecipe): + version = '0.20' + url = 'https://launchpad.net/storm/trunk/{version}/+download/storm-{version}.tar.bz2' + depends = [] + site_packages_name = 'storm' + call_hostpython_via_targetpython = False + + def prebuild_arch(self, arch): + with current_directory(self.get_build_dir(arch.arch)): + # Cross-compiling for 32-bit on 64-bit Ubuntu releases before + # Precise fails.
See + # https://bugs.launchpad.net/ubuntu/+source/python2.7/+bug/873007 + shprint(sh.sed, '-i', + "s|BUILD_CEXTENSIONS = True|BUILD_CEXTENSIONS = False|", + 'setup.py') + + +recipe = StormRecipe() diff --git a/p4a/pythonforandroidold/recipes/sympy/__init__.py b/p4a/pythonforandroidold/recipes/sympy/__init__.py new file mode 100644 index 0000000..8684a95 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sympy/__init__.py @@ -0,0 +1,16 @@ + +from pythonforandroid.recipe import PythonRecipe + + +class SympyRecipe(PythonRecipe): + version = '1.1.1' + url = 'https://github.com/sympy/sympy/releases/download/sympy-{version}/sympy-{version}.tar.gz' + + depends = ['mpmath'] + + call_hostpython_via_targetpython = True + + patches = ['fix_timeutils.patch', 'fix_pretty_print.patch'] + + +recipe = SympyRecipe() diff --git a/p4a/pythonforandroidold/recipes/sympy/fix_android_detection.patch b/p4a/pythonforandroidold/recipes/sympy/fix_android_detection.patch new file mode 100644 index 0000000..964c3db --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sympy/fix_android_detection.patch @@ -0,0 +1,47 @@ +diff --git a/pip/download.py b/pip/download.py +index 54d3131..1aab70f 100644 +--- a/pip/download.py ++++ b/pip/download.py +@@ -89,23 +89,25 @@ def user_agent(): + # Complete Guess + data["implementation"]["version"] = platform.python_version() + +- if sys.platform.startswith("linux"): +- from pip._vendor import distro +- distro_infos = dict(filter( +- lambda x: x[1], +- zip(["name", "version", "id"], distro.linux_distribution()), +- )) +- libc = dict(filter( +- lambda x: x[1], +- zip(["lib", "version"], libc_ver()), +- )) +- if libc: +- distro_infos["libc"] = libc +- if distro_infos: +- data["distro"] = distro_infos +- +- if sys.platform.startswith("darwin") and platform.mac_ver()[0]: +- data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} ++ # if sys.platform.startswith("linux"): ++ # from pip._vendor import distro ++ # distro_infos = dict(filter( ++ # lambda x: x[1], ++ # zip(["name", "version", "id"], distro.linux_distribution()), ++ # )) ++ # libc = dict(filter( ++ # lambda x: x[1], ++ # zip(["lib", "version"], libc_ver()), ++ # )) ++ # if libc: ++ # distro_infos["libc"] = libc ++ # if distro_infos: ++ # data["distro"] = distro_infos ++ ++ # if sys.platform.startswith("darwin") and platform.mac_ver()[0]: ++ # data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} ++ ++ data['distro'] = {'name': 'Android'} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() diff --git a/p4a/pythonforandroidold/recipes/sympy/fix_pretty_print.patch b/p4a/pythonforandroidold/recipes/sympy/fix_pretty_print.patch new file mode 100644 index 0000000..f94cb22 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sympy/fix_pretty_print.patch @@ -0,0 +1,223 @@ +diff --git a/sympy/printing/pretty/pretty.py b/sympy/printing/pretty/pretty.py +index 604e97c..ddd3eb2 100644 +--- a/sympy/printing/pretty/pretty.py ++++ b/sympy/printing/pretty/pretty.py +@@ -166,14 +166,14 @@ class PrettyPrinter(Printer): + arg = e.args[0] + pform = self._print(arg) + if isinstance(arg, Equivalent): +- return self._print_Equivalent(arg, altchar=u"\N{NOT IDENTICAL TO}") ++ return self._print_Equivalent(arg, altchar=u"NOT IDENTICAL TO") + if isinstance(arg, Implies): +- return self._print_Implies(arg, altchar=u"\N{RIGHTWARDS ARROW WITH STROKE}") ++ return self._print_Implies(arg, altchar=u"RIGHTWARDS ARROW WITH STROKE") + + if arg.is_Boolean and not arg.is_Not: + pform = 
prettyForm(*pform.parens()) + +- return prettyForm(*pform.left(u"\N{NOT SIGN}")) ++ return prettyForm(*pform.left(u"NOT SIGN")) + else: + return self._print_Function(e) + +@@ -200,43 +200,43 @@ class PrettyPrinter(Printer): + + def _print_And(self, e): + if self._use_unicode: +- return self.__print_Boolean(e, u"\N{LOGICAL AND}") ++ return self.__print_Boolean(e, u"LOGICAL AND") + else: + return self._print_Function(e, sort=True) + + def _print_Or(self, e): + if self._use_unicode: +- return self.__print_Boolean(e, u"\N{LOGICAL OR}") ++ return self.__print_Boolean(e, u"LOGICAL OR") + else: + return self._print_Function(e, sort=True) + + def _print_Xor(self, e): + if self._use_unicode: +- return self.__print_Boolean(e, u"\N{XOR}") ++ return self.__print_Boolean(e, u"XOR") + else: + return self._print_Function(e, sort=True) + + def _print_Nand(self, e): + if self._use_unicode: +- return self.__print_Boolean(e, u"\N{NAND}") ++ return self.__print_Boolean(e, u"NAND") + else: + return self._print_Function(e, sort=True) + + def _print_Nor(self, e): + if self._use_unicode: +- return self.__print_Boolean(e, u"\N{NOR}") ++ return self.__print_Boolean(e, u"NOR") + else: + return self._print_Function(e, sort=True) + + def _print_Implies(self, e, altchar=None): + if self._use_unicode: +- return self.__print_Boolean(e, altchar or u"\N{RIGHTWARDS ARROW}", sort=False) ++ return self.__print_Boolean(e, altchar or u"RIGHTWARDS ARROW", sort=False) + else: + return self._print_Function(e) + + def _print_Equivalent(self, e, altchar=None): + if self._use_unicode: +- return self.__print_Boolean(e, altchar or u"\N{IDENTICAL TO}") ++ return self.__print_Boolean(e, altchar or u"IDENTICAL TO") + else: + return self._print_Function(e, sort=True) + +@@ -425,7 +425,7 @@ class PrettyPrinter(Printer): + if self._use_unicode: + # use unicode corners + horizontal_chr = xobj('-', 1) +- corner_chr = u'\N{BOX DRAWINGS LIGHT DOWN AND HORIZONTAL}' ++ corner_chr = u'BOX DRAWINGS LIGHT DOWN AND HORIZONTAL' + + func_height = pretty_func.height() + +@@ -580,7 +580,7 @@ class PrettyPrinter(Printer): + + LimArg = self._print(z) + if self._use_unicode: +- LimArg = prettyForm(*LimArg.right(u'\N{BOX DRAWINGS LIGHT HORIZONTAL}\N{RIGHTWARDS ARROW}')) ++ LimArg = prettyForm(*LimArg.right(u'BOX DRAWINGS LIGHT HORIZONTALRIGHTWARDS ARROW')) + else: + LimArg = prettyForm(*LimArg.right('->')) + LimArg = prettyForm(*LimArg.right(self._print(z0))) +@@ -589,7 +589,7 @@ class PrettyPrinter(Printer): + dir = "" + else: + if self._use_unicode: +- dir = u'\N{SUPERSCRIPT PLUS SIGN}' if str(dir) == "+" else u'\N{SUPERSCRIPT MINUS}' ++ dir = u'SUPERSCRIPT PLUS SIGN' if str(dir) == "+" else u'SUPERSCRIPT MINUS' + + LimArg = prettyForm(*LimArg.right(self._print(dir))) + +@@ -740,7 +740,7 @@ class PrettyPrinter(Printer): + def _print_Adjoint(self, expr): + pform = self._print(expr.arg) + if self._use_unicode: +- dag = prettyForm(u'\N{DAGGER}') ++ dag = prettyForm(u'DAGGER') + else: + dag = prettyForm('+') + from sympy.matrices import MatrixSymbol +@@ -850,8 +850,8 @@ class PrettyPrinter(Printer): + if '\n' in partstr: + tempstr = partstr + tempstr = tempstr.replace(vectstrs[i], '') +- tempstr = tempstr.replace(u'\N{RIGHT PARENTHESIS UPPER HOOK}', +- u'\N{RIGHT PARENTHESIS UPPER HOOK}' ++ tempstr = tempstr.replace(u'RIGHT PARENTHESIS UPPER HOOK', ++ u'RIGHT PARENTHESIS UPPER HOOK' + + ' ' + vectstrs[i]) + o1[i] = tempstr + o1 = [x.split('\n') for x in o1] +@@ -1153,7 +1153,7 @@ class PrettyPrinter(Printer): + def _print_Lambda(self, e): + vars, expr = e.args 
+ if self._use_unicode: +- arrow = u" \N{RIGHTWARDS ARROW FROM BAR} " ++ arrow = u" RIGHTWARDS ARROW FROM BAR " + else: + arrow = " -> " + if len(vars) == 1: +@@ -1173,7 +1173,7 @@ class PrettyPrinter(Printer): + elif len(expr.variables): + pform = prettyForm(*pform.right(self._print(expr.variables[0]))) + if self._use_unicode: +- pform = prettyForm(*pform.right(u" \N{RIGHTWARDS ARROW} ")) ++ pform = prettyForm(*pform.right(u" RIGHTWARDS ARROW ")) + else: + pform = prettyForm(*pform.right(" -> ")) + if len(expr.point) > 1: +@@ -1462,7 +1462,7 @@ class PrettyPrinter(Printer): + and expt is S.Half and bpretty.height() == 1 + and (bpretty.width() == 1 + or (base.is_Integer and base.is_nonnegative))): +- return prettyForm(*bpretty.left(u'\N{SQUARE ROOT}')) ++ return prettyForm(*bpretty.left(u'SQUARE ROOT')) + + # Construct root sign, start with the \/ shape + _zZ = xobj('/', 1) +@@ -1558,7 +1558,7 @@ class PrettyPrinter(Printer): + from sympy import Pow + return self._print(Pow(p.sets[0], len(p.sets), evaluate=False)) + else: +- prod_char = u"\N{MULTIPLICATION SIGN}" if self._use_unicode else 'x' ++ prod_char = u"MULTIPLICATION SIGN" if self._use_unicode else 'x' + return self._print_seq(p.sets, None, None, ' %s ' % prod_char, + parenthesize=lambda set: set.is_Union or + set.is_Intersection or set.is_ProductSet) +@@ -1570,7 +1570,7 @@ class PrettyPrinter(Printer): + def _print_Range(self, s): + + if self._use_unicode: +- dots = u"\N{HORIZONTAL ELLIPSIS}" ++ dots = u"HORIZONTAL ELLIPSIS" + else: + dots = '...' + +@@ -1641,7 +1641,7 @@ class PrettyPrinter(Printer): + + def _print_ImageSet(self, ts): + if self._use_unicode: +- inn = u"\N{SMALL ELEMENT OF}" ++ inn = u"SMALL ELEMENT OF" + else: + inn = 'in' + variables = self._print_seq(ts.lamda.variables) +@@ -1653,10 +1653,10 @@ class PrettyPrinter(Printer): + + def _print_ConditionSet(self, ts): + if self._use_unicode: +- inn = u"\N{SMALL ELEMENT OF}" ++ inn = u"SMALL ELEMENT OF" + # using _and because and is a keyword and it is bad practice to + # overwrite them +- _and = u"\N{LOGICAL AND}" ++ _and = u"LOGICAL AND" + else: + inn = 'in' + _and = 'and' +@@ -1677,7 +1677,7 @@ class PrettyPrinter(Printer): + + def _print_ComplexRegion(self, ts): + if self._use_unicode: +- inn = u"\N{SMALL ELEMENT OF}" ++ inn = u"SMALL ELEMENT OF" + else: + inn = 'in' + variables = self._print_seq(ts.variables) +@@ -1690,7 +1690,7 @@ class PrettyPrinter(Printer): + def _print_Contains(self, e): + var, set = e.args + if self._use_unicode: +- el = u" \N{ELEMENT OF} " ++ el = u" ELEMENT OF " + return prettyForm(*stringPict.next(self._print(var), + el, self._print(set)), binding=8) + else: +@@ -1698,7 +1698,7 @@ class PrettyPrinter(Printer): + + def _print_FourierSeries(self, s): + if self._use_unicode: +- dots = u"\N{HORIZONTAL ELLIPSIS}" ++ dots = u"HORIZONTAL ELLIPSIS" + else: + dots = '...' + return self._print_Add(s.truncate()) + self._print(dots) +@@ -1708,7 +1708,7 @@ class PrettyPrinter(Printer): + + def _print_SeqFormula(self, s): + if self._use_unicode: +- dots = u"\N{HORIZONTAL ELLIPSIS}" ++ dots = u"HORIZONTAL ELLIPSIS" + else: + dots = '...' 
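The sympy pretty-printer hunks above (and the fix_timeutils.patch that follows) may look like they deliberately break the output, replacing real unicode symbols with their plain-text names. The likely reason, stated here as an assumption rather than something recorded in the diff, is that \N{...} escapes are resolved through the unicodedata module, which the stripped-down Python builds used on Android did not always ship, so merely importing these sympy modules could fail. A small illustration:

    # On a full CPython both assignments work; on a build without the
    # unicodedata extension the first fails at compile/import time with
    #   SyntaxError: (unicode error) \N escapes not supported ...
    fancy = u"\N{LOGICAL AND}"  # resolves to u'\u2227' via unicodedata
    plain = u"LOGICAL AND"      # what the patched sympy prints instead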
+ diff --git a/p4a/pythonforandroidold/recipes/sympy/fix_timeutils.patch b/p4a/pythonforandroidold/recipes/sympy/fix_timeutils.patch new file mode 100644 index 0000000..c8424ea --- /dev/null +++ b/p4a/pythonforandroidold/recipes/sympy/fix_timeutils.patch @@ -0,0 +1,13 @@ +diff --git a/sympy/utilities/timeutils.py b/sympy/utilities/timeutils.py +index 3770d85..c53594e 100644 +--- a/sympy/utilities/timeutils.py ++++ b/sympy/utilities/timeutils.py +@@ -8,7 +8,7 @@ import math + from sympy.core.compatibility import range + + _scales = [1e0, 1e3, 1e6, 1e9] +-_units = [u's', u'ms', u'\N{GREEK SMALL LETTER MU}s', u'ns'] ++_units = [u's', u'ms', u'mus', u'ns'] + + + def timed(func, setup="pass", limit=None): diff --git a/p4a/pythonforandroidold/recipes/twisted/__init__.py b/p4a/pythonforandroidold/recipes/twisted/__init__.py new file mode 100644 index 0000000..ca22279 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/twisted/__init__.py @@ -0,0 +1,29 @@ +from pythonforandroid.recipe import CythonRecipe + + +class TwistedRecipe(CythonRecipe): + version = '17.9.0' + url = 'https://github.com/twisted/twisted/archive/twisted-{version}.tar.gz' + + depends = ['setuptools', 'zope_interface', 'incremental', 'constantly'] + patches = ['incremental.patch'] + + call_hostpython_via_targetpython = False + install_in_hostpython = False + + def prebuild_arch(self, arch): + super(TwistedRecipe, self).prebuild_arch(arch) + # TODO Need to whitelist tty.pyo and termios.so here + print('Should remove twisted tests etc. here, but skipping for now') + + def get_recipe_env(self, arch): + env = super(TwistedRecipe, self).get_recipe_env(arch) + # We add BUILDLIB_PATH to PYTHONPATH so twisted can find _io.so + env['PYTHONPATH'] = ':'.join([ + self.ctx.get_site_packages_dir(), + env['BUILDLIB_PATH'], + ]) + return env + + +recipe = TwistedRecipe() diff --git a/p4a/pythonforandroidold/recipes/twisted/incremental.patch b/p4a/pythonforandroidold/recipes/twisted/incremental.patch new file mode 100644 index 0000000..85e5307 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/twisted/incremental.patch @@ -0,0 +1,18 @@ +diff -Naur twisted-twisted-17.9.0/src/twisted/python/_setup.py twisted-twisted-17.9.0_patched/src/twisted/python/_setup.py +--- twisted-twisted-17.9.0/src/twisted/python/_setup.py 2017-09-23 07:56:08.000000000 +0200 ++++ twisted-twisted-17.9.0_patched/src/twisted/python/_setup.py 2018-10-05 11:06:23.305860722 +0200 +@@ -227,14 +227,11 @@ + requirements = ["zope.interface >= 3.6.0"] + + requirements.append("constantly >= 15.1") +- requirements.append("incremental >= 16.10.1") + requirements.append("Automat >= 0.3.0") + requirements.append("hyperlink >= 17.1.1") + + arguments.update(dict( + packages=find_packages("src"), +- use_incremental=True, +- setup_requires=["incremental >= 16.10.1"], + install_requires=requirements, + entry_points={ + 'console_scripts': _CONSOLE_SCRIPTS diff --git a/p4a/pythonforandroidold/recipes/ujson/__init__.py b/p4a/pythonforandroidold/recipes/ujson/__init__.py new file mode 100644 index 0000000..421e4d9 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/ujson/__init__.py @@ -0,0 +1,10 @@ +from pythonforandroid.recipe import CompiledComponentsPythonRecipe + + +class UJsonRecipe(CompiledComponentsPythonRecipe): + version = '1.35' + url = 'https://pypi.python.org/packages/source/u/ujson/ujson-{version}.tar.gz' + depends = [] + + +recipe = UJsonRecipe() diff --git a/p4a/pythonforandroidold/recipes/vispy/__init__.py b/p4a/pythonforandroidold/recipes/vispy/__init__.py new file mode 100644 
index 0000000..7ea046b --- /dev/null +++ b/p4a/pythonforandroidold/recipes/vispy/__init__.py @@ -0,0 +1,14 @@ +from pythonforandroid.recipe import PythonRecipe + + +class VispyRecipe(PythonRecipe): + version = '0.4.0' + url = 'https://github.com/vispy/vispy/archive/v{version}.tar.gz' + depends = ['numpy', 'pysdl2'] + patches = ['disable_freetype.patch', + 'disable_font_triage.patch', + 'use_es2.patch', + 'remove_ati_check.patch'] + + +recipe = VispyRecipe() diff --git a/p4a/pythonforandroidold/recipes/vispy/disable_font_triage.patch b/p4a/pythonforandroidold/recipes/vispy/disable_font_triage.patch new file mode 100644 index 0000000..512642a --- /dev/null +++ b/p4a/pythonforandroidold/recipes/vispy/disable_font_triage.patch @@ -0,0 +1,27 @@ +diff --git a/vispy/util/fonts/_triage.py b/vispy/util/fonts/_triage.py +index ddbc93d..324c161 100644 +--- a/vispy/util/fonts/_triage.py ++++ b/vispy/util/fonts/_triage.py +@@ -9,14 +9,14 @@ import sys + from ._vispy_fonts import _vispy_fonts + if sys.platform.startswith('linux'): + from ._freetype import _load_glyph +- from ...ext.fontconfig import _list_fonts +-elif sys.platform == 'darwin': +- from ._quartz import _load_glyph, _list_fonts +-elif sys.platform.startswith('win'): +- from ._freetype import _load_glyph # noqa, analysis:ignore +- from ._win32 import _list_fonts # noqa, analysis:ignore +-else: +- raise NotImplementedError('unknown system %s' % sys.platform) ++ # from ...ext.fontconfig import _list_fonts ++# elif sys.platform == 'darwin': ++# from ._quartz import _load_glyph, _list_fonts ++# elif sys.platform.startswith('win'): ++# from ._freetype import _load_glyph # noqa, analysis:ignore ++# from ._win32 import _list_fonts # noqa, analysis:ignore ++# else: ++# raise NotImplementedError('unknown system %s' % sys.platform) + + _fonts = {} + diff --git a/p4a/pythonforandroidold/recipes/vispy/disable_freetype.patch b/p4a/pythonforandroidold/recipes/vispy/disable_freetype.patch new file mode 100644 index 0000000..22f4089 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/vispy/disable_freetype.patch @@ -0,0 +1,31 @@ +diff --git a/vispy/util/fonts/_freetype.py b/vispy/util/fonts/_freetype.py +index 3b33d0b..229d559 100644 +--- a/vispy/util/fonts/_freetype.py ++++ b/vispy/util/fonts/_freetype.py +@@ -12,12 +12,12 @@ import numpy as np + + # Convert face to filename + from ._vispy_fonts import _vispy_fonts, _get_vispy_font_filename +-if sys.platform.startswith('linux'): +- from ...ext.fontconfig import find_font +-elif sys.platform.startswith('win'): +- from ._win32 import find_font # noqa, analysis:ignore +-else: +- raise NotImplementedError ++# if sys.platform.startswith('linux'): ++# from ...ext.fontconfig import find_font ++# elif sys.platform.startswith('win'): ++# from ._win32 import find_font # noqa, analysis:ignore ++# else: ++# raise NotImplementedError + + _font_dict = {} + +@@ -41,6 +41,7 @@ def _load_font(face, bold, italic): + + def _load_glyph(f, char, glyphs_dict): + """Load glyph from font into dict""" ++ return + from ...ext.freetype import (FT_LOAD_RENDER, FT_LOAD_NO_HINTING, + FT_LOAD_NO_AUTOHINT) + flags = FT_LOAD_RENDER | FT_LOAD_NO_HINTING | FT_LOAD_NO_AUTOHINT diff --git a/p4a/pythonforandroidold/recipes/vispy/remove_ati_check.patch b/p4a/pythonforandroidold/recipes/vispy/remove_ati_check.patch new file mode 100644 index 0000000..f8df633 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/vispy/remove_ati_check.patch @@ -0,0 +1,34 @@ +diff --git a/vispy/gloo/glir.py b/vispy/gloo/glir.py +index 67419b5..341c13d 100644 +--- a/vispy/gloo/glir.py ++++ b/vispy/gloo/glir.py +@@ -878,19 +878,19 @@ class GlirBuffer(GlirObject): + self.activate() + nbytes = data.nbytes + +- # Determine whether to check errors to try handling the ATI bug +- check_ati_bug = ((not self._bufferSubDataOk) and +- (gl.current_backend is gl.gl2) and +- sys.platform.startswith('win')) +- +- # flush any pending errors +- if check_ati_bug: +- gl.check_error('periodic check') ++ # # Determine whether to check errors to try handling the ATI bug ++ # check_ati_bug = ((not self._bufferSubDataOk) and ++ # (gl.current_backend is gl.gl2) and ++ # sys.platform.startswith('win')) ++ ++ # # flush any pending errors ++ # if check_ati_bug: ++ # gl.check_error('periodic check') + + try: + gl.glBufferSubData(self._target, offset, data) +- if check_ati_bug: +- gl.check_error('glBufferSubData') ++ # if check_ati_bug: ++ # gl.check_error('glBufferSubData') + self._bufferSubDataOk = True # glBufferSubData seems to work + except Exception: + # This might be due to a driver error (seen on ATI), issue #64. diff --git a/p4a/pythonforandroidold/recipes/vispy/use_es2.patch b/p4a/pythonforandroidold/recipes/vispy/use_es2.patch new file mode 100644 index 0000000..4183865 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/vispy/use_es2.patch @@ -0,0 +1,14 @@ +diff --git a/vispy/gloo/gl/__init__.py b/vispy/gloo/gl/__init__.py +index 93813fa..c41859c 100644 +--- a/vispy/gloo/gl/__init__.py ++++ b/vispy/gloo/gl/__init__.py +@@ -210,7 +210,7 @@ def check_error(when='periodic check'): + + + # Load default gl backend +-from . import gl2 as default_backend # noqa ++from .
import es2 as default_backend # noqa + + # Call use to start using our default backend +-use_gl() ++use_gl('es2') diff --git a/p4a/pythonforandroidold/recipes/vlc/__init__.py b/p4a/pythonforandroidold/recipes/vlc/__init__.py new file mode 100644 index 0000000..66f51b9 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/vlc/__init__.py @@ -0,0 +1,75 @@ +from pythonforandroid.toolchain import Recipe, current_directory +from pythonforandroid.logger import info, debug, shprint, warning +from os.path import join, isdir, isfile +from os import environ +import sh + + +class VlcRecipe(Recipe): + version = '3.0.0' + url = None + name = 'vlc' + + depends = [] + + port_git = 'http://git.videolan.org/git/vlc-ports/android.git' +# vlc_git = 'http://git.videolan.org/git/vlc.git' + ENV_LIBVLC_AAR = 'LIBVLC_AAR' + aars = {} # reserved for future multi-arch support + + def prebuild_arch(self, arch): + super(VlcRecipe, self).prebuild_arch(arch) + build_dir = self.get_build_dir(arch.arch) + port_dir = join(build_dir, 'vlc-port-android') + if self.ENV_LIBVLC_AAR in environ: + aar = environ.get(self.ENV_LIBVLC_AAR) + if isdir(aar): + aar = join(aar, 'libvlc-{}.aar'.format(self.version)) + if not isfile(aar): + warning("Error: {} is not a valid libvlc-.aar bundle".format(aar)) + info("check the {} environment variable!".format(self.ENV_LIBVLC_AAR)) + exit(1) + self.aars[arch] = aar + else: + aar_path = join(port_dir, 'libvlc', 'build', 'outputs', 'aar') + self.aars[arch] = aar = join(aar_path, 'libvlc-{}.aar'.format(self.version)) + warning("HINT: set the path to a precompiled libvlc-.aar bundle " + "in the {} environment variable!".format(self.ENV_LIBVLC_AAR)) + info("libvlc-.aar will be built " + "from the sources at {}".format(port_dir)) + if not isfile(join(port_dir, 'compile.sh')): + info("cloning the vlc-port-android sources from {}".format( + self.port_git)) + shprint(sh.git, 'clone', self.port_git, port_dir, + _tail=20, _critical=True) +# now "git clone ..."
is a part of compile.sh +# vlc_dir = join(port_dir, 'vlc') +# if not isfile(join(vlc_dir, 'Makefile.am')): +# info("clone vlc sources from {}".format(self.vlc_git)) +# shprint(sh.git, 'clone', self.vlc_git, vlc_dir, +# _tail=20, _critical=True) + + def build_arch(self, arch): + super(VlcRecipe, self).build_arch(arch) + build_dir = self.get_build_dir(arch.arch) + port_dir = join(build_dir, 'vlc-port-android') + aar = self.aars[arch] + if not isfile(aar): + with current_directory(port_dir): + env = dict(environ) + env.update({ + 'ANDROID_ABI': arch.arch, + 'ANDROID_NDK': self.ctx.ndk_dir, + 'ANDROID_SDK': self.ctx.sdk_dir, + }) + info("compiling vlc from sources") + debug("environment: {}".format(env)) + if not isfile(join('bin', 'VLC-debug.apk')): + shprint(sh.Command('./compile.sh'), _env=env, + _tail=50, _critical=True) + shprint(sh.Command('./compile-libvlc.sh'), _env=env, + _tail=50, _critical=True) + shprint(sh.cp, '-a', aar, self.ctx.aars_dir) + + +recipe = VlcRecipe() diff --git a/p4a/pythonforandroid/recipes/websocket-client/__init__.py b/p4a/pythonforandroidold/recipes/websocket-client/__init__.py similarity index 100% rename from p4a/pythonforandroid/recipes/websocket-client/__init__.py rename to p4a/pythonforandroidold/recipes/websocket-client/__init__.py diff --git a/p4a/pythonforandroid/recipes/websocket-client/websocket.patch b/p4a/pythonforandroidold/recipes/websocket-client/websocket.patch similarity index 100% rename from p4a/pythonforandroid/recipes/websocket-client/websocket.patch rename to p4a/pythonforandroidold/recipes/websocket-client/websocket.patch diff --git a/p4a/pythonforandroidold/recipes/wsaccel/__init__.py b/p4a/pythonforandroidold/recipes/wsaccel/__init__.py new file mode 100644 index 0000000..7bfc346 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/wsaccel/__init__.py @@ -0,0 +1,11 @@ +from pythonforandroid.recipe import CythonRecipe + + +class WSAccellRecipe(CythonRecipe): + version = '0.6.2' + url = 'https://pypi.python.org/packages/source/w/wsaccel/wsaccel-{version}.tar.gz' + depends = [] + call_hostpython_via_targetpython = False + + +recipe = WSAccellRecipe() diff --git a/p4a/pythonforandroidold/recipes/x3dh/__init__.py b/p4a/pythonforandroidold/recipes/x3dh/__init__.py new file mode 100644 index 0000000..134bf29 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/x3dh/__init__.py @@ -0,0 +1,18 @@ +from pythonforandroid.recipe import PythonRecipe + + +class X3DHRecipe(PythonRecipe): + name = 'x3dh' + version = '0.5.3' + url = 'https://pypi.python.org/packages/source/X/X3DH/X3DH-{version}.tar.gz' + site_packages_name = 'x3dh' + depends = [ + 'setuptools', + 'cryptography', + 'xeddsa', + ] + patches = ['requires_fix.patch'] + call_hostpython_via_targetpython = False + + +recipe = X3DHRecipe() diff --git a/p4a/pythonforandroidold/recipes/x3dh/requires_fix.patch b/p4a/pythonforandroidold/recipes/x3dh/requires_fix.patch new file mode 100644 index 0000000..250df05 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/x3dh/requires_fix.patch @@ -0,0 +1,12 @@ +diff -urN X3DH-0.5.3.ori/setup.py X3DH-0.5.3/setup.py +--- X3DH-0.5.3.ori/setup.py 2018-10-28 19:15:16.444766623 +0100 ++++ X3DH-0.5.3/setup.py 2018-10-28 19:15:38.028060948 +0100 +@@ -24,7 +24,7 @@ + author_email = "tim@cifg.io", + license = "MIT", + packages = find_packages(), +- install_requires = [ "cryptography>=1.7.1", "XEdDSA>=0.4.2" ], ++ install_requires = [], + python_requires = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4", + zip_safe = True, + classifiers = [ diff --git 
a/p4a/pythonforandroidold/recipes/xeddsa/__init__.py b/p4a/pythonforandroidold/recipes/xeddsa/__init__.py new file mode 100644 index 0000000..eb0e2ae --- /dev/null +++ b/p4a/pythonforandroidold/recipes/xeddsa/__init__.py @@ -0,0 +1,36 @@ +from pythonforandroid.recipe import CythonRecipe +from pythonforandroid.toolchain import current_directory, shprint +from os.path import join +import sh + + +class XedDSARecipe(CythonRecipe): + name = 'xeddsa' + version = '0.4.4' + url = 'https://pypi.python.org/packages/source/X/XEdDSA/XEdDSA-{version}.tar.gz' + depends = [ + 'setuptools', + 'cffi', + 'pynacl', + ] + patches = ['remove_dependencies.patch'] + call_hostpython_via_targetpython = False + + def build_arch(self, arch): + with current_directory(join(self.get_build_dir(arch.arch))): + env = self.get_recipe_env(arch) + hostpython = sh.Command(self.ctx.hostpython) + shprint( + hostpython, 'ref10/build.py', + _env=env + ) + # NOTE: the slice below assumes a single-digit minor version + # ('3.7.1' -> '3.7'); a version like '3.10.x' would be truncated + python_version = self.ctx.python_recipe.version[0:3] + site_packages_dir = 'lib/python{python_version}/site-packages'.format( + python_version=python_version) + site_packages = join(self.ctx.get_python_install_dir(), + site_packages_dir) + shprint(sh.cp, '_crypto_sign.so', site_packages) + self.install_python_package(arch) + + +recipe = XedDSARecipe() diff --git a/p4a/pythonforandroidold/recipes/xeddsa/remove_dependencies.patch b/p4a/pythonforandroidold/recipes/xeddsa/remove_dependencies.patch new file mode 100644 index 0000000..8bd762f --- /dev/null +++ b/p4a/pythonforandroidold/recipes/xeddsa/remove_dependencies.patch @@ -0,0 +1,15 @@ +diff -urN XEdDSA-0.4.4.ori/setup.py XEdDSA-0.4.4/setup.py +--- XEdDSA-0.4.4.ori/setup.py 2018-09-23 16:08:35.000000000 +0200 ++++ XEdDSA-0.4.4/setup.py 2018-10-30 08:21:23.338790184 +0100 +@@ -22,9 +22,8 @@ + author_email = "tim@cifg.io", + license = "MIT", + packages = find_packages(), +- install_requires = [ "cffi>=1.9.1", "pynacl>=1.0.1" ], +- setup_requires = [ "cffi>=1.9.1" ], +- cffi_modules = [ os.path.join("ref10", "build.py") + ":ffibuilder" ], ++ install_requires = ["pynacl>=1.0.1" ], ++ setup_requires = [], + python_requires = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4", + include_package_data = True, + zip_safe = False,
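One detail worth calling out in the xeddsa recipe above: the target site-packages directory is derived from the Python recipe's version string with version[0:3], which only works for single-digit minor versions (see the NOTE added in that hunk). A standalone sketch of the computation (the install dir is illustrative):

    from os.path import join

    python_version = "3.7.1"[0:3]  # -> "3.7"; "3.10.1"[0:3] would give "3.1"
    site_packages = join(
        "/tmp/p4a-dists/myapp/python-install",  # illustrative install dir
        "lib/python{}/site-packages".format(python_version),
    )
    # -> /tmp/p4a-dists/myapp/python-install/lib/python3.7/site-packages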
+ # url = 'https://github.com/ZBar/ZBar/archive/{version}.zip' + url = 'https://pypi.python.org/packages/e0/5c/' + \ + 'bd2a96a9f2adacffceb4482cdd56831735ab5a67ea6a60c0a8757c17b62e' + \ + '/zbar-{version}.tar.gz' + + call_hostpython_via_targetpython = False + + depends = ['setuptools', 'libzbar'] + + patches = ["zbar-0.10-python-crash.patch"] + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super(ZBarRecipe, self).get_recipe_env(arch, with_flags_in_cc) + libzbar = self.get_recipe('libzbar', self.ctx) + libzbar_dir = libzbar.get_build_dir(arch.arch) + env['PYTHON_ROOT'] = self.ctx.get_python_install_dir() + env['CFLAGS'] += ' -I' + join(libzbar_dir, 'include') + env['LDFLAGS'] += ' -L' + join(libzbar_dir, 'zbar', '.libs') + env['LIBS'] = env.get('LIBS', '') + ' -landroid -lzbar' + return env + + +recipe = ZBarRecipe() diff --git a/p4a/pythonforandroidold/recipes/zbar/zbar-0.10-python-crash.patch b/p4a/pythonforandroidold/recipes/zbar/zbar-0.10-python-crash.patch new file mode 100644 index 0000000..196a356 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/zbar/zbar-0.10-python-crash.patch @@ -0,0 +1,19 @@ +https://sourceforge.net/p/zbar/patches/37/ + +fix from Debian for crashes when importing the python module. +http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=702499 + +this doesn't happen on some arches as the data naturally ends up with zero +data after the structure, but on some (like arm), it isn't so we crash when +python walks the list. + +--- a/imagescanner.c ++++ b/imagescanner.c +@@ -68,6 +68,7 @@ imagescanner_get_results (zbarImageScanner *self, + + static PyGetSetDef imagescanner_getset[] = { + { "results", (getter)imagescanner_get_results, }, ++ { NULL }, + }; + + static PyObject* diff --git a/p4a/pythonforandroidold/recipes/zbarlight/__init__.py b/p4a/pythonforandroidold/recipes/zbarlight/__init__.py new file mode 100644 index 0000000..966c7fb --- /dev/null +++ b/p4a/pythonforandroidold/recipes/zbarlight/__init__.py @@ -0,0 +1,26 @@ +from os.path import join +from pythonforandroid.recipe import PythonRecipe + + +class ZBarLightRecipe(PythonRecipe): + + version = '2.1' + + url = 'https://github.com/Polyconseil/zbarlight/archive/{version}.tar.gz' # noqa + + call_hostpython_via_targetpython = False + + depends = ['setuptools', 'libzbar'] + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super(ZBarLightRecipe, self).get_recipe_env(arch, with_flags_in_cc) + libzbar = self.get_recipe('libzbar', self.ctx) + libzbar_dir = libzbar.get_build_dir(arch.arch) + env['PYTHON_ROOT'] = self.ctx.get_python_install_dir() + env['CFLAGS'] += ' -I' + join(libzbar_dir, 'include') + env['LDFLAGS'] += ' -L' + join(libzbar_dir, 'zbar', '.libs') + env['LIBS'] = env.get('LIBS', '') + ' -landroid -lzbar' + return env + + +recipe = ZBarLightRecipe() diff --git a/p4a/pythonforandroidold/recipes/zeroconf/__init__.py b/p4a/pythonforandroidold/recipes/zeroconf/__init__.py new file mode 100644 index 0000000..5ca5708 --- /dev/null +++ b/p4a/pythonforandroidold/recipes/zeroconf/__init__.py @@ -0,0 +1,12 @@ +from pythonforandroid.recipe import PythonRecipe + + +class ZeroconfRecipe(PythonRecipe): + name = 'zeroconf' + version = '0.17.4' + url = 'https://pypi.python.org/packages/source/z/zeroconf/zeroconf-{version}.tar.gz' + depends = ['setuptools', 'enum34', 'six'] + call_hostpython_via_targetpython = False + + +recipe = ZeroconfRecipe() diff --git a/p4a/pythonforandroidold/recipes/zeroconf/patches/setup.patch 
b/p4a/pythonforandroidold/recipes/zeroconf/patches/setup.patch
new file mode 100644
index 0000000..2b7900a
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/zeroconf/patches/setup.patch
@@ -0,0 +1,15 @@
+--- zeroconf.orig/setup.py	2015-07-11 21:55:09.000000000 +0200
++++ zeroconf/setup.py	2017-02-23 01:04:13.370018716 +0100
+@@ -55,12 +55,5 @@
+         'mDNS',
+     ],
+     install_requires=[
+-        'enum-compat',
+-        # netifaces 0.10.5 has a bug that results in all interfaces' netmasks
+-        # to be 255.255.255.255 on Windows which breaks things. See:
+-        # * https://github.com/jstasiak/python-zeroconf/issues/84
+-        # * https://bitbucket.org/al45tair/netifaces/issues/39/netmask-is-always-255255255255
+-        'netifaces<=0.10.4',
+-        'six',
+     ],
+ )
diff --git a/p4a/pythonforandroidold/recipes/zope/__init__.py b/p4a/pythonforandroidold/recipes/zope/__init__.py
new file mode 100644
index 0000000..579a760
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/zope/__init__.py
@@ -0,0 +1,28 @@
+
+from pythonforandroid.recipe import PythonRecipe
+from os.path import join
+
+
+class ZopeRecipe(PythonRecipe):
+    name = 'zope'
+    version = '4.1.3'
+    url = 'http://pypi.python.org/packages/source/z/zope.interface/zope.interface-{version}.tar.gz'
+
+    depends = []
+
+    def get_recipe_env(self, arch):
+        env = super(ZopeRecipe, self).get_recipe_env(arch)
+
+        # These are in the old zope recipe but seem like they shouldn't actually be necessary
+        env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format(
+            self.ctx.get_libs_dir(arch.arch))
+        env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink')
+        return env  # the return was missing, silently dropping the env tweaks above
+
+    def postbuild_arch(self, arch):
+        super(ZopeRecipe, self).postbuild_arch(arch)
+
+        # Should do some deleting here
+
+
+recipe = ZopeRecipe()
diff --git a/p4a/pythonforandroidold/recipes/zope_interface/__init__.py b/p4a/pythonforandroidold/recipes/zope_interface/__init__.py
new file mode 100644
index 0000000..b1fb0bd
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/zope_interface/__init__.py
@@ -0,0 +1,31 @@
+from pythonforandroid.recipe import PythonRecipe
+from pythonforandroid.toolchain import current_directory
+from os.path import join
+import sh
+
+
+class ZopeInterfaceRecipe(PythonRecipe):
+    call_hostpython_via_targetpython = False
+    name = 'zope_interface'
+    version = '4.1.3'
+    url = 'https://pypi.python.org/packages/source/z/zope.interface/zope.interface-{version}.tar.gz'
+    site_packages_name = 'zope.interface'
+    depends = ['setuptools']
+    patches = ['no_tests.patch']
+
+    def build_arch(self, arch):
+        super(ZopeInterfaceRecipe, self).build_arch(arch)
+        # The zope.interface module lacks an __init__.py file in one of its
+        # folders (once installed), which leads to an ImportError.
+        # Here we intentionally apply a patch to fix that, so if upstream
+        # solves it in the future the now-failing patch will flag it
+        zope_install = join(self.ctx.get_site_packages_dir(arch.arch), 'zope')
+        self.apply_patch('fix-init.patch', arch.arch, build_dir=zope_install)
+
+    def prebuild_arch(self, arch):
+        super(ZopeInterfaceRecipe, self).prebuild_arch(arch)
+        with current_directory(self.get_build_dir(arch.arch)):
+            sh.rm('-rf', 'src/zope/interface/tests', 'src/zope/interface/common/tests')
+
+
+recipe = ZopeInterfaceRecipe()
diff --git a/p4a/pythonforandroidold/recipes/zope_interface/fix-init.patch b/p4a/pythonforandroidold/recipes/zope_interface/fix-init.patch
new file mode 100644
index 0000000..b618eb5
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/zope_interface/fix-init.patch
@@ -0,0 +1,9 @@
+The zope.interface module lacks an __init__.py file in the `zope` folder
+(once installed); this patch creates the missing file. The file exists in
+the source tree, so it seems to get lost somewhere during the installation
+process.
+diff -Naurp zope.orig/__init__.py zope/__init__.py
+--- zope.orig/__init__.py	1970-01-01 01:00:00.000000000 +0100
++++ zope/__init__.py	2019-02-05 11:29:22.666757227 +0100
+@@ -0,0 +1 @@
++
diff --git a/p4a/pythonforandroidold/recipes/zope_interface/no_tests.patch b/p4a/pythonforandroidold/recipes/zope_interface/no_tests.patch
new file mode 100644
index 0000000..09a3872
--- /dev/null
+++ b/p4a/pythonforandroidold/recipes/zope_interface/no_tests.patch
@@ -0,0 +1,13 @@
+--- zope_interface/setup.py	2015-10-05 09:35:14.000000000 +0200
++++ b/setup.py	2016-06-15 17:44:35.108263993 +0200
+@@ -139,9 +139,8 @@
+         "Topic :: Software Development :: Libraries :: Python Modules",
+     ],
+ 
+-    packages = ['zope', 'zope.interface', 'zope.interface.tests'],
++    packages = ['zope', 'zope.interface'],
+     package_dir = {'': 'src'},
+     cmdclass = {'build_ext': optional_build_ext,
+                 },
+-    test_suite = 'zope.interface.tests',
+     **extra)
diff --git a/p4a/pythonforandroidold/recommendations.py b/p4a/pythonforandroidold/recommendations.py
new file mode 100644
index 0000000..fd2fd3a
--- /dev/null
+++ b/p4a/pythonforandroidold/recommendations.py
@@ -0,0 +1,107 @@
+"""Simple functions for checking dependency versions."""
+
+from distutils.version import LooseVersion
+from os.path import join
+from pythonforandroid.logger import info, warning
+from pythonforandroid.util import BuildInterruptingException
+
+# We only check the NDK major version
+MIN_NDK_VERSION = 17
+MAX_NDK_VERSION = 17
+
+RECOMMENDED_NDK_VERSION = '17c'
+OLD_NDK_MESSAGE = 'Older NDKs may not be compatible with all p4a features.'
+NEW_NDK_MESSAGE = 'Newer NDKs may not be fully supported by p4a.'
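For context before the version checkers that follow: read_ndk_version pulls the Pkg.Revision line out of $NDK_DIR/source.properties and wraps it in a LooseVersion, and check_ndk_version then compares only the major component against the bounds above. A minimal sketch of that round trip, assuming a typical r17c install (the revision value is illustrative):

    from distutils.version import LooseVersion

    # source.properties normally carries a line like:
    #     Pkg.Revision = 17.2.4988734
    version = LooseVersion('17.2.4988734')
    major_version = version.version[0]  # -> 17
    # 17 falls within [MIN_NDK_VERSION, MAX_NDK_VERSION], so no warning fires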
+
+
+def check_ndk_version(ndk_dir):
+    # Check the NDK version against what is currently recommended
+    version = read_ndk_version(ndk_dir)
+
+    if version is None:
+        return  # if we failed to read the version, just don't worry about it
+
+    major_version = version.version[0]
+
+    info('Found NDK revision {}'.format(version))
+
+    if major_version < MIN_NDK_VERSION:
+        warning('Minimum recommended NDK version is {}'.format(
+            RECOMMENDED_NDK_VERSION))
+        warning(OLD_NDK_MESSAGE)
+    elif major_version > MAX_NDK_VERSION:
+        warning('Maximum recommended NDK version is {}'.format(
+            RECOMMENDED_NDK_VERSION))
+        warning(NEW_NDK_MESSAGE)
+
+
+def read_ndk_version(ndk_dir):
+    """Read the NDK version from the NDK dir, if possible"""
+    try:
+        with open(join(ndk_dir, 'source.properties')) as fileh:
+            ndk_data = fileh.read()
+    except IOError:
+        info('Could not determine NDK version, no source.properties '
+             'in the NDK dir')
+        return
+
+    for line in ndk_data.split('\n'):
+        if line.startswith('Pkg.Revision'):
+            break
+    else:
+        info('Could not parse $NDK_DIR/source.properties, not checking '
+             'NDK version')
+        return
+
+    # Line should have the form "Pkg.Revision = ..."
+    ndk_version = LooseVersion(line.split('=')[-1].strip())
+
+    return ndk_version
+
+
+MIN_TARGET_API = 26
+
+# highest version tested to work fine with SDL2
+# should be a good default for other bootstraps too
+RECOMMENDED_TARGET_API = 27
+
+ARMEABI_MAX_TARGET_API = 21
+OLD_API_MESSAGE = (
+    'Target APIs lower than 26 are no longer supported on Google Play, '
+    'and are not recommended. Note that the Target API can be higher than '
+    'your device Android version, and should usually be as high as possible.')
+
+
+def check_target_api(api, arch):
+    """Warn if the user's target API is less than the current minimum
+    recommendation.
+    """
+
+    if api >= ARMEABI_MAX_TARGET_API and arch == 'armeabi':
+        raise BuildInterruptingException(
+            'Asked to build for armeabi architecture with API '
+            '{}, but API {} or greater does not support armeabi'.format(
+                api, ARMEABI_MAX_TARGET_API),
+            instructions='You probably want to build with --arch=armeabi-v7a instead')
+
+    if api < MIN_TARGET_API:
+        warning('Target API {} < {}'.format(api, MIN_TARGET_API))
+        warning(OLD_API_MESSAGE)
+
+
+MIN_NDK_API = 21
+RECOMMENDED_NDK_API = 21
+OLD_NDK_API_MESSAGE = ('NDK API less than {} is not supported'.format(MIN_NDK_API))
+
+
+def check_ndk_api(ndk_api, android_api):
+    """Error if the NDK API is above the Android API, warn if it is too low."""
+    if ndk_api > android_api:
+        raise BuildInterruptingException(
+            'Target NDK API is {}, higher than the target Android API {}.'.format(
+                ndk_api, android_api),
+            instructions=('The NDK API is a minimum supported API number and must be lower '
+                          'than the target Android API'))
+
+    if ndk_api < MIN_NDK_API:
+        warning(OLD_NDK_API_MESSAGE)
diff --git a/p4a/pythonforandroidold/toolchain.py b/p4a/pythonforandroidold/toolchain.py
new file mode 100644
index 0000000..ddf745a
--- /dev/null
+++ b/p4a/pythonforandroidold/toolchain.py
@@ -0,0 +1,1058 @@
+#!/usr/bin/env python
+"""
+Tool for packaging Python apps for Android
+==========================================
+
+This module defines the entry point for command line and programmatic use.
+""" + +from __future__ import print_function +from os import environ +from pythonforandroid import __version__ +from pythonforandroid.recommendations import ( + RECOMMENDED_NDK_API, RECOMMENDED_TARGET_API) +from pythonforandroid.util import BuildInterruptingException, handle_build_exception + + +def check_python_dependencies(): + # Check if the Python requirements are installed. This appears + # before the imports because otherwise they're imported elsewhere. + + # Using the ok check instead of failing immediately so that all + # errors are printed at once + + from distutils.version import LooseVersion + from importlib import import_module + import sys + + ok = True + + modules = [('colorama', '0.3.3'), 'appdirs', ('sh', '1.10'), 'jinja2', + 'six'] + + for module in modules: + if isinstance(module, tuple): + module, version = module + else: + version = None + + try: + import_module(module) + except ImportError: + if version is None: + print('ERROR: The {} Python module could not be found, please ' + 'install it.'.format(module)) + ok = False + else: + print('ERROR: The {} Python module could not be found, ' + 'please install version {} or higher'.format( + module, version)) + ok = False + else: + if version is None: + continue + try: + cur_ver = sys.modules[module].__version__ + except AttributeError: # this is sometimes not available + continue + if LooseVersion(cur_ver) < LooseVersion(version): + print('ERROR: {} version is {}, but python-for-android needs ' + 'at least {}.'.format(module, cur_ver, version)) + ok = False + + if not ok: + print('python-for-android is exiting due to the errors logged above') + exit(1) + + +check_python_dependencies() + + +import sys +from sys import platform +from os.path import (join, dirname, realpath, exists, expanduser, basename) +import os +import glob +import shutil +import re +import shlex +from functools import wraps + +import argparse +import sh +import imp +from appdirs import user_data_dir +import logging +from distutils.version import LooseVersion + +from pythonforandroid.recipe import Recipe +from pythonforandroid.logger import (logger, info, warning, setup_color, + Out_Style, Out_Fore, + info_notify, info_main, shprint) +from pythonforandroid.util import current_directory +from pythonforandroid.bootstrap import Bootstrap +from pythonforandroid.distribution import Distribution, pretty_log_dists +from pythonforandroid.graph import get_recipe_order_and_bootstrap +from pythonforandroid.build import Context, build_recipes + +user_dir = dirname(realpath(os.path.curdir)) +toolchain_dir = dirname(__file__) +sys.path.insert(0, join(toolchain_dir, "tools", "external")) + + +def add_boolean_option(parser, names, no_names=None, + default=True, dest=None, description=None): + group = parser.add_argument_group(description=description) + if not isinstance(names, (list, tuple)): + names = [names] + if dest is None: + dest = names[0].strip("-").replace("-", "_") + + def add_dashes(x): + return x if x.startswith("-") else "--"+x + + opts = [add_dashes(x) for x in names] + group.add_argument( + *opts, help=("(this is the default)" if default else None), + dest=dest, action='store_true') + if no_names is None: + def add_no(x): + x = x.lstrip("-") + return ("no_"+x) if "_" in x else ("no-"+x) + no_names = [add_no(x) for x in names] + opts = [add_dashes(x) for x in no_names] + group.add_argument( + *opts, help=(None if default else "(this is the default)"), + dest=dest, action='store_false') + parser.set_defaults(**{dest: default}) + + +def 
+def require_prebuilt_dist(func):
+    """Decorator for ToolchainCL methods. If present, the method will
+    automatically make sure a dist has been built before continuing
+    or, if no dists are present or can be obtained, will raise an
+    error.
+    """
+
+    @wraps(func)
+    def wrapper_func(self, args):
+        ctx = self.ctx
+        ctx.set_archs(self._archs)
+        ctx.prepare_build_environment(user_sdk_dir=self.sdk_dir,
+                                      user_ndk_dir=self.ndk_dir,
+                                      user_android_api=self.android_api,
+                                      user_ndk_api=self.ndk_api)
+        dist = self._dist
+        if dist.needs_build:
+            if dist.folder_exists():  # possible if the dist is being replaced
+                dist.delete()
+            info_notify('No dist exists that meets your requirements, '
+                        'so one will be built.')
+            build_dist_from_args(ctx, dist, args)
+        func(self, args)
+    return wrapper_func
+
+
+def dist_from_args(ctx, args):
+    """Parses out any distribution-related arguments, and uses them to
+    obtain a Distribution class instance for the build.
+    """
+    return Distribution.get_distribution(
+        ctx,
+        name=args.dist_name,
+        recipes=split_argument_list(args.requirements),
+        ndk_api=args.ndk_api,
+        force_build=args.force_build,
+        require_perfect_match=args.require_perfect_match,
+        allow_replace_dist=args.allow_replace_dist)
+
+
+def build_dist_from_args(ctx, dist, args):
+    """Parses out any bootstrap related arguments, and uses them to build
+    a dist."""
+    bs = Bootstrap.get_bootstrap(args.bootstrap, ctx)
+    build_order, python_modules, bs \
+        = get_recipe_order_and_bootstrap(ctx, dist.recipes, bs)
+    ctx.recipe_build_order = build_order
+    ctx.python_modules = python_modules
+
+    info('The selected bootstrap is {}'.format(bs.name))
+    info_main('# Creating dist with {} bootstrap'.format(bs.name))
+    bs.distribution = dist
+    info_notify('Dist will have name {} and recipes ({})'.format(
+        dist.name, ', '.join(dist.recipes)))
+    info('Dist will also contain modules ({}) installed from pip'.format(
+        ', '.join(ctx.python_modules)))
+
+    ctx.dist_name = bs.distribution.name
+    ctx.prepare_bootstrap(bs)
+    if dist.needs_build:
+        ctx.prepare_dist(ctx.dist_name)
+
+    build_recipes(build_order, python_modules, ctx)
+
+    ctx.bootstrap.run_distribute()
+
+    info_main('# Your distribution was created successfully, exiting.')
+    info('Dist can be found at (for now) {}'
+         .format(join(ctx.dist_dir, ctx.dist_name)))
+
+
+def split_argument_list(arg_list):
+    if not len(arg_list):
+        return []
+    return re.split(r'[ ,]+', arg_list)
+
+
+class NoAbbrevParser(argparse.ArgumentParser):
+    """We want to disable argument abbreviation so as not to interfere
+    with passing through arguments to build.py, but in python2 argparse
+    doesn't have this option.
+
+    This subclass alternative follows the suggestion at
+    https://bugs.python.org/issue14910.
+    """
+    def _get_option_tuples(self, option_string):
+        return []
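Why the override above matters: with stock argparse, a prefix of a known long option is silently expanded, which would swallow arguments meant to pass through to build.py. A hedged illustration, using options registered later in this file:

    # stock argparse: an unknown '--storage' abbreviation-matches '--storage-dir'
    # NoAbbrevParser: _get_option_tuples() returns no candidates, so
    # '--storage' stays in unknown_args and is forwarded to build.py intact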
+ """ + def _get_option_tuples(self, option_string): + return [] + + +class ToolchainCL(object): + + def __init__(self): + + argv = sys.argv + # Buildozer used to pass these arguments in a now-invalid order + # If that happens, apply this fix + # This fix will be removed once a fixed buildozer is released + if (len(argv) > 2 + and argv[1].startswith('--color') + and argv[2].startswith('--storage-dir')): + argv.append(argv.pop(1)) # the --color arg + argv.append(argv.pop(1)) # the --storage-dir arg + + parser = NoAbbrevParser( + description='A packaging tool for turning Python scripts and apps ' + 'into Android APKs') + + generic_parser = argparse.ArgumentParser( + add_help=False, + description='Generic arguments applied to all commands') + argparse.ArgumentParser( + add_help=False, description='Arguments for dist building') + + generic_parser.add_argument( + '--debug', dest='debug', action='store_true', default=False, + help='Display debug output and all build info') + generic_parser.add_argument( + '--color', dest='color', choices=['always', 'never', 'auto'], + help='Enable or disable color output (default enabled on tty)') + generic_parser.add_argument( + '--sdk-dir', '--sdk_dir', dest='sdk_dir', default='', + help='The filepath where the Android SDK is installed') + generic_parser.add_argument( + '--ndk-dir', '--ndk_dir', dest='ndk_dir', default='', + help='The filepath where the Android NDK is installed') + generic_parser.add_argument( + '--android-api', + '--android_api', + dest='android_api', + default=0, + type=int, + help=('The Android API level to build against defaults to {} if ' + 'not specified.').format(RECOMMENDED_TARGET_API)) + generic_parser.add_argument( + '--ndk-version', '--ndk_version', dest='ndk_version', default=None, + help=('DEPRECATED: the NDK version is now found automatically or ' + 'not at all.')) + generic_parser.add_argument( + '--ndk-api', type=int, default=None, + help=('The Android API level to compile against. This should be your ' + '*minimal supported* API, not normally the same as your --android-api. ' + 'Defaults to min(ANDROID_API, {}) if not specified.').format(RECOMMENDED_NDK_API)) + generic_parser.add_argument( + '--symlink-java-src', '--symlink_java_src', + action='store_true', + dest='symlink_java_src', + default=False, + help=('If True, symlinks the java src folder during build and dist ' + 'creation. This is useful for development only, it could also' + ' cause weird problems.')) + + default_storage_dir = user_data_dir('python-for-android') + if ' ' in default_storage_dir: + default_storage_dir = '~/.python-for-android' + generic_parser.add_argument( + '--storage-dir', dest='storage_dir', default=default_storage_dir, + help=('Primary storage directory for downloads and builds ' + '(default: {})'.format(default_storage_dir))) + + generic_parser.add_argument( + '--arch', help='The archs to build for, separated by commas.', + default='arm64-v8a') + + # Options for specifying the Distribution + generic_parser.add_argument( + '--dist-name', '--dist_name', + help='The name of the distribution to use or create', default='') + + generic_parser.add_argument( + '--requirements', + help=('Dependencies of your app, should be recipe names or ' + 'Python modules'), + default='') + + generic_parser.add_argument( + '--bootstrap', + help='The bootstrap to build with. 
Leave unset to choose ' + 'automatically.', + default=None) + + generic_parser.add_argument( + '--hook', + help='Filename to a module that contains python-for-android hooks', + default=None) + + add_boolean_option( + generic_parser, ["force-build"], + default=False, + description='Whether to force compilation of a new distribution') + + add_boolean_option( + generic_parser, ["require-perfect-match"], + default=False, + description=('Whether the dist recipes must perfectly match ' + 'those requested')) + + add_boolean_option( + generic_parser, ["allow-replace-dist"], + default=True, + description='Whether existing dist names can be automatically replaced' + ) + + generic_parser.add_argument( + '--local-recipes', '--local_recipes', + dest='local_recipes', default='./p4a-recipes', + help='Directory to look for local recipes') + + generic_parser.add_argument( + '--java-build-tool', + dest='java_build_tool', default='auto', + choices=['auto', 'ant', 'gradle'], + help=('The java build tool to use when packaging the APK, defaults ' + 'to automatically selecting an appropriate tool.')) + + add_boolean_option( + generic_parser, ['copy-libs'], + default=False, + description='Copy libraries instead of using biglink (Android 4.3+)' + ) + + self._read_configuration() + + subparsers = parser.add_subparsers(dest='subparser_name', + help='The command to run') + + def add_parser(subparsers, *args, **kwargs): + """ + argparse in python2 doesn't support the aliases option, + so we just don't provide the aliases there. + """ + if 'aliases' in kwargs and sys.version_info.major < 3: + kwargs.pop('aliases') + return subparsers.add_parser(*args, **kwargs) + + parser_recommendations = add_parser( + subparsers, + 'recommendations', + parents=[generic_parser], + help='List recommended p4a dependencies') + parser_recipes = add_parser( + subparsers, + 'recipes', + parents=[generic_parser], + help='List the available recipes') + parser_recipes.add_argument( + "--compact", + action="store_true", default=False, + help="Produce a compact list suitable for scripting") + add_parser( + subparsers, 'bootstraps', + help='List the available bootstraps', + parents=[generic_parser]) + add_parser( + subparsers, 'clean_all', + aliases=['clean-all'], + help='Delete all builds, dists and caches', + parents=[generic_parser]) + add_parser( + subparsers, 'clean_dists', + aliases=['clean-dists'], + help='Delete all dists', + parents=[generic_parser]) + add_parser( + subparsers, 'clean_bootstrap_builds', + aliases=['clean-bootstrap-builds'], + help='Delete all bootstrap builds', + parents=[generic_parser]) + add_parser( + subparsers, 'clean_builds', + aliases=['clean-builds'], + help='Delete all builds', + parents=[generic_parser]) + + parser_clean = add_parser( + subparsers, 'clean', + help='Delete build components.', + parents=[generic_parser]) + parser_clean.add_argument( + 'component', nargs='+', + help=('The build component(s) to delete. You can pass any ' + 'number of arguments from "all", "builds", "dists", ' + '"distributions", "bootstrap_builds", "downloads".')) + + parser_clean_recipe_build = add_parser( + subparsers, + 'clean_recipe_build', aliases=['clean-recipe-build'], + help=('Delete the build components of the given recipe. 
'
+                  'By default this will also delete built dists'),
+            parents=[generic_parser])
+        parser_clean_recipe_build.add_argument(
+            'recipe', help='The recipe name')
+        parser_clean_recipe_build.add_argument(
+            '--no-clean-dists', default=False,
+            dest='no_clean_dists',
+            action='store_true',
+            help='If passed, do not delete existing dists')
+
+        parser_clean_download_cache = add_parser(
+            subparsers,
+            'clean_download_cache', aliases=['clean-download-cache'],
+            help='Delete cached downloads for requirement builds',
+            parents=[generic_parser])
+        parser_clean_download_cache.add_argument(
+            'recipes',
+            nargs='*',
+            help='The recipes to clean (space-separated). If no recipe name is'
+                 ' provided, the entire cache is cleared.')
+
+        parser_export_dist = add_parser(
+            subparsers,
+            'export_dist', aliases=['export-dist'],
+            help='Copy the named dist to the given path',
+            parents=[generic_parser])
+        parser_export_dist.add_argument('output_dir',
+                                        help='The output dir to copy to')
+        parser_export_dist.add_argument(
+            '--symlink',
+            action='store_true',
+            help='Symlink the dist instead of copying')
+
+        parser_apk = add_parser(
+            subparsers,
+            'apk', help='Build an APK',
+            parents=[generic_parser])
+        parser_apk.add_argument(
+            '--release', dest='build_mode', action='store_const',
+            const='release', default='debug',
+            help='Build the APK in Release mode')
+        parser_apk.add_argument(
+            '--keystore', dest='keystore', action='store', default=None,
+            help=('Keystore for JAR signing key, will use jarsigner '
+                  'default if not specified (release build only)'))
+        parser_apk.add_argument(
+            '--signkey', dest='signkey', action='store', default=None,
+            help='Key alias to sign the APK with (release build only)')
+        parser_apk.add_argument(
+            '--keystorepw', dest='keystorepw', action='store', default=None,
+            help='Password for keystore')
+        parser_apk.add_argument(
+            '--signkeypw', dest='signkeypw', action='store', default=None,
+            help='Password for key alias')
+
+        parser_create = add_parser(
+            subparsers,
+            'create', help='Compile a set of requirements into a dist',
+            parents=[generic_parser])
+        parser_archs = add_parser(
+            subparsers,
+            'archs', help='List the available target architectures',
+            parents=[generic_parser])
+        parser_distributions = add_parser(
+            subparsers,
+            'distributions', aliases=['dists'],
+            help='List the currently available (compiled) dists',
+            parents=[generic_parser])
+        parser_delete_dist = add_parser(
+            subparsers,
+            'delete_dist', aliases=['delete-dist'], help='Delete a compiled dist',
+            parents=[generic_parser])
+
+        parser_sdk_tools = add_parser(
+            subparsers,
+            'sdk_tools', aliases=['sdk-tools'],
+            help='Run the given binary from the SDK tools dir',
+            parents=[generic_parser])
+        parser_sdk_tools.add_argument(
+            'tool', help='The binary tool name to run')
+
+        parser_adb = add_parser(
+            subparsers,
+            'adb', help='Run adb from the given SDK',
+            parents=[generic_parser])
+        parser_logcat = add_parser(
+            subparsers,
+            'logcat', help='Run logcat from the given SDK',
+            parents=[generic_parser])
+        parser_build_status = add_parser(
+            subparsers,
+            'build_status', aliases=['build-status'],
+            help='Print some debug information about current built components',
+            parents=[generic_parser])
+
+        parser.add_argument('-v', '--version', action='version',
+                            version=__version__)
+
+        args, unknown = parser.parse_known_args(sys.argv[1:])
+        args.unknown_args = unknown
+
+        self.args = args
+
+        if args.subparser_name is None:
+            parser.print_help()
+            exit(1)
+
+        setup_color(args.color)
+
+        if args.debug:
+            logger.setLevel(logging.DEBUG)
+
+        # strip version from requirements, and put them in environ
+        if hasattr(args, 'requirements'):
+            requirements = []
+            for requirement in split_argument_list(args.requirements):
+                if "==" in requirement:
+                    requirement, version = requirement.split(u"==", 1)
+                    os.environ["VERSION_{}".format(requirement)] = version
+                    info('Recipe {}: version "{}" requested'.format(
+                        requirement, version))
+                requirements.append(requirement)
+            args.requirements = u",".join(requirements)
+
+        self.warn_on_deprecated_args(args)
+
+        self.ctx = Context()
+        self.storage_dir = args.storage_dir
+        self.ctx.setup_dirs(self.storage_dir)
+        self.sdk_dir = args.sdk_dir
+        self.ndk_dir = args.ndk_dir
+        self.android_api = args.android_api
+        self.ndk_api = args.ndk_api
+        self.ctx.symlink_java_src = args.symlink_java_src
+        self.ctx.java_build_tool = args.java_build_tool
+
+        self._archs = split_argument_list(args.arch)
+
+        self.ctx.local_recipes = args.local_recipes
+        self.ctx.copy_libs = args.copy_libs
+
+        # Each subparser corresponds to a method
+        getattr(self, args.subparser_name.replace('-', '_'))(args)
+
+    def warn_on_deprecated_args(self, args):
+        """
+        Print warning messages for any deprecated arguments that were passed.
+        """
+
+        # NDK version is now determined automatically
+        if args.ndk_version is not None:
+            warning('--ndk-version is deprecated and no longer necessary, '
+                    'the value you passed is ignored')
+        if 'ANDROIDNDKVER' in environ:
+            warning('$ANDROIDNDKVER is deprecated and no longer necessary, '
+                    'the value you set is ignored')
+
+    def hook(self, name):
+        if not self.args.hook:
+            return
+        if not hasattr(self, "hook_module"):
+            # first time, try to load the hook module
+            self.hook_module = imp.load_source("pythonforandroid.hook",
+                                               self.args.hook)
+        if hasattr(self.hook_module, name):
+            info("Hook: execute {}".format(name))
+            getattr(self.hook_module, name)(self)
+        else:
+            info("Hook: ignore {}".format(name))
+
+    @property
+    def default_storage_dir(self):
+        udd = user_data_dir('python-for-android')
+        if ' ' in udd:
+            udd = '~/.python-for-android'
+        return udd
+
+    @staticmethod
+    def _read_configuration():
+        # search for a .p4a configuration file in the current directory
+        if not exists(".p4a"):
+            return
+        info("Reading .p4a configuration")
+        with open(".p4a") as fd:
+            lines = fd.readlines()
+        lines = [shlex.split(line)
+                 for line in lines if not line.startswith("#")]
+        for line in lines:
+            for arg in line:
+                sys.argv.append(arg)
+
+    def recipes(self, args):
+        ctx = self.ctx
+        if args.compact:
+            print(" ".join(set(Recipe.list_recipes(ctx))))
+        else:
+            for name in sorted(Recipe.list_recipes(ctx)):
+                try:
+                    recipe = Recipe.get_recipe(name, ctx)
+                except IOError:
+                    warning('Recipe "{}" could not be loaded'.format(name))
+                    continue
+                except SyntaxError:
+                    import traceback
+                    traceback.print_exc()
+                    warning(('Recipe "{}" could not be loaded due to a '
+                             'syntax error').format(name))
+                    continue
+                version = str(recipe.version)
+                print('{Fore.BLUE}{Style.BRIGHT}{recipe.name:<12} '
+                      '{Style.RESET_ALL}{Fore.LIGHTBLUE_EX}'
+                      '{version:<8}{Style.RESET_ALL}'.format(
+                          recipe=recipe, Fore=Out_Fore, Style=Out_Style,
+                          version=version))
+                print('    {Fore.GREEN}depends: {recipe.depends}'
+                      '{Fore.RESET}'.format(recipe=recipe, Fore=Out_Fore))
+                if recipe.conflicts:
+                    print('    {Fore.RED}conflicts: {recipe.conflicts}'
+                          '{Fore.RESET}'
+                          .format(recipe=recipe, Fore=Out_Fore))
+                if recipe.opt_depends:
+                    print('    {Fore.YELLOW}optional depends: '
+                          '{recipe.opt_depends}{Fore.RESET}'
+                          .format(recipe=recipe, Fore=Out_Fore))
+
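To tie _read_configuration above to practice: it splits every non-comment line of a local .p4a file with shlex and appends the tokens to sys.argv before parsing. A hypothetical .p4a (the flag values are only examples):

    # lines starting with '#' are skipped
    --dist-name mydist
    --requirements python3,kivy
    --arch arm64-v8a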
+    def bootstraps(self, _args):
+        """List all the bootstraps available to build with."""
+        for bs in Bootstrap.list_bootstraps():
+            bs = Bootstrap.get_bootstrap(bs, self.ctx)
+            print('{Fore.BLUE}{Style.BRIGHT}{bs.name}{Style.RESET_ALL}'
+                  .format(bs=bs, Fore=Out_Fore, Style=Out_Style))
+            print('    {Fore.GREEN}depends: {bs.recipe_depends}{Fore.RESET}'
+                  .format(bs=bs, Fore=Out_Fore))
+
+    def clean(self, args):
+        components = args.component
+
+        component_clean_methods = {
+            'all': self.clean_all,
+            'dists': self.clean_dists,
+            'distributions': self.clean_dists,
+            'builds': self.clean_builds,
+            'bootstrap_builds': self.clean_bootstrap_builds,
+            'downloads': self.clean_download_cache}
+
+        for component in components:
+            if component not in component_clean_methods:
+                raise BuildInterruptingException((
+                    'Asked to clean "{}" but this argument is not '
+                    'recognised'.format(component)))
+            component_clean_methods[component](args)
+
+    def clean_all(self, args):
+        """Delete all build components: the package cache, package builds,
+        bootstrap builds and distributions."""
+        self.clean_dists(args)
+        self.clean_builds(args)
+        self.clean_download_cache(args)
+
+    def clean_dists(self, _args):
+        """Delete all compiled distributions in the internal distribution
+        directory."""
+        ctx = self.ctx
+        if exists(ctx.dist_dir):
+            shutil.rmtree(ctx.dist_dir)
+
+    def clean_bootstrap_builds(self, _args):
+        """Delete all the bootstrap builds."""
+        if exists(join(self.ctx.build_dir, 'bootstrap_builds')):
+            shutil.rmtree(join(self.ctx.build_dir, 'bootstrap_builds'))
+        # for bs in Bootstrap.list_bootstraps():
+        #     bs = Bootstrap.get_bootstrap(bs, self.ctx)
+        #     if bs.build_dir and exists(bs.build_dir):
+        #         info('Cleaning build for {} bootstrap.'.format(bs.name))
+        #         shutil.rmtree(bs.build_dir)
+
+    def clean_builds(self, _args):
+        """Delete all build caches for each recipe, python-install, java code
+        and compiled libs collection.
+
+        This does *not* delete the package download cache or the final
+        distributions. You can also use clean_recipe_build to delete the build
+        of a specific recipe.
+        """
+        ctx = self.ctx
+        if exists(ctx.build_dir):
+            shutil.rmtree(ctx.build_dir)
+        if exists(ctx.python_installs_dir):
+            shutil.rmtree(ctx.python_installs_dir)
+        libs_dir = join(self.ctx.build_dir, 'libs_collections')
+        if exists(libs_dir):
+            shutil.rmtree(libs_dir)
+
+    def clean_recipe_build(self, args):
+        """Deletes the build files of the given recipe.
+
+        This is intended for debug purposes. You may experience
+        strange behaviour or problems with some recipes if their
+        build has made unexpected state changes. If this happens, run
+        clean_builds, or attempt to clean other recipes until things
+        work again.
+        """
+        recipe = Recipe.get_recipe(args.recipe, self.ctx)
+        info('Cleaning build for {} recipe.'.format(recipe.name))
+        recipe.clean_build()
+        if not args.no_clean_dists:
+            self.clean_dists(args)
+
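The component names accepted by clean map one-to-one onto the methods above, and several can be cleaned in a single invocation; a hedged usage sketch:

    p4a clean builds downloads    # clean_builds, then clean_download_cache
    p4a clean distributions      # dispatches to the same method as 'dists'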
+ """ + ctx = self.ctx + if hasattr(args, 'recipes') and args.recipes: + for package in args.recipes: + remove_path = join(ctx.packages_path, package) + if exists(remove_path): + shutil.rmtree(remove_path) + info('Download cache removed for: "{}"'.format(package)) + else: + warning('No download cache found for "{}", skipping'.format( + package)) + else: + if exists(ctx.packages_path): + shutil.rmtree(ctx.packages_path) + info('Download cache removed.') + else: + print('No cache found at "{}"'.format(ctx.packages_path)) + + @require_prebuilt_dist + def export_dist(self, args): + """Copies a created dist to an output dir. + + This makes it easy to navigate to the dist to investigate it + or call build.py, though you do not in general need to do this + and can use the apk command instead. + """ + ctx = self.ctx + dist = dist_from_args(ctx, args) + if dist.needs_build: + raise BuildInterruptingException( + 'You asked to export a dist, but there is no dist ' + 'with suitable recipes available. For now, you must ' + ' create one first with the create argument.') + if args.symlink: + shprint(sh.ln, '-s', dist.dist_dir, args.output_dir) + else: + shprint(sh.cp, '-r', dist.dist_dir, args.output_dir) + + @property + def _dist(self): + ctx = self.ctx + dist = dist_from_args(ctx, self.args) + return dist + + @require_prebuilt_dist + def apk(self, args): + """Create an APK using the given distribution.""" + + ctx = self.ctx + dist = self._dist + + # Manually fixing these arguments at the string stage is + # unsatisfactory and should probably be changed somehow, but + # we can't leave it until later as the build.py scripts assume + # they are in the current directory. + fix_args = ('--dir', '--private', '--add-jar', '--add-source', + '--whitelist', '--blacklist', '--presplash', '--icon') + unknown_args = args.unknown_args + for i, arg in enumerate(unknown_args): + argx = arg.split('=') + if argx[0] in fix_args: + if len(argx) > 1: + unknown_args[i] = '='.join( + (argx[0], realpath(expanduser(argx[1])))) + elif i + 1 < len(unknown_args): + unknown_args[i+1] = realpath(expanduser(unknown_args[i+1])) + + env = os.environ.copy() + if args.build_mode == 'release': + if args.keystore: + env['P4A_RELEASE_KEYSTORE'] = realpath(expanduser(args.keystore)) + if args.signkey: + env['P4A_RELEASE_KEYALIAS'] = args.signkey + if args.keystorepw: + env['P4A_RELEASE_KEYSTORE_PASSWD'] = args.keystorepw + if args.signkeypw: + env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.signkeypw + elif args.keystorepw and 'P4A_RELEASE_KEYALIAS_PASSWD' not in env: + env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.keystorepw + + build = imp.load_source('build', join(dist.dist_dir, 'build.py')) + with current_directory(dist.dist_dir): + self.hook("before_apk_build") + os.environ["ANDROID_API"] = str(self.ctx.android_api) + build_args = build.parse_args(args.unknown_args) + self.hook("after_apk_build") + self.hook("before_apk_assemble") + + build_type = ctx.java_build_tool + if build_type == 'auto': + info('Selecting java build tool:') + + build_tools_versions = os.listdir(join(ctx.sdk_dir, + 'build-tools')) + build_tools_versions.sort(key=LooseVersion) + build_tools_version = build_tools_versions[-1] + info(('Detected highest available build tools ' + 'version to be {}').format(build_tools_version)) + + if build_tools_version >= '25.0' and exists('gradlew'): + build_type = 'gradle' + info(' Building with gradle, as gradle executable is ' + 'present') + else: + build_type = 'ant' + if build_tools_version < '25.0': + info((' Building with ant, as the 
+                              'build-tools-version is only {}').format(
+                                  build_tools_version))
+                    else:
+                        info('    Building with ant, as no gradle executable '
+                             'detected')
+
+            if build_type == 'gradle':
+                # gradle-based build
+                env["ANDROID_NDK_HOME"] = self.ctx.ndk_dir
+                env["ANDROID_HOME"] = self.ctx.sdk_dir
+
+                gradlew = sh.Command('./gradlew')
+                if exists('/usr/bin/dos2unix'):
+                    # .../dists/bdisttest_python3/gradlew
+                    # .../build/bootstrap_builds/sdl2-python3crystax/gradlew
+                    # if running docker on windows, gradlew contains CRLF
+                    output = shprint(
+                        sh.Command('dos2unix'), gradlew._path.decode('utf8'),
+                        _tail=20, _critical=True, _env=env
+                    )
+                if args.build_mode == "debug":
+                    gradle_task = "assembleDebug"
+                elif args.build_mode == "release":
+                    gradle_task = "assembleRelease"
+                else:
+                    raise BuildInterruptingException(
+                        "Unknown build mode {} for apk()".format(args.build_mode))
+                output = shprint(gradlew, "--console=plain", gradle_task,
+                                 "publishReleasePublicationToSonatypeRepository",
+                                 _tail=20,
+                                 _critical=True, _env=env)
+
+                # gradle writes the output package somewhere else and
+                # omits the version number from the file name
+                apk_dir = join(dist.dist_dir,
+                               "build", "outputs", "aar")
+                apk_glob = "*-{}.aar"
+                apk_add_version = True
+
+            else:
+                # ant-based build
+                try:
+                    ant = sh.Command('ant')
+                except sh.CommandNotFound:
+                    raise BuildInterruptingException(
+                        'Could not find ant binary, please install it '
+                        'and make sure it is in your $PATH.')
+                output = shprint(ant, args.build_mode, _tail=20,
+                                 _critical=True, _env=env)
+                apk_dir = join(dist.dist_dir, "bin")
+                apk_glob = "*-*-{}.aar"
+                apk_add_version = False
+
+            self.hook("after_apk_assemble")
+
+        info_main('# Copying android package to current directory')
+
+        apk_re = re.compile(r'.*Package: (.*\.aar)$')
+        apk_file = None
+        for line in reversed(output.splitlines()):
+            m = apk_re.match(line)
+            if m:
+                apk_file = m.groups()[0]
+                break
+
+        if not apk_file:
+            info_main('# AAR not found in build output. Guessing...')
+            if args.build_mode == "release":
+                suffixes = ("release", "release-unsigned")
+            else:
+                suffixes = ("debug", )
+            for suffix in suffixes:
+                apks = glob.glob(join(apk_dir, apk_glob.format(suffix)))
+                if apks:
+                    if len(apks) > 1:
+                        info('More than one built AAR found... guessing you '
+                             'just built {}'.format(apks[-1]))
+                    apk_file = apks[-1]
+                    break
+            else:
+                raise BuildInterruptingException('Couldn\'t find the built AAR')
+
+        info_main('# Found AAR file: {}'.format(apk_file))
+        if apk_add_version:
+            info('# Add version number to AAR')
+            apk_name, apk_suffix = basename(apk_file).split("-", 1)
+            apk_file_dest = "{}-{}-{}".format(
+                apk_name, build_args.version, apk_suffix)
+            info('# AAR renamed to {}'.format(apk_file_dest))
+            shprint(sh.cp, apk_file, apk_file_dest)
+        else:
+            shprint(sh.cp, apk_file, './')
+
+    @require_prebuilt_dist
+    def create(self, args):
+        """Create a distribution directory if it doesn't already exist, run
+        any recipes if necessary, and build the apk.
+        """
+        pass  # The decorator does everything
+
+    def archs(self, _args):
+        """List the target architectures available to be built for."""
+        print('{Style.BRIGHT}Available target architectures are:'
+              '{Style.RESET_ALL}'.format(Style=Out_Style))
+        for arch in self.ctx.archs:
+            print('    {}'.format(arch.arch))
+
+    def dists(self, args):
+        """The same as :meth:`distributions`."""
+        self.distributions(args)
+
+    def distributions(self, _args):
+        """Lists all distributions currently available (i.e. 
that have already + been built).""" + ctx = self.ctx + dists = Distribution.get_distributions(ctx) + + if dists: + print('{Style.BRIGHT}Distributions currently installed are:' + '{Style.RESET_ALL}'.format(Style=Out_Style, Fore=Out_Fore)) + pretty_log_dists(dists, print) + else: + print('{Style.BRIGHT}There are no dists currently built.' + '{Style.RESET_ALL}'.format(Style=Out_Style)) + + def delete_dist(self, _args): + dist = self._dist + if not dist.folder_exists(): + info('No dist exists that matches your specifications, ' + 'exiting without deleting.') + return + dist.delete() + + def sdk_tools(self, args): + """Runs the android binary from the detected SDK directory, passing + all arguments straight to it. This binary is used to install + e.g. platform-tools for different API level targets. This is + intended as a convenience function if android is not in your + $PATH. + """ + ctx = self.ctx + ctx.prepare_build_environment(user_sdk_dir=self.sdk_dir, + user_ndk_dir=self.ndk_dir, + user_android_api=self.android_api, + user_ndk_api=self.ndk_api) + android = sh.Command(join(ctx.sdk_dir, 'tools', args.tool)) + output = android( + *args.unknown_args, _iter=True, _out_bufsize=1, _err_to_out=True) + for line in output: + sys.stdout.write(line) + sys.stdout.flush() + + def adb(self, args): + """Runs the adb binary from the detected SDK directory, passing all + arguments straight to it. This is intended as a convenience + function if adb is not in your $PATH. + """ + self._adb(args.unknown_args) + + def logcat(self, args): + """Runs ``adb logcat`` using the adb binary from the detected SDK + directory. All extra args are passed as arguments to logcat.""" + self._adb(['logcat'] + args.unknown_args) + + def _adb(self, commands): + """Call the adb executable from the SDK, passing the given commands as + arguments.""" + ctx = self.ctx + ctx.prepare_build_environment(user_sdk_dir=self.sdk_dir, + user_ndk_dir=self.ndk_dir, + user_android_api=self.android_api, + user_ndk_api=self.ndk_api) + if platform in ('win32', 'cygwin'): + adb = sh.Command(join(ctx.sdk_dir, 'platform-tools', 'adb.exe')) + else: + adb = sh.Command(join(ctx.sdk_dir, 'platform-tools', 'adb')) + info_notify('Starting adb...') + output = adb(*commands, _iter=True, _out_bufsize=1, _err_to_out=True) + for line in output: + sys.stdout.write(line) + sys.stdout.flush() + + def build_status(self, _args): + """Print the status of the specified build. 
""" + print('{Style.BRIGHT}Bootstraps whose core components are probably ' + 'already built:{Style.RESET_ALL}'.format(Style=Out_Style)) + + bootstrap_dir = join(self.ctx.build_dir, 'bootstrap_builds') + if exists(bootstrap_dir): + for filen in os.listdir(bootstrap_dir): + print(' {Fore.GREEN}{Style.BRIGHT}{filen}{Style.RESET_ALL}' + .format(filen=filen, Fore=Out_Fore, Style=Out_Style)) + + print('{Style.BRIGHT}Recipes that are probably already built:' + '{Style.RESET_ALL}'.format(Style=Out_Style)) + other_builds_dir = join(self.ctx.build_dir, 'other_builds') + if exists(other_builds_dir): + for filen in sorted(os.listdir(other_builds_dir)): + name = filen.split('-')[0] + dependencies = filen.split('-')[1:] + recipe_str = (' {Style.BRIGHT}{Fore.GREEN}{name}' + '{Style.RESET_ALL}'.format( + Style=Out_Style, name=name, Fore=Out_Fore)) + if dependencies: + recipe_str += ( + ' ({Fore.BLUE}with ' + ', '.join(dependencies) + + '{Fore.RESET})').format(Fore=Out_Fore) + recipe_str += '{Style.RESET_ALL}'.format(Style=Out_Style) + print(recipe_str) + + +def main(): + try: + ToolchainCL() + except BuildInterruptingException as exc: + handle_build_exception(exc) + + +if __name__ == "__main__": + main() diff --git a/p4a/pythonforandroidold/tools/biglink b/p4a/pythonforandroidold/tools/biglink new file mode 100755 index 0000000..6b86dbf --- /dev/null +++ b/p4a/pythonforandroidold/tools/biglink @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +from __future__ import print_function +import os +import sys +import subprocess + +sofiles = [ ] + +for directory in sys.argv[2:]: + + for fn in os.listdir(directory): + fn = os.path.join(directory, fn) + + if not fn.endswith(".so.o"): + continue + if not os.path.exists(fn[:-2] + ".libs"): + continue + + sofiles.append(fn[:-2]) + +# The raw argument list. 
+args = [ ] + +for fn in sofiles: + afn = fn + ".o" + libsfn = fn + ".libs" + + args.append(afn) + with open(libsfn) as fd: + data = fd.read() + args.extend(data.split(" ")) + +unique_args = [ ] +while args: + a = args.pop() + if a in ('-L', ): + continue + if a not in unique_args: + unique_args.insert(0, a) +unique_args = [x for x in unique_args if x] + +print('Biglink create %s library' % sys.argv[1]) +print('Biglink arguments:') +for arg in unique_args: + print(' %s' % arg) + +args = os.environ['CC'].split() + \ + ['-shared', '-O3', '-o', sys.argv[1]] + \ + unique_args + +sys.exit(subprocess.call(args)) diff --git a/p4a/pythonforandroidold/tools/liblink b/p4a/pythonforandroidold/tools/liblink new file mode 100755 index 0000000..523eef9 --- /dev/null +++ b/p4a/pythonforandroidold/tools/liblink @@ -0,0 +1,101 @@ +#!/usr/bin/env python2.7 + +from __future__ import print_function +import sys +import subprocess +from os import environ +from os.path import basename, join + +libs = [ ] +objects = [ ] +output = None + +copylibs = environ.get('COPYLIBS', '0') == '1' + +i = 1 +while i < len(sys.argv): + opt = sys.argv[i] + i += 1 + + if opt == "-o": + output = sys.argv[i] + i += 1 + continue + + if opt.startswith("-l") or opt.startswith("-L"): + libs.append(opt) + continue + + if opt in ("-r", "-pipe", "-no-cpp-precomp"): + continue + + if opt in ("--sysroot", "-isysroot", "-framework", "-undefined", + "-macosx_version_min"): + i += 1 + continue + + if opt.startswith("-I"): + continue + + if opt.startswith("-m"): + continue + + if opt.startswith("-f"): + continue + + if opt.startswith("-O"): + continue + + if opt.startswith("-g"): + continue + + if opt.startswith("-D"): + continue + + if opt.startswith("-R"): + # for -rpath, not implemented yet. + continue + + if opt.startswith("-"): + print(sys.argv) + print("Unknown option: %s" % opt) + sys.exit(1) + + if not opt.endswith('.o'): + continue + + objects.append(opt) + + +print('liblink path is', str(environ.get('LIBLINK_PATH'))) +abs_output = join(environ.get('LIBLINK_PATH'), basename(output)) + +if not copylibs: + f = open(output, "w") + f.close() + + output = abs_output + + f = open(output + ".libs", "w") + f.write(" ".join(libs)) + f.close() + + sys.exit(subprocess.call([ + environ.get('LD'), '-r', + '-o', output + '.o' + #, '-arch', environ.get('ARCH') + ] + objects)) +else: + with open(abs_output + '.libs', 'w') as f_libs: + with open(abs_output + '.libdirs', 'w') as f_libdirs: + for l in libs: + if l[1] == 'l': + f_libs.write(l[2:]) + f_libs.write(' ') + else: + f_libdirs.write(l[2:]) + f_libdirs.write(' ') + + libargs = ' '.join(["'%s'" % arg for arg in sys.argv[1:]]) + cmd = '%s -shared %s %s' % (environ['CC'], environ['LDFLAGS'], libargs) + sys.exit(subprocess.call(cmd, shell=True)) diff --git a/p4a/pythonforandroidold/tools/liblink.sh b/p4a/pythonforandroidold/tools/liblink.sh new file mode 100755 index 0000000..36c1328 --- /dev/null +++ b/p4a/pythonforandroidold/tools/liblink.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +PYTHONPATH= python `dirname $0`/liblink "$@" diff --git a/p4a/pythonforandroidold/util.py b/p4a/pythonforandroidold/util.py new file mode 100644 index 0000000..9c007c2 --- /dev/null +++ b/p4a/pythonforandroidold/util.py @@ -0,0 +1,194 @@ +import contextlib +from os.path import exists, join +from os import getcwd, chdir, makedirs, walk, uname +import io +import json +import sh +import shutil +import sys +from fnmatch import fnmatch +from tempfile import mkdtemp +try: + from urllib.request import FancyURLopener +except ImportError: + 
from urllib import FancyURLopener
+
+from pythonforandroid.logger import (logger, Err_Fore, error, info)
+
+IS_PY3 = sys.version_info[0] >= 3
+
+
+class WgetDownloader(FancyURLopener):
+    version = ('Wget/1.17.1')
+
+
+urlretrieve = WgetDownloader().retrieve
+
+
+build_platform = '{system}-{machine}'.format(
+    system=uname()[0], machine=uname()[-1]).lower()
+"""the build platform in the format `system-machine`. We use
+this string to define the right build system when compiling some recipes or
+to get the right path for the clang compiler"""
+
+
+@contextlib.contextmanager
+def current_directory(new_dir):
+    cur_dir = getcwd()
+    logger.info(''.join((Err_Fore.CYAN, '-> directory context ', new_dir,
+                         Err_Fore.RESET)))
+    chdir(new_dir)
+    yield
+    logger.info(''.join((Err_Fore.CYAN, '<- directory context ', cur_dir,
+                         Err_Fore.RESET)))
+    chdir(cur_dir)
+
+
+@contextlib.contextmanager
+def temp_directory():
+    temp_dir = mkdtemp()
+    try:
+        logger.debug(''.join((Err_Fore.CYAN, ' + temp directory used ',
+                              temp_dir, Err_Fore.RESET)))
+        yield temp_dir
+    finally:
+        shutil.rmtree(temp_dir)
+        logger.debug(''.join((Err_Fore.CYAN, ' - temp directory deleted ',
+                              temp_dir, Err_Fore.RESET)))
+
+
+def ensure_dir(filename):
+    if not exists(filename):
+        makedirs(filename)
+
+
+class JsonStore(object):
+    """Replacement of shelve using json, needed to support both python 2 and 3.
+    """
+
+    def __init__(self, filename):
+        super(JsonStore, self).__init__()
+        self.filename = filename
+        self.data = {}
+        if exists(filename):
+            try:
+                with io.open(filename, encoding='utf-8') as fd:
+                    self.data = json.load(fd)
+            except ValueError:
+                print("Unable to read the state.db, content will be replaced.")
+
+    def __getitem__(self, key):
+        return self.data[key]
+
+    def __setitem__(self, key, value):
+        self.data[key] = value
+        self.sync()
+
+    def __delitem__(self, key):
+        del self.data[key]
+        self.sync()
+
+    def __contains__(self, item):
+        return item in self.data
+
+    def get(self, item, default=None):
+        return self.data.get(item, default)
+
+    def keys(self):
+        return self.data.keys()
+
+    def remove_all(self, prefix):
+        for key in list(self.data.keys()):  # copy, so we can delete while iterating
+            if not key.startswith(prefix):
+                continue
+            del self.data[key]
+        self.sync()
+
+    def sync(self):
+        # http://stackoverflow.com/questions/12309269/write-json-data-to-file-in-python/14870531#14870531
+        if IS_PY3:
+            with open(self.filename, 'w') as fd:
+                json.dump(self.data, fd, ensure_ascii=False)
+        else:
+            with io.open(self.filename, 'w', encoding='utf-8') as fd:
+                fd.write(unicode(json.dumps(self.data, ensure_ascii=False)))  # noqa F821
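A brief usage sketch for JsonStore above (the filename is illustrative); note that every mutation is written straight back to disk through sync():

    store = JsonStore('state.db')
    store['kivy.built'] = True  # __setitem__ syncs to disk immediately
    assert store.get('missing', 42) == 42
    store.remove_all('kivy.')  # drops every key with that prefix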
+
+
+def which(program, path_env):
+    '''Locate an executable in the system.'''
+    import os
+
+    def is_exe(fpath):
+        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+    fpath, fname = os.path.split(program)
+    if fpath:
+        if is_exe(program):
+            return program
+    else:
+        for path in path_env.split(os.pathsep):
+            path = path.strip('"')
+            exe_file = os.path.join(path, program)
+            if is_exe(exe_file):
+                return exe_file
+
+    return None
+
+
+def get_virtualenv_executable():
+    virtualenv = None
+    if virtualenv is None:
+        virtualenv = sh.which('virtualenv2')
+    if virtualenv is None:
+        virtualenv = sh.which('virtualenv-2.7')
+    if virtualenv is None:
+        virtualenv = sh.which('virtualenv')
+    return virtualenv
+
+
+def walk_valid_filens(base_dir, invalid_dir_names, invalid_file_patterns):
+    """Recursively walks all the files and directories in ``base_dir``,
+    ignoring directories whose names appear in ``invalid_dir_names``
+    and files that match any pattern in ``invalid_file_patterns``.
+
+    ``invalid_dir_names`` expects a list of plain directory names, while
+    ``invalid_file_patterns`` expects a list of glob patterns that are
+    compared against each file name alone (not against the full
+    filepath).
+
+    Yields the path of every surviving file, relative to ``base_dir``.
+
+    """
+
+    for dirn, subdirs, filens in walk(base_dir):
+
+        # Remove invalid subdirs so that they will not be walked
+        for i in reversed(range(len(subdirs))):
+            subdir = subdirs[i]
+            if subdir in invalid_dir_names:
+                subdirs.pop(i)
+
+        for filen in filens:
+            for pattern in invalid_file_patterns:
+                if fnmatch(filen, pattern):
+                    break
+            else:
+                yield join(dirn, filen)
+
+
+class BuildInterruptingException(Exception):
+    def __init__(self, message, instructions=None):
+        super(BuildInterruptingException, self).__init__(message, instructions)
+        self.message = message
+        self.instructions = instructions
+
+
+def handle_build_exception(exception):
+    """
+    Handles a raised BuildInterruptingException by printing its error
+    message and associated instructions, if any, then exiting.
+    """
+    error('Build failed: {}'.format(exception.message))
+    if exception.instructions is not None:
+        info('Instructions: {}'.format(exception.instructions))
+    exit(1)
diff --git a/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/METADATA b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/METADATA
new file mode 100644
index 0000000..932c464
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/METADATA
@@ -0,0 +1,56 @@
+Metadata-Version: 2.1
+Name: Cython
+Version: 0.29.19
+Summary: The Cython compiler for writing C extensions for the Python language.
+Home-page: http://cython.org/
+Author: Robert Bradshaw, Stefan Behnel, Dag Seljebotn, Greg Ewing, et al.
+Author-email: cython-devel@python.org
+License: Apache
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: C
+Classifier: Programming Language :: Cython
+Classifier: Topic :: Software Development :: Code Generators
+Classifier: Topic :: Software Development :: Compilers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*
+
+The Cython language makes writing C extensions for the Python language as
+easy as Python itself. 
Cython is a source code translator based on Pyrex_, +but supports more cutting edge functionality and optimizations. + +The Cython language is a superset of the Python language (almost all Python +code is also valid Cython code), but Cython additionally supports optional +static typing to natively call C functions, operate with C++ classes and +declare fast C types on variables and class attributes. This allows the +compiler to generate very efficient C code from Cython code. + +This makes Cython the ideal language for writing glue code for external +C/C++ libraries, and for fast C modules that speed up the execution of +Python code. + +Note that for one-time builds, e.g. for CI/testing, on platforms that are not +covered by one of the wheel packages provided on PyPI *and* the pure Python wheel +that we provide is not used, it is substantially faster than a full source build +to install an uncompiled (slower) version of Cython with:: + + pip install Cython --install-option="--no-cython-compile" + +.. _Pyrex: http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/ + + diff --git a/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/RECORD b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/RECORD new file mode 100644 index 0000000..f892cd1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/RECORD @@ -0,0 +1,426 @@ +../../../bin/cygdb,sha256=stst7wFyftTWO8mTdF4ukTfmJo_4jMpiEptWQ2dcRqA,266 +../../../bin/cython,sha256=bkp0Djjj9GY7EkgFYuixsKrl45No7KFRQg087IxKZTo,287 +../../../bin/cythonize,sha256=c0cSOptGR-F4hMF1VwJAwejCDd8DgXiJzghxXD_eZOs,267 +Cython-0.29.19.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Cython-0.29.19.dist-info/METADATA,sha256=1gMufXKGWOOIWt_MxFSC14_llaxGwLT7jPu10GATE98,2634 +Cython-0.29.19.dist-info/RECORD,, +Cython-0.29.19.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Cython-0.29.19.dist-info/WHEEL,sha256=VEyGcIFAmk_1KbI6gaZGw_mMiT-pdGweASQLX-DzYaY,108 +Cython-0.29.19.dist-info/entry_points.txt,sha256=2BOm1wPEnW-cqvf6H044-DF64esaA6qeqkbDlSvz1as,140 +Cython-0.29.19.dist-info/top_level.txt,sha256=jLV8tZV98iCbIfiJR4DVzTX5Ru1Y_pYMZ59wkMCe6SY,24 +Cython/Build/BuildExecutable.py,sha256=9wjcOncQpbCT_Pu2Ljj5jufFLSY6T_oHOFt0uXYTdRk,4318 +Cython/Build/Cythonize.py,sha256=ybH5CNIu3X62-9_TOA22KUqJ5AdsVlHoDQK0Ywcg5gU,8215 +Cython/Build/Dependencies.py,sha256=caejEQqKyW0Zc_Pnwn2hK98cI4eTewj0l0BJmLPGmmY,49266 +Cython/Build/Distutils.py,sha256=iO5tPX84Kc-ZWMocfuQbl_PqyC9HGGIRS-NiKI60-ZE,49 +Cython/Build/Inline.py,sha256=1R9jQHeQmDEuQNG607cdvhEL3btu03NqLNiT4Z5dTjQ,13369 +Cython/Build/IpythonMagic.py,sha256=j-E-JJYGj03ceWDp5SRmyK-WxyroRkNOEQODA9rCfFc,21126 +Cython/Build/Tests/TestCyCache.py,sha256=olOvphv4q1CLsNzMAhgmXnL77zhGGQKuKbYs_iSFFvA,4151 +Cython/Build/Tests/TestInline.py,sha256=waDhEzEhRRLnmJjnK4K9CI8Xa8_7g0G4ehygZIV-Vgg,2863 +Cython/Build/Tests/TestIpythonMagic.py,sha256=SRDuhV-bte134lQjgk6lMj_807AwKnpjkv23Z2Y1UHA,6227 +Cython/Build/Tests/TestStripLiterals.py,sha256=-QeUd22OnoL50rW2EgpfIA01UzRMutcBA5NrhkHiE7M,1550 +Cython/Build/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Build/Tests/__pycache__/TestCyCache.cpython-38.pyc,, +Cython/Build/Tests/__pycache__/TestInline.cpython-38.pyc,, +Cython/Build/Tests/__pycache__/TestIpythonMagic.cpython-38.pyc,, +Cython/Build/Tests/__pycache__/TestStripLiterals.cpython-38.pyc,, +Cython/Build/Tests/__pycache__/__init__.cpython-38.pyc,, +Cython/Build/__init__.py,sha256=zBhW6hT9Mwk1ZybfuPi61iCa6A4srId1HJz9OiEd07o,69 
+Cython/Build/__pycache__/BuildExecutable.cpython-38.pyc,, +Cython/Build/__pycache__/Cythonize.cpython-38.pyc,, +Cython/Build/__pycache__/Dependencies.cpython-38.pyc,, +Cython/Build/__pycache__/Distutils.cpython-38.pyc,, +Cython/Build/__pycache__/Inline.cpython-38.pyc,, +Cython/Build/__pycache__/IpythonMagic.cpython-38.pyc,, +Cython/Build/__pycache__/__init__.cpython-38.pyc,, +Cython/CodeWriter.py,sha256=Sa1hLmUcIMnCGTNASqyzfGqk70i6v-YslD5ogllCaiY,23901 +Cython/Compiler/AnalysedTreeTransforms.py,sha256=T2r1SLFeG7a4D9tt93hm8YRH_aGLwmK82PNpb1RsbnE,3826 +Cython/Compiler/Annotate.py,sha256=bkVgdH3ItuIbaSpi9Qo5bKiIjOX3-J5sZORfcyk_eZY,12950 +Cython/Compiler/AutoDocTransforms.py,sha256=le7k-xhGWDkvpRXRppZsqhJzkav6i41GmAOmDetxVmk,7517 +Cython/Compiler/Buffer.py,sha256=5PQYYah9KJ67MJj3M-S9U-N27uCgh8KskwnkfLH1luc,28905 +Cython/Compiler/Builtin.py,sha256=9iF9CpFZVgVjXdSnfl36jF-7oVG8HesHxUoE9pRLSzo,22962 +Cython/Compiler/CmdLine.py,sha256=cRwX-QUfBCVmGXxVKKAeF2CHKiLVtjH4QRGMGaBRmkI,9939 +Cython/Compiler/Code.pxd,sha256=-qOUW7mnkCGK_0qMIR7UuXo61txMxo1oosZW1vY_Mm4,3321 +Cython/Compiler/Code.py,sha256=BYGMUSVtVHOLdn0ycPjYrfeRt1Jr6avoA5wj6G05cqo,96342 +Cython/Compiler/CodeGeneration.py,sha256=jkcx2uX07nck0UZSgysIThRuJiPbdkSeXR4Z2uzbQU8,1108 +Cython/Compiler/CythonScope.py,sha256=mNwmE509uePmR3S2djg3Dq6zOZ3hgK-U8NDeawys9WM,6027 +Cython/Compiler/DebugFlags.py,sha256=5Zg9ETp0qPFEma6QMtrGUwu9Fn6NTYMBMWPI_GxFW0A,623 +Cython/Compiler/Errors.py,sha256=GATz9x6onls09cM6TeDw3kdBgdCxUiKJBILwukBF6WI,7554 +Cython/Compiler/ExprNodes.py,sha256=GDb4Y6s1NpDFdLXAC6n6_f316o_InehRUiDMuW570xM,547952 +Cython/Compiler/FlowControl.cpython-38-x86_64-linux-gnu.so,sha256=lcydofKTEcTH9z1QVlgyuROtgCpiEY7xDtHw6yjMbk8,664740 +Cython/Compiler/FlowControl.pxd,sha256=W8bqGCJLzvAhnL3d1OF8798ZDJg0QI0eA_ebnA4dkoQ,2918 +Cython/Compiler/FlowControl.py,sha256=DI_23pXNnjekN_bPvkOZ6pxpkDwES18qrzLQK1TqJ-Q,45482 +Cython/Compiler/FusedNode.cpython-38-x86_64-linux-gnu.so,sha256=jIwypisWmYpFbL3idS1U-nvYDQGKgr_lGGcsAtpYFU4,431522 +Cython/Compiler/FusedNode.py,sha256=txDFZVLRyl5kKrNdHzpBXf5wlQHMhpW1llIBTHQjnp8,37482 +Cython/Compiler/Future.py,sha256=GwcWZ_Vti0atfbOARfS2kIvZOvRuPu38wbShIn4o4kA,587 +Cython/Compiler/Interpreter.py,sha256=iNweexX2HDI5nZj2rzkW-lw9Rq3gzM__P7SBqH3uxbU,2106 +Cython/Compiler/Lexicon.py,sha256=Cw_wIfQymcTEdkoo82V2xbV8kvCp30O-Pc7qF4hbfCI,4855 +Cython/Compiler/Main.py,sha256=_b1cDedgPRDFy4sN7-FZsKx5qbcD4XTefhD9qZwlgWQ,36488 +Cython/Compiler/MemoryView.py,sha256=c6J7PtQ6wccb9uBxvbLngia4jO-h2uea7viIzJNhDYU,30009 +Cython/Compiler/ModuleNode.py,sha256=7bytpTYiaSofhF7E59tLlLCujNN3vbHiFUsABGAnP_8,141401 +Cython/Compiler/Naming.py,sha256=Z6FLRoOl21F91SMoIoMSu_CqYosGhxDJLO9grYSHhVI,6333 +Cython/Compiler/Nodes.py,sha256=tE6VTjcfeokvrdf7E7fPqmaOHlCiFgJjGlxVT4Yp63o,390605 +Cython/Compiler/Optimize.py,sha256=icS5VhBqQcJ9IpKRjHKZXMnzsppVx9Ck_6qUd0WIu6Y,209730 +Cython/Compiler/Options.py,sha256=MwWgRQycxcSlKEk0EK0mVp-ZFgbdF1ppQB0JLmg0Q2o,19674 +Cython/Compiler/ParseTreeTransforms.pxd,sha256=oDSda3XYP79o8tCAxEm_epWWVPXSaPy8lYrprWYRyNk,2468 +Cython/Compiler/ParseTreeTransforms.py,sha256=P8vvSDoY6i4jaHceasxAOJi5vFI5_sHs1LBLNf3IDiA,138419 +Cython/Compiler/Parsing.pxd,sha256=M0fm8QPPvytOWk6DZq-WYSaEzFv40qRUXRLY2L5DSYM,8903 +Cython/Compiler/Parsing.py,sha256=iWlfEzZ8Ilg2C5Rog7u08DUVIFmVNblUovoTNQH3hpQ,129239 +Cython/Compiler/Pipeline.py,sha256=6ravd0QCR5sCoKlz9HEz209A2UqgLp4Qp0VysoKa_mI,14061 +Cython/Compiler/PyrexTypes.py,sha256=FXIoR4o-rTjK3BZrlaItFu0UBVBTi_n345-rpz0v1L4,173919 
+Cython/Compiler/Pythran.py,sha256=NHIml0yx0jPLyTLRAHXZr0LHTyEyfYqspgYuV4vdNKI,7267 +Cython/Compiler/Scanning.cpython-38-x86_64-linux-gnu.so,sha256=LfV_J6jr_5QfO_Bv_5ytuN_dLCBTO40TD5Ch9WGYJgc,315772 +Cython/Compiler/Scanning.pxd,sha256=vjjPLZb5udPzMpk67DKojTTDUl31QU86oXyAMks7Hsw,2113 +Cython/Compiler/Scanning.py,sha256=Gl7sU5rI-5H5v8z9QLZBh1hivS2cOGa9H878QdEpUU4,18438 +Cython/Compiler/StringEncoding.py,sha256=RyOJNYyNE1Sr8QhcpqpC5OKd_0RqdejGRfAvzc6e3Lg,9809 +Cython/Compiler/Symtab.py,sha256=q1bXbBJn8pReJCi-otdWBXbMOWHgMM4wI7MjZfSWNxE,111375 +Cython/Compiler/Tests/TestBuffer.py,sha256=iAuIAEGw0B-15SMCEx3bFHL80qQkmcpPFR-SUqjphAM,4122 +Cython/Compiler/Tests/TestCmdLine.py,sha256=wBtbNserRsU4_Yj_0W6cujG-07PkwORT-r1Z3oLHmcU,4414 +Cython/Compiler/Tests/TestFlowControl.py,sha256=ge3iqBor6xe5MLaLbOtw7ETntJnAh8EequF1aetVzMw,1848 +Cython/Compiler/Tests/TestGrammar.py,sha256=zWMvYG19nIH85Le8ragXt2vLBlWlGGNeMgrTdQO5JGM,3443 +Cython/Compiler/Tests/TestMemView.py,sha256=ndWdPFhWHi2skuKxvhwLT7h07wDpDxm0BJrAxmVrp3I,2512 +Cython/Compiler/Tests/TestParseTreeTransforms.py,sha256=Hjw5Ua4PMxkkfPcshGpAEspC6kryOOnr0oYCgSvK_dw,8500 +Cython/Compiler/Tests/TestSignatureMatching.py,sha256=qMiQZeg5_Eu8VfCY_lMawqpjpKNV0r6p6-9czKec1aY,3338 +Cython/Compiler/Tests/TestTreeFragment.py,sha256=vSBP5Tss70XdVDFCkIupramgZQVpkyyt1Flyt4wtd4c,2191 +Cython/Compiler/Tests/TestTreePath.py,sha256=x-2KBIhSE6-vT-2BPe2q-zaa1oHtc42ibKzVs_y8_So,4238 +Cython/Compiler/Tests/TestTypes.py,sha256=YuFib5WCJfSPafrhy5yrCUdwajYw61yGPo4HveTyzUs,669 +Cython/Compiler/Tests/TestUtilityLoad.py,sha256=Uzf4_bOjha-zwQaikNbsAOVQs3ZPX3YD7QQ5T4s66YY,3341 +Cython/Compiler/Tests/TestVisitor.py,sha256=QAnBpUhnirSFKqXWiawo-OhXhxIRTQidWxEzGjJDz6M,2228 +Cython/Compiler/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Compiler/Tests/__pycache__/TestBuffer.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestCmdLine.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestFlowControl.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestGrammar.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestMemView.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestParseTreeTransforms.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestSignatureMatching.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestTreeFragment.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestTreePath.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestTypes.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestUtilityLoad.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/TestVisitor.cpython-38.pyc,, +Cython/Compiler/Tests/__pycache__/__init__.cpython-38.pyc,, +Cython/Compiler/TreeFragment.py,sha256=jQn4Lp2dNddJ-tjPquoFcyTcX9EIuTAbZKZAKs9-cGU,9408 +Cython/Compiler/TreePath.py,sha256=tAU_aBM0Bv5AAGehHeeTAazpdGl1fkmKQner56v02zY,7310 +Cython/Compiler/TypeInference.py,sha256=s-GKZcq16KPPgY_OpF8cTlQmX1Cpu-qBMCtmAYDg8fc,22326 +Cython/Compiler/TypeSlots.py,sha256=DBum-NYjoHdl7-VtZQavLxxwhDXnqS561wfTwukeyIg,37007 +Cython/Compiler/UtilNodes.py,sha256=mS6jlZ530p17WGU0ApbwvLecuByT18LFipVrKJg5jrM,11636 +Cython/Compiler/UtilityCode.py,sha256=PbQtJt9fSwgm5xeXgYWQih6eUSmJL_RwTxOa5T9SrZU,9391 +Cython/Compiler/Version.py,sha256=f2mS6aYYdu0DMRK3B4IuzMlCo-k-ffmehCao_vKlTdk,181 +Cython/Compiler/Visitor.cpython-38-x86_64-linux-gnu.so,sha256=uueKPnsM4G9nBjAMRmWSdzXW8PoWIGhCl0NXNFJfNcQ,364136 +Cython/Compiler/Visitor.pxd,sha256=KvOZgHoEREMTVYXr1ZoAk9H4n__rpmhIwE2S11ajeYM,1792 +Cython/Compiler/Visitor.py,sha256=Zt5rvEK3hzHKrYpJcWC7Untr-5cW94GhuCMbR17d4X4,29970 
+Cython/Compiler/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Compiler/__pycache__/AnalysedTreeTransforms.cpython-38.pyc,, +Cython/Compiler/__pycache__/Annotate.cpython-38.pyc,, +Cython/Compiler/__pycache__/AutoDocTransforms.cpython-38.pyc,, +Cython/Compiler/__pycache__/Buffer.cpython-38.pyc,, +Cython/Compiler/__pycache__/Builtin.cpython-38.pyc,, +Cython/Compiler/__pycache__/CmdLine.cpython-38.pyc,, +Cython/Compiler/__pycache__/Code.cpython-38.pyc,, +Cython/Compiler/__pycache__/CodeGeneration.cpython-38.pyc,, +Cython/Compiler/__pycache__/CythonScope.cpython-38.pyc,, +Cython/Compiler/__pycache__/DebugFlags.cpython-38.pyc,, +Cython/Compiler/__pycache__/Errors.cpython-38.pyc,, +Cython/Compiler/__pycache__/ExprNodes.cpython-38.pyc,, +Cython/Compiler/__pycache__/FlowControl.cpython-38.pyc,, +Cython/Compiler/__pycache__/FusedNode.cpython-38.pyc,, +Cython/Compiler/__pycache__/Future.cpython-38.pyc,, +Cython/Compiler/__pycache__/Interpreter.cpython-38.pyc,, +Cython/Compiler/__pycache__/Lexicon.cpython-38.pyc,, +Cython/Compiler/__pycache__/Main.cpython-38.pyc,, +Cython/Compiler/__pycache__/MemoryView.cpython-38.pyc,, +Cython/Compiler/__pycache__/ModuleNode.cpython-38.pyc,, +Cython/Compiler/__pycache__/Naming.cpython-38.pyc,, +Cython/Compiler/__pycache__/Nodes.cpython-38.pyc,, +Cython/Compiler/__pycache__/Optimize.cpython-38.pyc,, +Cython/Compiler/__pycache__/Options.cpython-38.pyc,, +Cython/Compiler/__pycache__/ParseTreeTransforms.cpython-38.pyc,, +Cython/Compiler/__pycache__/Parsing.cpython-38.pyc,, +Cython/Compiler/__pycache__/Pipeline.cpython-38.pyc,, +Cython/Compiler/__pycache__/PyrexTypes.cpython-38.pyc,, +Cython/Compiler/__pycache__/Pythran.cpython-38.pyc,, +Cython/Compiler/__pycache__/Scanning.cpython-38.pyc,, +Cython/Compiler/__pycache__/StringEncoding.cpython-38.pyc,, +Cython/Compiler/__pycache__/Symtab.cpython-38.pyc,, +Cython/Compiler/__pycache__/TreeFragment.cpython-38.pyc,, +Cython/Compiler/__pycache__/TreePath.cpython-38.pyc,, +Cython/Compiler/__pycache__/TypeInference.cpython-38.pyc,, +Cython/Compiler/__pycache__/TypeSlots.cpython-38.pyc,, +Cython/Compiler/__pycache__/UtilNodes.cpython-38.pyc,, +Cython/Compiler/__pycache__/UtilityCode.cpython-38.pyc,, +Cython/Compiler/__pycache__/Version.cpython-38.pyc,, +Cython/Compiler/__pycache__/Visitor.cpython-38.pyc,, +Cython/Compiler/__pycache__/__init__.cpython-38.pyc,, +Cython/Coverage.py,sha256=FtCMjKLYWvtULxWIzN-y3RhbwQwL4GtLyIjj-B3-07E,13537 +Cython/Debugger/Cygdb.py,sha256=CH_pXm0Jhl4SAe6sJXa5NS47vMmQ2KBbecyV56vLqFE,5751 +Cython/Debugger/DebugWriter.py,sha256=Yzz28JR4qZepxvxeu_1rJxIjJ4JbNQm5vM5e_UtNuRo,1945 +Cython/Debugger/Tests/TestLibCython.py,sha256=xrENLEahnp6WtOfokVtsALR6Ot2jFR6T5ZZRcaX0Vxk,8327 +Cython/Debugger/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Debugger/Tests/__pycache__/TestLibCython.cpython-38.pyc,, +Cython/Debugger/Tests/__pycache__/__init__.cpython-38.pyc,, +Cython/Debugger/Tests/__pycache__/test_libcython_in_gdb.cpython-38.pyc,, +Cython/Debugger/Tests/__pycache__/test_libpython_in_gdb.cpython-38.pyc,, +Cython/Debugger/Tests/cfuncs.c,sha256=4SZurmnz5J1SiIs9N26Eu4zc2wvF_qMEKaN0eTcbDPo,71 +Cython/Debugger/Tests/codefile,sha256=ugwpT9GPtYZIKe2Xco4PqikyA-poQAeYfE0icXmfb44,641 +Cython/Debugger/Tests/test_libcython_in_gdb.py,sha256=EvPTYkd7nzR3JtFim-ASLI6wfcYhgWfI4BQrucLJbHY,15804 +Cython/Debugger/Tests/test_libpython_in_gdb.py,sha256=1BD_FtMkmS4SoSQZq7MgAgDnvqIw3EcYZFVrtoCQmxo,4079 
+Cython/Debugger/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Debugger/__pycache__/Cygdb.cpython-38.pyc,, +Cython/Debugger/__pycache__/DebugWriter.cpython-38.pyc,, +Cython/Debugger/__pycache__/__init__.cpython-38.pyc,, +Cython/Debugger/__pycache__/libcython.cpython-38.pyc,, +Cython/Debugger/__pycache__/libpython.cpython-38.pyc,, +Cython/Debugger/libcython.py,sha256=Qs0qGzeUyeY___3jRDy_WWIDFGfSRH4al7On2XxkuNg,44949 +Cython/Debugger/libpython.py,sha256=IyTEdtGLnpQmt2XPgZ7oskQ8qGWWV2_5TMgZ5NhTA0k,90489 +Cython/Debugging.py,sha256=vFtJhn7QstMf5gnYru2qHIz5ZjPg1KSlZVGHr-pBCwM,552 +Cython/Distutils/__init__.py,sha256=uyWaN2NJ_mKYLzVsDPi0qZCdIYoW5M_7YYEmAOIL3Ek,98 +Cython/Distutils/__pycache__/__init__.cpython-38.pyc,, +Cython/Distutils/__pycache__/build_ext.cpython-38.pyc,, +Cython/Distutils/__pycache__/extension.cpython-38.pyc,, +Cython/Distutils/__pycache__/old_build_ext.cpython-38.pyc,, +Cython/Distutils/build_ext.py,sha256=Fc_cI5wN0fT1Mf2k5B5nH-PgZ8Gq2lL6OlzF_qzy3dA,1007 +Cython/Distutils/extension.py,sha256=FHvtK3Tj9MqE17TuZ_jWg1Mh4X7e-CXIPUpJK7nqcQE,4706 +Cython/Distutils/old_build_ext.py,sha256=Hy34A1HqhoDOyU-krN2gJUYXK2mYWc8E2EZB-stvmrE,13635 +Cython/Includes/Deprecated/python.pxd,sha256=l7crg8H9cVRedMcjDf_9xDLdnUT57Vt1BxlZWom-h88,61 +Cython/Includes/Deprecated/python_bool.pxd,sha256=qOaFbsP6_pKoB3HGTjQUkFhQqukXheCmcSnnBQLdKGQ,66 +Cython/Includes/Deprecated/python_buffer.pxd,sha256=gv2a3ngcOnRKZZHSox_bW1WD8jGbxfH9NJm1-iUXf9U,68 +Cython/Includes/Deprecated/python_bytes.pxd,sha256=07-Hk3YpN_i4mIlbWYbNgDkjEytQAYOepJLJTY1CrVk,67 +Cython/Includes/Deprecated/python_cobject.pxd,sha256=V9F0DHQbFZPbJ8RRnN9mft2ipq4wubM8ghBCGHr6NwE,69 +Cython/Includes/Deprecated/python_complex.pxd,sha256=ITmq55v0b1gibEpLSCTCz68ViljenSuGGjiWn_nvIvI,69 +Cython/Includes/Deprecated/python_dict.pxd,sha256=gYhGkJhMmzWcrXoPnJHUcp-vdtcwUACbGlfv3wtGsKU,66 +Cython/Includes/Deprecated/python_exc.pxd,sha256=irWdwDYRWU16-P54uGDNfUSUtkL5Sj_1zBDWext_80g,65 +Cython/Includes/Deprecated/python_float.pxd,sha256=v1Hbpd4SF3hSF7ZL_olMaYJzmBNA9jWn0eO9ggLBlvc,67 +Cython/Includes/Deprecated/python_function.pxd,sha256=lkYKySQy1W36hfyyAJsc3E-8d9bsx5k8OhIMFQ6k2jA,70 +Cython/Includes/Deprecated/python_getargs.pxd,sha256=NEdeqPqu4di0YJm_7yLfvuS903CAe4K2Pzb13TRfBdE,69 +Cython/Includes/Deprecated/python_instance.pxd,sha256=FX9UlYrSxDrzch7wUvh_Y5Ix-bsDYARkXzZJOg2FvEI,70 +Cython/Includes/Deprecated/python_int.pxd,sha256=Cwd4J4KTKjxwEMz1BbCso0g0pOID9AnySKOC1g0kLqA,65 +Cython/Includes/Deprecated/python_iterator.pxd,sha256=nPJ0nKSmnUVzI1SPrTSt9wSD7SQILyhONJdP0H_-FGc,70 +Cython/Includes/Deprecated/python_list.pxd,sha256=VHpylsg46-5Ud8rwlPe63bb3zSToXm9R_fPorZrJsUE,66 +Cython/Includes/Deprecated/python_long.pxd,sha256=pg8hOKNoKaW-Mslugzeq6NCeznJw939LT24AVQn_cqE,66 +Cython/Includes/Deprecated/python_mapping.pxd,sha256=AZtJdYm37glDSNChduAsgavz-_DPDkxxQEAO9lDGy84,69 +Cython/Includes/Deprecated/python_mem.pxd,sha256=Mxidel5P4yuJxJOvoYr0PN1FD78oCOIJUEMPYMYU7lE,65 +Cython/Includes/Deprecated/python_method.pxd,sha256=x5ye5_8KqtsW2HrEon5NdFJmIkmVDV1KeVpFsuC2UZE,68 +Cython/Includes/Deprecated/python_module.pxd,sha256=lKu5VYCgC6S7LSgFa22V2YTY9JfML0vABDZpChhxs60,68 +Cython/Includes/Deprecated/python_number.pxd,sha256=X4MxGoITZuJNPtC2cFJ8lQwui8MOC6rQfEDbFIcWA9k,68 +Cython/Includes/Deprecated/python_object.pxd,sha256=qr2OwYVot4ELK3_-mCfaktXgLJEaKWDyCEblQ2vXV-E,68 +Cython/Includes/Deprecated/python_oldbuffer.pxd,sha256=QyY4Vn5-cFaOt0oZ27GuRXa3tLawgMZN8KMamn9F1yo,71 
+Cython/Includes/Deprecated/python_pycapsule.pxd,sha256=tHJfhgm1TrSwJQwQFdhwP7YE7oQFiegxhNhgCDmlB6A,71 +Cython/Includes/Deprecated/python_ref.pxd,sha256=wv39G35V7tN5sIhcL1APpe5NuhCwYwVy6X5DPPm5g5A,65 +Cython/Includes/Deprecated/python_sequence.pxd,sha256=9ycCua1ODfECKPd56_GBmeqzWrfdqmkjhbEmdt87NC0,70 +Cython/Includes/Deprecated/python_set.pxd,sha256=_Z5KVXs0V_T8fpgLX-2LbDAZIY1HnuhO-eTUHHRYwu0,65 +Cython/Includes/Deprecated/python_string.pxd,sha256=6VgAehwW9PcUC9Kp_HbRVMYPeF_Q-L8yr9o2ezuTzys,68 +Cython/Includes/Deprecated/python_tuple.pxd,sha256=_ZTQh7dRBmrRs9mtmOFjP37d0IFItxs20kzFtKtkY-g,67 +Cython/Includes/Deprecated/python_type.pxd,sha256=2OKmEdSqoyK8fXttlHG3NRguZ-ZikUUet-kjKLq-eEU,66 +Cython/Includes/Deprecated/python_unicode.pxd,sha256=TF8-N0un1WdyccTDo9hZVABc53SYzKnC3MEKrGb3vV0,69 +Cython/Includes/Deprecated/python_version.pxd,sha256=ZXrK0UGUt8vHbYPxm7PTdhMe1_h7Yj6Lo74oFxjnNns,69 +Cython/Includes/Deprecated/python_weakref.pxd,sha256=CUWMSmClrWPoTnlClOFCSHa6Xd55qDgIlcDCD6tfEhM,69 +Cython/Includes/Deprecated/stdio.pxd,sha256=lNc2YuvWJ-LNSSdN7adDo1lf-C2M0r10hH4bysha9Sg,64 +Cython/Includes/Deprecated/stdlib.pxd,sha256=PbCbjT8MjDjVRjx5Rod79gi22-9YI35jTulePAKCPXE,65 +Cython/Includes/Deprecated/stl.pxd,sha256=tHpByeYgNiclr3YtCdKKAeEs3CHJflqacC7YgV7YN8k,2187 +Cython/Includes/cpython/__init__.pxd,sha256=8URNRvb7JkYhqDZv2J0bVsdeZBEJBu7u2QFYkDyXPG8,8254 +Cython/Includes/cpython/array.pxd,sha256=GtKsHa5NBRa4Y3Utu7cjBonEgmV_A1yf6I0koBeQXeg,6034 +Cython/Includes/cpython/bool.pxd,sha256=FaNn8K-Toq8FAws8BguKMk0IPM7IJm9IiUUGARSrKYk,1359 +Cython/Includes/cpython/buffer.pxd,sha256=2Ri5P2JFsxOTr5xjOvUBZUOF4PptcxPwumZrvNbNHiM,4831 +Cython/Includes/cpython/bytearray.pxd,sha256=m0VdoHgouF1T0VtRjFLXZ5fi22vaMdVwFWpF3IxB6m4,1443 +Cython/Includes/cpython/bytes.pxd,sha256=tGLuiBMzQjurK_pq77CM7P0C-Hn0KUIDZCXW9QvlJAI,9906 +Cython/Includes/cpython/ceval.pxd,sha256=h6fBetZCUvWTcCn3bkXZg2kqnIuyC5ZSChyhOocxVus,236 +Cython/Includes/cpython/cobject.pxd,sha256=ZeMdbpZLqpcTywdv2VoppMTWD4X_yghL6Qox7LVfOyg,1524 +Cython/Includes/cpython/complex.pxd,sha256=-bu0Cq91tS_U5tTra18S0jqt1FgSJTHXJ5J8rk-MOAA,1777 +Cython/Includes/cpython/datetime.pxd,sha256=wQqB8i3tMZOTw9qrLdbHJRkxgZqscGEqmq0tIDfkkqw,6776 +Cython/Includes/cpython/dict.pxd,sha256=F-mrlcAfNmTSUkpJed63bp1IaO0cwG56t_DLk7f0xv0,6877 +Cython/Includes/cpython/exc.pxd,sha256=29-bGESwfoMqx1XU3MMggkIr8pz_l0UPruzy6KIzHxg,13606 +Cython/Includes/cpython/float.pxd,sha256=RD1qEAUocXG9qXrRiT8aCSSfGEyTzjTc9HQkv5xg1ZE,1424 +Cython/Includes/cpython/function.pxd,sha256=IoJUprbz8F10DEKh-vSSpY6nWkCHw7SqG9p2f-4gHek,2671 +Cython/Includes/cpython/genobject.pxd,sha256=emC1JPgkuvBbGC0rgeZapKDaXYEj48uWiDC-xF0Mx2I,1052 +Cython/Includes/cpython/getargs.pxd,sha256=268twKzdiAkQMXMsetNiNlNqaqzlKtiBENKbhOHd8x4,775 +Cython/Includes/cpython/instance.pxd,sha256=qCbxPeHKOJbuszDu3UEaI-KLX9lTopuaNCcpoHJ9ngU,985 +Cython/Includes/cpython/int.pxd,sha256=d9a0zUw_M3pRycCESWIjtfXWRvdvFOWxjdOjkcbX2gs,4131 +Cython/Includes/cpython/iterator.pxd,sha256=o52mLHbdm14Kqant2hR2zAdYzqK4fkSWZtBcRmpoP-I,1319 +Cython/Includes/cpython/iterobject.pxd,sha256=5UEZZwG5zyzxoCpknoQuh91zPUV11Uxr6F1taJdTv8k,1036 +Cython/Includes/cpython/list.pxd,sha256=t-xo7ROcewe6-0ztrNjsxMKV2KxD-ILUzemQ2tTuI7E,4084 +Cython/Includes/cpython/long.pxd,sha256=d6jHN1XJj7WL5PPAUK8U93IPyjWtlTmyhrBEVmxmGF8,7051 +Cython/Includes/cpython/longintrepr.pxd,sha256=_qawE2QRbCPGwj8vxEBSnyaIWjPfMVGPDu9xszuwZdE,445 +Cython/Includes/cpython/mapping.pxd,sha256=OIdvNVUoIpVCSQnkbLceTicSN0D_jRw6wQmbtxtxKuQ,2693 
+Cython/Includes/cpython/mem.pxd,sha256=AWVinanXFBZXvU141we2dD8dkOqMJ8W3KAAzpBJqB5g,5386 +Cython/Includes/cpython/memoryview.pxd,sha256=l97J5-hbH3hp9aMbdXp3n73hJFNNsng6uyh40pc8P7I,2504 +Cython/Includes/cpython/method.pxd,sha256=UWXflhIlP4y7B5XDbH9rQ15iADciGW-iqV1-dlw2Wwg,2196 +Cython/Includes/cpython/module.pxd,sha256=Vc0Up7q1Mir38bN293E8RMugxWfuzjLFHM4g2dviPBM,9226 +Cython/Includes/cpython/number.pxd,sha256=tYJ0nn0k_llUx3ilniW9iXd2rKVejA-J5UUiIJ36Kww,11922 +Cython/Includes/cpython/object.pxd,sha256=AK5D-LrDbvisO6wpkh29G6xjA71sBF_KfKUyn0k2hzg,18366 +Cython/Includes/cpython/oldbuffer.pxd,sha256=v0-YZ_Iwwj3ZQdM8VE5NPTQcbBlJdWwJGtNO9DonGgw,2916 +Cython/Includes/cpython/pycapsule.pxd,sha256=yCjOeEtTm1VTq6_lG3BeIBSQB8QXmUHhm9nqGti0g6A,5721 +Cython/Includes/cpython/pylifecycle.pxd,sha256=LziJZHclGdtsr3yT28fULHNZ_n67bs1DmI9s8YzrBGg,2000 +Cython/Includes/cpython/pystate.pxd,sha256=xgf1BBkv36qvqMaR77zZWYOuonAwe4RfNKE2g91A6fk,3683 +Cython/Includes/cpython/pythread.pxd,sha256=0375TaYmtNCDDkWBh9WY4oJ_jhoTxhu_RR5QiOsXmYg,1946 +Cython/Includes/cpython/ref.pxd,sha256=2AmgyGDhwA4scts0jcBTdGTCG0b2P8-eYAKFJk44x0I,2557 +Cython/Includes/cpython/sequence.pxd,sha256=iTp3C6wOvTdvjLmdj3k9GqQqCGDlQFpzWi07wVQqSS4,6008 +Cython/Includes/cpython/set.pxd,sha256=ewHRPVMbHUGDInZ3NziisCq68LvtmEJ-SXFbzmuJxLc,5383 +Cython/Includes/cpython/slice.pxd,sha256=Rzgn8diAsN7lS2xGTq4VZucV3ziFNra4oz4tKGEAkMo,3111 +Cython/Includes/cpython/string.pxd,sha256=EKjDGFnPcjnkndwGMJqRrszDV390Mc6o7AADChnNCiA,9944 +Cython/Includes/cpython/tuple.pxd,sha256=eOLfH75ftJeYszztGFWWZP7LnyFOgw8GNuE7PQ9hAvs,3206 +Cython/Includes/cpython/type.pxd,sha256=FOypwX0ZYamPc4uO8bejzO-HzgiaSRaXEPsxxxPIneI,1831 +Cython/Includes/cpython/unicode.pxd,sha256=M5JgQxasktTQolB5H6LxwgKg8IDSHvcySaKKd0ZyNr0,25841 +Cython/Includes/cpython/version.pxd,sha256=l5KXt04isEv3qbGRJZ8fNlCYGO24HsA2l4EM3RxTEhE,847 +Cython/Includes/cpython/weakref.pxd,sha256=UU9H_ovHG07FFgP_kY2xhGv3yJDr_8iujCZnxH2jnlo,1984 +Cython/Includes/libc/__init__.pxd,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Includes/libc/errno.pxd,sha256=j5hcKx7zinivU2b6SFMy8LZ9sJIQY5XLrp9cQUKv5AQ,2050 +Cython/Includes/libc/float.pxd,sha256=IhvZJljpTG0fZtcIp7EBO2Sqddozxoxwj4RFNVcKLpY,966 +Cython/Includes/libc/limits.pxd,sha256=xHlIyuDIKpjqclvRRYzZIcfd5G1re5QtbmoDMqZR_Ec,621 +Cython/Includes/libc/locale.pxd,sha256=sixG8EJ6wiVb0HIR1LWJ3lXTjTv463GJ9C_40HRovN4,1140 +Cython/Includes/libc/math.pxd,sha256=51YUxSe01R96_rr3sj4n4MLW-eOmQbcwdNn8YthTxqg,2948 +Cython/Includes/libc/setjmp.pxd,sha256=XRh-gSuhvFLl0nRvz5OhSWYe9eqX2attAck3JI7mwa4,297 +Cython/Includes/libc/signal.pxd,sha256=XOScPDA5vzlfEmu4D7DFT1-5Eu3qMpYdUarjt-fqlbw,1170 +Cython/Includes/libc/stddef.pxd,sha256=0rCyoocCfDL-1OQo3pxHQ-6fW20SAYktOLPoa4d97w8,164 +Cython/Includes/libc/stdint.pxd,sha256=qHJXzpWCrbvJWSaHYZL27VJPupQreTZl9VGj0jgLdRU,3449 +Cython/Includes/libc/stdio.pxd,sha256=qUaxEwNrQl1-4yHLorzzJZ-a-y5_-Rm_m7Z5meaRqH0,2476 +Cython/Includes/libc/stdlib.pxd,sha256=p62xq2XfB24WfNCjRXgD6cOYoRuV47AnYijkjWv4ugE,2444 +Cython/Includes/libc/string.pxd,sha256=tzYGbRrnccedFLes-KGgJqM0FEtwHF_q4f2fqltNvyE,2038 +Cython/Includes/libc/time.pxd,sha256=-IRH7fTq3wKBKmQQnpZRhaLsnl7D_qXFz_4BLB9O3u0,1317 +Cython/Includes/libcpp/__init__.pxd,sha256=PCx8ZRfOeoyMRu41PPlPY9uo2kZmt_7d0KR4Epzfe7c,94 +Cython/Includes/libcpp/algorithm.pxd,sha256=-2V0oR_cFbHHzeWT9RcfLvi5Oy-s_V2lO3OI6ZtX6fM,1770 +Cython/Includes/libcpp/cast.pxd,sha256=En4LBubdinfpm9Rel077tK_LGwg_3k4FAu9mlIbKjuw,501 +Cython/Includes/libcpp/complex.pxd,sha256=IjL8y9sAglhGbTKhqsJbW0mgMTYEUbYM1ekr5VDhQgY,3012 
+Cython/Includes/libcpp/deque.pxd,sha256=aWqZ9j3OgQuqFLkqRO_U2FIwbSe2fKmmYDRAfD0vGqU,3106 +Cython/Includes/libcpp/forward_list.pxd,sha256=-So1ExEOkoPfsSdMlJSlI5665-zyWLMoUxlmm2Dlokk,2392 +Cython/Includes/libcpp/functional.pxd,sha256=BXPYkffEOlKO1erTLqlkBLex6Gb5byDMF4hq_MZ2aVI,381 +Cython/Includes/libcpp/iterator.pxd,sha256=mVc1rsAYfn_ARrdQ4JG-Ut5il5ynIa1CRXLk8Be8Zks,1432 +Cython/Includes/libcpp/limits.pxd,sha256=RKV3wPvk4tV_vX5CYQRJIK5m5xXav7SeBxltlLyk8es,1661 +Cython/Includes/libcpp/list.pxd,sha256=rGQfB3_mDcRkGKtMBuvDQvAPmgzR5jxSf3eOSRgR4YA,2658 +Cython/Includes/libcpp/map.pxd,sha256=GF2sDnFBHZoU3Rcuo1rn6yKh45nhkX0_iH29xj581ow,2551 +Cython/Includes/libcpp/memory.pxd,sha256=Hj20aSnmUTPAhFCrlmF_aeHJKiMiZ2bDKhaYn2yybJo,3600 +Cython/Includes/libcpp/pair.pxd,sha256=UBJXw43uHkDlNsr0Pu1aP5tZ-ILXhUAyOLam2qdWmZA,27 +Cython/Includes/libcpp/queue.pxd,sha256=FbL4Q7C3lgtZ2YzictU1XBXzQ7G-6y9i_7l2eqzA3Xc,649 +Cython/Includes/libcpp/set.pxd,sha256=3y5Ir2TjGD7g3VRvlkXV1a3V3ZYzJvwOAfeTv8ucOCw,2170 +Cython/Includes/libcpp/stack.pxd,sha256=zM3SQOqMWONVqud13ag3bUupA-ozU_YMq4Ad2QkL6fI,292 +Cython/Includes/libcpp/string.pxd,sha256=wIoZwysFIUptbOJZv-jlMEvZhnUeDu_slkIGRbUx6uU,5071 +Cython/Includes/libcpp/typeindex.pxd,sha256=mIHr5Mq6Lol0SlzqeK6w_giVERh3uAjZm78yPDLXzc4,524 +Cython/Includes/libcpp/typeinfo.pxd,sha256=tITsqurrdaZjsEGFksem9xZtVhSxQRxHZxcoC-4Y-DY,304 +Cython/Includes/libcpp/unordered_map.pxd,sha256=deGBCS7kfVixJiE9Pt-V3r6kMXajFh3z8UEYgP05vQw,2838 +Cython/Includes/libcpp/unordered_set.pxd,sha256=eUYSOMT5Gt8kZWCUKezQGyXWzatEyNg6-nmAlmcBo-k,2622 +Cython/Includes/libcpp/utility.pxd,sha256=DxgEyKxHWnFY3bRMkbUEWsx-cdaebbELmLfImcRNonI,903 +Cython/Includes/libcpp/vector.pxd,sha256=GYqLb74owhMmNQHUCcZSxGcYPgNuw6qULsfWKr7g6OQ,3350 +Cython/Includes/numpy/__init__.pxd,sha256=gV67p6_8BCmF2GYzHQmgPNGFcx5hZvgCshh3PvfQgLE,38102 +Cython/Includes/numpy/math.pxd,sha256=qZEdamaPgCFW4J7Itc6BWgOrQSKZdxDT6kbU_gqx2g4,5807 +Cython/Includes/openmp.pxd,sha256=orCIBYFuVPtLdRdhhCm5uhGbeV_fgVCA2Jk2Bts1e2g,1713 +Cython/Includes/posix/__init__.pxd,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Includes/posix/dlfcn.pxd,sha256=2IFcGBfZEmArdE0BxB71eT_Yb7n9STaVM11AtUcg_pE,355 +Cython/Includes/posix/fcntl.pxd,sha256=IWhNrBuNjX1-xUtg4JTdihgUzP3bIzhdBKTZc34tAWk,1175 +Cython/Includes/posix/ioctl.pxd,sha256=2RC5zejPOCTkarDZM_6Vd2wc4oBuN7iaiL_C5MPBs90,99 +Cython/Includes/posix/mman.pxd,sha256=juJcLi92N9Bc6L2p4zrUmYQIgNmrTsZ6hExbl1181pc,3362 +Cython/Includes/posix/resource.pxd,sha256=MQe1bCTYQFVMsago3pgOvR6t6NElQElg7rhVANxYRcE,1254 +Cython/Includes/posix/select.pxd,sha256=e4nhGHR8TRw6Xs9du5JoFtkd8U9sm3gX_BHq2FfmU6E,546 +Cython/Includes/posix/signal.pxd,sha256=wFJI5UthdtU9mZWjEBeZ9IIfeX252JVwDk2tsbW_q3U,1876 +Cython/Includes/posix/stat.pxd,sha256=ZOcPCpXnxlRRHcUkvg559hrFfB75uTbIYRWoQeyBCYs,1734 +Cython/Includes/posix/stdio.pxd,sha256=K8DEH38hWMvy2A8zcKbHRrHSGsgwTIrQ9qCzU-0cWS0,1054 +Cython/Includes/posix/stdlib.pxd,sha256=uGRPa00_HWZ6Chv5E13F96eut0xWHSfR7IioK9rKVLY,934 +Cython/Includes/posix/strings.pxd,sha256=GNEteqND2wgXXSvkv6U9eKSC9oIom3C7o2zQ6W_J_S4,374 +Cython/Includes/posix/time.pxd,sha256=wPUD7AjxpxmnUYmogTMFjroB2VzcPh8-b_8NEj-yG14,1980 +Cython/Includes/posix/types.pxd,sha256=tWEWxST4EGHIgYS-Ce2SGjZ-KgmM2SVe1eggdcgv3JQ,1162 +Cython/Includes/posix/unistd.pxd,sha256=w9B4d9NaXBsQ62XOr2xe9UFPGewmEk5BG6sqiRWdoM8,8061 +Cython/Includes/posix/wait.pxd,sha256=WNogQvKu2hMfEQiCyaANfVWFnyJSk6TxBU0c6npeJrA,1244 +Cython/Plex/Actions.cpython-38-x86_64-linux-gnu.so,sha256=IskiUeK-JNm9TxlMg0FBD1oxhjktpxbsk7TluJTlHVE,69272 
+Cython/Plex/Actions.pxd,sha256=FC-6ffzWR4i3rR6VSL2C64Xxs1qBhpBEzRsU7WpLn1Y,585 +Cython/Plex/Actions.py,sha256=Caxkx8Kup9m4sx24ZcDTq-fAfPGG06TAHu2NI1D9zPs,2545 +Cython/Plex/DFA.py,sha256=w4vl2ejXv6ptILtkTCbB8NcvF8ylwc6DaQ2gPFrWuo4,6012 +Cython/Plex/Errors.py,sha256=As5uuGmqZe4w0B7Dm981lZTnDG-nlXSHYqiGUKnhrrY,1169 +Cython/Plex/Lexicons.py,sha256=ay3yy9fqI5y5lfgpJ4ubBjYZQ53gFDVgNGbmoSl5DxI,6907 +Cython/Plex/Machines.py,sha256=bIKg3-yxD_r7x-zEowJ7EsPBWlgXm_XhIozqsLQBeTk,7760 +Cython/Plex/Regexps.py,sha256=qaP-Fr-GgKNmBVsMyXO3ltl2HH1JQcQiFmX2oyUyeOA,16208 +Cython/Plex/Scanners.cpython-38-x86_64-linux-gnu.so,sha256=vNQhQmjNP9IgiwEfv9UA_KQHxs6955MwNZXatJKveSI,106065 +Cython/Plex/Scanners.pxd,sha256=oSfcDUZ3syc2ag73udwU5xoaIGDxiNd8a2F_LLw5PzY,1481 +Cython/Plex/Scanners.py,sha256=-TXAxKW43ZbQNCSEkMWEJ0SmqYVVCkSOT9UngOCRZnQ,12259 +Cython/Plex/Timing.py,sha256=-VgQveS-Ip_2ErjrVxh4w7cXpyVBkUaSaiLadyD3bw0,472 +Cython/Plex/Traditional.py,sha256=cAT-pZnqIwCJaqgSqgKODSznFZ5DunUw_MLWx8Y650c,4120 +Cython/Plex/Transitions.py,sha256=Tvp7cFXR3ZBPPHm0TAhUMC_-uiRR9YdOkF4t0wtk-f0,7187 +Cython/Plex/__init__.py,sha256=dvMeQpSyZE75W0gkf4Xo5LAxgQLNhkAXiQoIOtcOkZ0,1282 +Cython/Plex/__pycache__/Actions.cpython-38.pyc,, +Cython/Plex/__pycache__/DFA.cpython-38.pyc,, +Cython/Plex/__pycache__/Errors.cpython-38.pyc,, +Cython/Plex/__pycache__/Lexicons.cpython-38.pyc,, +Cython/Plex/__pycache__/Machines.cpython-38.pyc,, +Cython/Plex/__pycache__/Regexps.cpython-38.pyc,, +Cython/Plex/__pycache__/Scanners.cpython-38.pyc,, +Cython/Plex/__pycache__/Timing.cpython-38.pyc,, +Cython/Plex/__pycache__/Traditional.cpython-38.pyc,, +Cython/Plex/__pycache__/Transitions.cpython-38.pyc,, +Cython/Plex/__pycache__/__init__.cpython-38.pyc,, +Cython/Runtime/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Runtime/__pycache__/__init__.cpython-38.pyc,, +Cython/Runtime/refnanny.cpython-38-x86_64-linux-gnu.so,sha256=I0QTORq6qA4jr0564UfZTTNxHw8B5dsWln-m2deD4bY,78595 +Cython/Runtime/refnanny.pyx,sha256=f2p1_0YxK25lm8Qfsu2ytvl0Im7GYyix1Q9krEBwC6c,6279 +Cython/Shadow.py,sha256=pbPOldCcIdg5pIez2TgbFzRzgHY3Kcnqxs9ueB4xVWY,12987 +Cython/StringIOTree.py,sha256=GX-TWn9XHwY5ecb4in8ovsTS5CtPTsSxZpanLWmQxgE,3336 +Cython/Tempita/__init__.py,sha256=YHujYHiLoYUwFNNswJCgzSrDuie3sV08JsWT9Nbmp78,152 +Cython/Tempita/__pycache__/__init__.cpython-38.pyc,, +Cython/Tempita/__pycache__/_looper.cpython-38.pyc,, +Cython/Tempita/__pycache__/_tempita.cpython-38.pyc,, +Cython/Tempita/__pycache__/compat3.cpython-38.pyc,, +Cython/Tempita/_looper.py,sha256=jlStYhz9Pgp6NatX86k-netBNBmvwaeWxCRS_S8vcIM,4168 +Cython/Tempita/_tempita.cpython-38-x86_64-linux-gnu.so,sha256=5Ko_jclRWC6tV8APAPXhYWVOp6P7u0j4Zfr8A5L_g4g,610691 +Cython/Tempita/_tempita.py,sha256=4gnJhuVIsGciu_5Besbvw26g82Pm7CiXazMghZO3ejs,39588 +Cython/Tempita/compat3.py,sha256=cjW1y266vRF5Xvh8kAu7_qHGT8AGGu2kGSJRK6DI-0E,903 +Cython/TestUtils.py,sha256=fzpic9xU-LP0wempXqwUQWZapBvXnFgbW_W9--IKpIA,7979 +Cython/Tests/TestCodeWriter.py,sha256=qKad43J3hN7PLp7mVbEDESt96qsk8y3ELRwwIp9jnNw,2316 +Cython/Tests/TestCythonUtils.py,sha256=XF4Fw4J5HZ4jUPLVv7ea8ZZcl2i9yXn5cx27WTtrcmU,474 +Cython/Tests/TestJediTyper.py,sha256=F6MUG8SdzGXQwkbw6Wv1PqVlmlIT1z_7lH2buVOFT_I,6996 +Cython/Tests/TestStringIOTree.py,sha256=vTuu3z32WTcmJaf0fBq62NMghYtaPL2rRnfdl2WM--4,1946 +Cython/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13 +Cython/Tests/__pycache__/TestCodeWriter.cpython-38.pyc,, +Cython/Tests/__pycache__/TestCythonUtils.cpython-38.pyc,, +Cython/Tests/__pycache__/TestJediTyper.cpython-38.pyc,, 
+Cython/Tests/__pycache__/TestStringIOTree.cpython-38.pyc,, +Cython/Tests/__pycache__/__init__.cpython-38.pyc,, +Cython/Tests/__pycache__/xmlrunner.cpython-38.pyc,, +Cython/Tests/xmlrunner.py,sha256=FGdWRHlEoshnI-LfTdu3nWoHnqmOaOA1HoiGkY0oxSs,14799 +Cython/Utility/AsyncGen.c,sha256=l1BzmraPrHP1FMakXd2C-Mb7PDKBHrx5z7KH4yuG0B4,40051 +Cython/Utility/Buffer.c,sha256=VUF4xHKJGX7QMTvpJO40aI1JUL-SERLEvlXXXEk2dHU,29654 +Cython/Utility/Builtins.c,sha256=5J_Jq3SdcMDkAv2LQxr_E1U4aJ2X-L6IjkzDd-hB7O4,16589 +Cython/Utility/CConvert.pyx,sha256=fbZVHvm2vlWj2rgm8ajBt5jrbN30nY7dEmHlBCGomlU,4338 +Cython/Utility/CMath.c,sha256=GIc7gd2WzaZryDJM3tefqXifLJpUJs6_T_c_mFrr-s8,2566 +Cython/Utility/Capsule.c,sha256=SOeU7E7T7piQEx894T2QFH2RlSG-MmsiyuY4lVN1yso,505 +Cython/Utility/CommonStructures.c,sha256=p65HHgTrf7h7Tj7JK7tIgkLrrCrjouL8HL90EHfoMoU,2558 +Cython/Utility/Complex.c,sha256=J4HseVcBOzGo5dye0Gus8bf8rGwWLEzN9sRJM74SWiI,10043 +Cython/Utility/Coroutine.c,sha256=C28TPblACTNh3PUtH_NXmA6NZx14VDhFxZQrvMdmhq8,86539 +Cython/Utility/CpdefEnums.pyx,sha256=XMg8sdltQSNj2wGVfnHIWRvyHFCcLK8ZfpKznKi4lhY,1893 +Cython/Utility/CppConvert.pyx,sha256=-e5i3_J1SS_GbctsflQwylx9cqdk_CJ2SfQSEDHa71k,6098 +Cython/Utility/CppSupport.cpp,sha256=NTnSRCmi2PHuT3J6Qy15xMZGx0bf9l-MaxAbW6OVk6s,2234 +Cython/Utility/CythonFunction.c,sha256=7jhkZy25_-bCMJ2s0ULSrW9z-5rChIBMuF7z14tram4,45814 +Cython/Utility/Embed.c,sha256=JkU956r2aASdjhD1xxCgTGFmZGS-xLYecTlY2H63c_g,6675 +Cython/Utility/Exceptions.c,sha256=vcrvFn9qR4GmLSRWM9lXtdYtQX6-B3Lyjq9KRM9t8js,26379 +Cython/Utility/ExtensionTypes.c,sha256=IPaLp5o2SkS-JgJ8LVHf4ayJA-UXi1zSGlIAyo5Zv4k,10831 +Cython/Utility/FunctionArguments.c,sha256=IH9Y5aV_tNrJLo_CWHskEnFai9fp9cKLvRkIZYl2UGQ,12040 +Cython/Utility/ImportExport.c,sha256=ddOZhGYmdt2Z-TPoM_ALqnlzGgG62bn4CxLBo6LgX50,22202 +Cython/Utility/MemoryView.pyx,sha256=UiA-JREoGkNvfzE5qc-DES8vRR3O9Fx2vBg0fnmWVp0,49610 +Cython/Utility/MemoryView_C.c,sha256=B5aWv9D3nasoJqWiUoqK56nwR8A6TptszrdvJoi0LoA,29027 +Cython/Utility/ModuleSetupCode.c,sha256=zpSb0tv2gUtF0xpHfLpQAi_Xz7r_A6J-ablAuT10mAo,50535 +Cython/Utility/ObjectHandling.c,sha256=J-gCFFk8KvXVaWhavpMGQIXH4RSJXEK4w5xDHyU5gGw,88586 +Cython/Utility/Optimize.c,sha256=XZ_eNx9JfkOxnBKPbPHMBVp2hii89rEt4hldy_rzV60,45132 +Cython/Utility/Overflow.c,sha256=N_FL_Mn-JkEVBcXXx-NGlsEl_AYgifa-p7fmLHbQrss,12358 +Cython/Utility/Printing.c,sha256=o8XnfjNIT8Ub5KY4FAp_FNw-OE3xqjy0MgmYWgDcWao,5103 +Cython/Utility/Profile.c,sha256=SXacBqna5r-WbrmTC3t5b3H8enjuphNeD200ApORkv0,16749 +Cython/Utility/StringTools.c,sha256=3U92j--dFnCirIJod8vcPhsy5pmgg5owcGrSR6S-T3o,40989 +Cython/Utility/TestCyUtilityLoader.pyx,sha256=91lWWJub7l_6xNn3ncrvQZZ94RpkQzEx2NtAaFpvrxY,152 +Cython/Utility/TestCythonScope.pyx,sha256=HQm5E5Eehr3tkDDURURyVnDputKG3-Wn2k2aIAoru9g,1595 +Cython/Utility/TestUtilityLoader.c,sha256=dGy6ZWL2kBqtmUY7kF75UEox5kadQZ__BmZKscwg2aY,279 +Cython/Utility/TypeConversion.c,sha256=Avp5eTgp589kfsVr6uHfpaYI0Bf1CSm-NYiuStB72JQ,35453 +Cython/Utility/__init__.py,sha256=t2bpY-TYSX8lJdbKuBFJ1kBfpWVzgGw4xoZlCKfyj_s,1159 +Cython/Utility/__pycache__/__init__.cpython-38.pyc,, +Cython/Utility/arrayarray.h,sha256=Lt5L9uKhaVqO7rkpmS2xBocO0K3TdRVxcWVjgBqElFs,4065 +Cython/Utils.py,sha256=Ceg2tZwV_zDMaUkl1Z--pIGvS5cq73LSebypdxjujks,13747 +Cython/__init__.py,sha256=GMnkoIas6hfN_meqZAJF9BEs1NuY4-4B2L0Uls7hXaA,358 +Cython/__pycache__/CodeWriter.cpython-38.pyc,, +Cython/__pycache__/Coverage.cpython-38.pyc,, +Cython/__pycache__/Debugging.cpython-38.pyc,, +Cython/__pycache__/Shadow.cpython-38.pyc,, +Cython/__pycache__/StringIOTree.cpython-38.pyc,, 
+Cython/__pycache__/TestUtils.cpython-38.pyc,, +Cython/__pycache__/Utils.cpython-38.pyc,, +Cython/__pycache__/__init__.cpython-38.pyc,, +__pycache__/cython.cpython-38.pyc,, +cython.py,sha256=z2AtgHBGh0x0h0ZcGje7IhYlR6nGH_MmOh1fFMjqYn0,520 +pyximport/__init__.py,sha256=9hOyKolFtOerPiVEyktKrT1VtzbGexq9UmORzo52iHI,79 +pyximport/__pycache__/__init__.cpython-38.pyc,, +pyximport/__pycache__/pyxbuild.cpython-38.pyc,, +pyximport/__pycache__/pyximport.cpython-38.pyc,, +pyximport/pyxbuild.py,sha256=TiAkhtSxSbRW04JKtgO3FP3hfVzQ1mjjzCh5PqZDOrM,5702 +pyximport/pyximport.py,sha256=Vjxp3kbmFRf9j0ya4f0m0Ahytkjjmv2UkFueasXxL5A,23578 diff --git a/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/WHEEL new file mode 100644 index 0000000..ae40efd --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux1_x86_64 + diff --git a/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/entry_points.txt new file mode 100644 index 0000000..6966136 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +cygdb = Cython.Debugger.Cygdb:main +cython = Cython.Compiler.Main:setuptools_main +cythonize = Cython.Build.Cythonize:main + diff --git a/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/top_level.txt new file mode 100644 index 0000000..48cccd7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython-0.29.19.dist-info/top_level.txt @@ -0,0 +1,3 @@ +Cython +cython +pyximport diff --git a/venv/lib/python3.8/site-packages/Cython/Build/BuildExecutable.py b/venv/lib/python3.8/site-packages/Cython/Build/BuildExecutable.py new file mode 100644 index 0000000..2db9e5d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/BuildExecutable.py @@ -0,0 +1,142 @@ +""" +Compile a Python script into an executable that embeds CPython and run it. +Requires CPython to be built as a shared library ('libpythonX.Y'). 
+ +Basic usage: + + python cythonrun somefile.py [ARGS] +""" + +from __future__ import absolute_import + +DEBUG = True + +import sys +import os +from distutils import sysconfig + + +def get_config_var(name, default=''): + return sysconfig.get_config_var(name) or default + +INCDIR = sysconfig.get_python_inc() +LIBDIR1 = get_config_var('LIBDIR') +LIBDIR2 = get_config_var('LIBPL') +PYLIB = get_config_var('LIBRARY') +PYLIB_DYN = get_config_var('LDLIBRARY') +if PYLIB_DYN == PYLIB: + # no shared library + PYLIB_DYN = '' +else: + PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0] # 'lib(XYZ).so' -> XYZ + +CC = get_config_var('CC', os.environ.get('CC', '')) +CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '') +LINKCC = get_config_var('LINKCC', os.environ.get('LINKCC', CC)) +LINKFORSHARED = get_config_var('LINKFORSHARED') +LIBS = get_config_var('LIBS') +SYSLIBS = get_config_var('SYSLIBS') +EXE_EXT = sysconfig.get_config_var('EXE') + +def _debug(msg, *args): + if DEBUG: + if args: + msg = msg % args + sys.stderr.write(msg + '\n') + +def dump_config(): + _debug('INCDIR: %s', INCDIR) + _debug('LIBDIR1: %s', LIBDIR1) + _debug('LIBDIR2: %s', LIBDIR2) + _debug('PYLIB: %s', PYLIB) + _debug('PYLIB_DYN: %s', PYLIB_DYN) + _debug('CC: %s', CC) + _debug('CFLAGS: %s', CFLAGS) + _debug('LINKCC: %s', LINKCC) + _debug('LINKFORSHARED: %s', LINKFORSHARED) + _debug('LIBS: %s', LIBS) + _debug('SYSLIBS: %s', SYSLIBS) + _debug('EXE_EXT: %s', EXE_EXT) + +def runcmd(cmd, shell=True): + if shell: + cmd = ' '.join(cmd) + _debug(cmd) + else: + _debug(' '.join(cmd)) + + try: + import subprocess + except ImportError: # Python 2.3 ... + returncode = os.system(cmd) + else: + returncode = subprocess.call(cmd, shell=shell) + + if returncode: + sys.exit(returncode) + +def clink(basename): + runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2] + + [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)] + + LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split()) + +def ccompile(basename): + runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split()) + +def cycompile(input_file, options=()): + from ..Compiler import Version, CmdLine, Main + options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file]) + _debug('Using Cython %s to compile %s', Version.version, input_file) + result = Main.compile(sources, options) + if result.num_errors > 0: + sys.exit(1) + +def exec_file(program_name, args=()): + runcmd([os.path.abspath(program_name)] + list(args), shell=False) + +def build(input_file, compiler_args=(), force=False): + """ + Build an executable program from a Cython module. + + Returns the name of the executable file. + """ + basename = os.path.splitext(input_file)[0] + exe_file = basename + EXE_EXT + if not force and os.path.abspath(exe_file) == os.path.abspath(input_file): + raise ValueError("Input and output file names are the same, refusing to overwrite") + if (not force and os.path.exists(exe_file) and os.path.exists(input_file) + and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)): + _debug("File is up to date, not regenerating %s", exe_file) + return exe_file + cycompile(input_file, compiler_args) + ccompile(basename) + clink(basename) + return exe_file + +def build_and_run(args): + """ + Build an executable program from a Cython module and runs it. + + Arguments after the module name will be passed verbatimely to the + program. 
+ """ + cy_args = [] + last_arg = None + for i, arg in enumerate(args): + if arg.startswith('-'): + cy_args.append(arg) + elif last_arg in ('-X', '--directive'): + cy_args.append(arg) + else: + input_file = arg + args = args[i+1:] + break + last_arg = arg + else: + raise ValueError('no input file provided') + + program_name = build(input_file, cy_args) + exec_file(program_name, args) + +if __name__ == '__main__': + build_and_run(sys.argv[1:]) diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Cythonize.py b/venv/lib/python3.8/site-packages/Cython/Build/Cythonize.py new file mode 100644 index 0000000..9de84d5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Cythonize.py @@ -0,0 +1,227 @@ +#!/usr/bin/env python + +from __future__ import absolute_import + +import os +import shutil +import tempfile +from distutils.core import setup + +from .Dependencies import cythonize, extended_iglob +from ..Utils import is_package_dir +from ..Compiler import Options + +try: + import multiprocessing + parallel_compiles = int(multiprocessing.cpu_count() * 1.5) +except ImportError: + multiprocessing = None + parallel_compiles = 0 + + +class _FakePool(object): + def map_async(self, func, args): + try: + from itertools import imap + except ImportError: + imap=map + for _ in imap(func, args): + pass + + def close(self): + pass + + def terminate(self): + pass + + def join(self): + pass + + +def parse_directives(option, name, value, parser): + dest = option.dest + old_directives = dict(getattr(parser.values, dest, + Options.get_directive_defaults())) + directives = Options.parse_directive_list( + value, relaxed_bool=True, current_settings=old_directives) + setattr(parser.values, dest, directives) + + +def parse_options(option, name, value, parser): + dest = option.dest + options = dict(getattr(parser.values, dest, {})) + for opt in value.split(','): + if '=' in opt: + n, v = opt.split('=', 1) + v = v.lower() not in ('false', 'f', '0', 'no') + else: + n, v = opt, True + options[n] = v + setattr(parser.values, dest, options) + + +def parse_compile_time_env(option, name, value, parser): + dest = option.dest + old_env = dict(getattr(parser.values, dest, {})) + new_env = Options.parse_compile_time_env(value, current_settings=old_env) + setattr(parser.values, dest, new_env) + + +def find_package_base(path): + base_dir, package_path = os.path.split(path) + while os.path.isfile(os.path.join(base_dir, '__init__.py')): + base_dir, parent = os.path.split(base_dir) + package_path = '%s/%s' % (parent, package_path) + return base_dir, package_path + + +def cython_compile(path_pattern, options): + pool = None + all_paths = map(os.path.abspath, extended_iglob(path_pattern)) + try: + for path in all_paths: + if options.build_inplace: + base_dir = path + while not os.path.isdir(base_dir) or is_package_dir(base_dir): + base_dir = os.path.dirname(base_dir) + else: + base_dir = None + + if os.path.isdir(path): + # recursively compiling a package + paths = [os.path.join(path, '**', '*.{py,pyx}')] + else: + # assume it's a file(-like thing) + paths = [path] + + ext_modules = cythonize( + paths, + nthreads=options.parallel, + exclude_failures=options.keep_going, + exclude=options.excludes, + compiler_directives=options.directives, + compile_time_env=options.compile_time_env, + force=options.force, + quiet=options.quiet, + **options.options) + + if ext_modules and options.build: + if len(ext_modules) > 1 and options.parallel > 1: + if pool is None: + try: + pool = multiprocessing.Pool(options.parallel) + except 
OSError: + pool = _FakePool() + pool.map_async(run_distutils, [ + (base_dir, [ext]) for ext in ext_modules]) + else: + run_distutils((base_dir, ext_modules)) + except: + if pool is not None: + pool.terminate() + raise + else: + if pool is not None: + pool.close() + pool.join() + + +def run_distutils(args): + base_dir, ext_modules = args + script_args = ['build_ext', '-i'] + cwd = os.getcwd() + temp_dir = None + try: + if base_dir: + os.chdir(base_dir) + temp_dir = tempfile.mkdtemp(dir=base_dir) + script_args.extend(['--build-temp', temp_dir]) + setup( + script_name='setup.py', + script_args=script_args, + ext_modules=ext_modules, + ) + finally: + if base_dir: + os.chdir(cwd) + if temp_dir and os.path.isdir(temp_dir): + shutil.rmtree(temp_dir) + + +def parse_args(args): + from optparse import OptionParser + parser = OptionParser(usage='%prog [options] [sources and packages]+') + + parser.add_option('-X', '--directive', metavar='NAME=VALUE,...', + dest='directives', default={}, type="str", + action='callback', callback=parse_directives, + help='set a compiler directive') + parser.add_option('-E', '--compile-time-env', metavar='NAME=VALUE,...', + dest='compile_time_env', default={}, type="str", + action='callback', callback=parse_compile_time_env, + help='set a compile time environment variable') + parser.add_option('-s', '--option', metavar='NAME=VALUE', + dest='options', default={}, type="str", + action='callback', callback=parse_options, + help='set a cythonize option') + parser.add_option('-2', dest='language_level', action='store_const', const=2, default=None, + help='use Python 2 syntax mode by default') + parser.add_option('-3', dest='language_level', action='store_const', const=3, + help='use Python 3 syntax mode by default') + parser.add_option('--3str', dest='language_level', action='store_const', const='3str', + help='use Python 3 syntax mode by default') + parser.add_option('-a', '--annotate', dest='annotate', action='store_true', + help='generate annotated HTML page for source files') + + parser.add_option('-x', '--exclude', metavar='PATTERN', dest='excludes', + action='append', default=[], + help='exclude certain file patterns from the compilation') + + parser.add_option('-b', '--build', dest='build', action='store_true', + help='build extension modules using distutils') + parser.add_option('-i', '--inplace', dest='build_inplace', action='store_true', + help='build extension modules in place using distutils (implies -b)') + parser.add_option('-j', '--parallel', dest='parallel', metavar='N', + type=int, default=parallel_compiles, + help=('run builds in N parallel jobs (default: %d)' % + parallel_compiles or 1)) + parser.add_option('-f', '--force', dest='force', action='store_true', + help='force recompilation') + parser.add_option('-q', '--quiet', dest='quiet', action='store_true', + help='be less verbose during compilation') + + parser.add_option('--lenient', dest='lenient', action='store_true', + help='increase Python compatibility by ignoring some compile time errors') + parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true', + help='compile as much as possible, ignore compilation failures') + + options, args = parser.parse_args(args) + if not args: + parser.error("no source files provided") + if options.build_inplace: + options.build = True + if multiprocessing is None: + options.parallel = 0 + if options.language_level: + assert options.language_level in (2, 3, '3str') + options.options['language_level'] = options.language_level + return options, args 
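To see how the option callbacks above compose, here is a minimal sketch; it
only relies on parse_args as defined in this file, reached via the module
path recorded in entry_points.txt earlier. The '-i', '-3', and '-X' flags and
the 'pkg/mod.pyx' path are arbitrary example inputs:

    from Cython.Build.Cythonize import parse_args

    # '-i' implies '-b', '-3' stores the language level, and '-X' feeds the
    # parse_directives callback, which merges NAME=VALUE pairs into a dict.
    options, paths = parse_args(
        ['-i', '-3', '-X', 'boundscheck=False', 'pkg/mod.pyx'])
    assert options.build and options.build_inplace
    assert options.options['language_level'] == 3
    assert options.directives['boundscheck'] is False
    assert paths == ['pkg/mod.pyx']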
+ + +def main(args=None): + options, paths = parse_args(args) + + if options.lenient: + # increase Python compatibility by ignoring compile time errors + Options.error_on_unknown_names = False + Options.error_on_uninitialized = False + + if options.annotate: + Options.annotate = True + + for path in paths: + cython_compile(path, options) + + +if __name__ == '__main__': + main() diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Dependencies.py b/venv/lib/python3.8/site-packages/Cython/Build/Dependencies.py new file mode 100644 index 0000000..c66afbf --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Dependencies.py @@ -0,0 +1,1283 @@ +from __future__ import absolute_import, print_function + +import cython +from .. import __version__ + +import collections +import contextlib +import hashlib +import os +import shutil +import subprocess +import re, sys, time +import warnings +from glob import iglob +from io import open as io_open +from os.path import relpath as _relpath +from distutils.extension import Extension +from distutils.util import strtobool +import zipfile + +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable + +try: + import gzip + gzip_open = gzip.open + gzip_ext = '.gz' +except ImportError: + gzip_open = open + gzip_ext = '' + +try: + import zlib + zipfile_compression_mode = zipfile.ZIP_DEFLATED +except ImportError: + zipfile_compression_mode = zipfile.ZIP_STORED + +try: + import pythran +except: + pythran = None + +from .. import Utils +from ..Utils import (cached_function, cached_method, path_exists, + safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, replace_suffix) +from ..Compiler.Main import Context, CompilationOptions, default_options + +join_path = cached_function(os.path.join) +copy_once_if_newer = cached_function(copy_file_to_dir_if_newer) +safe_makedirs_once = cached_function(safe_makedirs) + +if sys.version_info[0] < 3: + # stupid Py2 distutils enforces str type in list of sources + _fs_encoding = sys.getfilesystemencoding() + if _fs_encoding is None: + _fs_encoding = sys.getdefaultencoding() + def encode_filename_in_py2(filename): + if not isinstance(filename, bytes): + return filename.encode(_fs_encoding) + return filename +else: + def encode_filename_in_py2(filename): + return filename + basestring = str + + +def _make_relative(file_paths, base=None): + if not base: + base = os.getcwd() + if base[-1] != os.path.sep: + base += os.path.sep + return [_relpath(path, base) if path.startswith(base) else path + for path in file_paths] + + +def extended_iglob(pattern): + if '{' in pattern: + m = re.match('(.*){([^}]+)}(.*)', pattern) + if m: + before, switch, after = m.groups() + for case in switch.split(','): + for path in extended_iglob(before + case + after): + yield path + return + if '**/' in pattern: + seen = set() + first, rest = pattern.split('**/', 1) + if first: + first = iglob(first+'/') + else: + first = [''] + for root in first: + for path in extended_iglob(join_path(root, rest)): + if path not in seen: + seen.add(path) + yield path + for path in extended_iglob(join_path(root, '*', '**/' + rest)): + if path not in seen: + seen.add(path) + yield path + else: + for path in iglob(pattern): + yield path + + +def nonempty(it, error_msg="expected non-empty iterator"): + empty = True + for value in it: + empty = False + yield value + if empty: + raise ValueError(error_msg) + + +@cached_function +def file_hash(filename): + path = os.path.normpath(filename) + prefix = ('%d:%s' % (len(path), 
path)).encode("UTF-8") + m = hashlib.md5(prefix) + with open(path, 'rb') as f: + data = f.read(65000) + while data: + m.update(data) + data = f.read(65000) + return m.hexdigest() + + +def update_pythran_extension(ext): + if pythran is None: + raise RuntimeError("You first need to install Pythran to use the np_pythran directive.") + try: + pythran_ext = pythran.config.make_extension(python=True) + except TypeError: # older pythran version only + pythran_ext = pythran.config.make_extension() + + ext.include_dirs.extend(pythran_ext['include_dirs']) + ext.extra_compile_args.extend(pythran_ext['extra_compile_args']) + ext.extra_link_args.extend(pythran_ext['extra_link_args']) + ext.define_macros.extend(pythran_ext['define_macros']) + ext.undef_macros.extend(pythran_ext['undef_macros']) + ext.library_dirs.extend(pythran_ext['library_dirs']) + ext.libraries.extend(pythran_ext['libraries']) + ext.language = 'c++' + + # These options are not compatible with the way normal Cython extensions work + for bad_option in ["-fwhole-program", "-fvisibility=hidden"]: + try: + ext.extra_compile_args.remove(bad_option) + except ValueError: + pass + + +def parse_list(s): + """ + >>> parse_list("") + [] + >>> parse_list("a") + ['a'] + >>> parse_list("a b c") + ['a', 'b', 'c'] + >>> parse_list("[a, b, c]") + ['a', 'b', 'c'] + >>> parse_list('a " " b') + ['a', ' ', 'b'] + >>> parse_list('[a, ",a", "a,", ",", ]') + ['a', ',a', 'a,', ','] + """ + if len(s) >= 2 and s[0] == '[' and s[-1] == ']': + s = s[1:-1] + delimiter = ',' + else: + delimiter = ' ' + s, literals = strip_string_literals(s) + def unquote(literal): + literal = literal.strip() + if literal[0] in "'\"": + return literals[literal[1:-1]] + else: + return literal + return [unquote(item) for item in s.split(delimiter) if item.strip()] + + +transitive_str = object() +transitive_list = object() +bool_or = object() + +distutils_settings = { + 'name': str, + 'sources': list, + 'define_macros': list, + 'undef_macros': list, + 'libraries': transitive_list, + 'library_dirs': transitive_list, + 'runtime_library_dirs': transitive_list, + 'include_dirs': transitive_list, + 'extra_objects': list, + 'extra_compile_args': transitive_list, + 'extra_link_args': transitive_list, + 'export_symbols': list, + 'depends': transitive_list, + 'language': transitive_str, + 'np_pythran': bool_or +} + + +@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t) +def line_iter(source): + if isinstance(source, basestring): + start = 0 + while True: + end = source.find('\n', start) + if end == -1: + yield source[start:] + return + yield source[start:end] + start = end+1 + else: + for line in source: + yield line + + +class DistutilsInfo(object): + + def __init__(self, source=None, exn=None): + self.values = {} + if source is not None: + for line in line_iter(source): + line = line.lstrip() + if not line: + continue + if line[0] != '#': + break + line = line[1:].lstrip() + kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None) + if kind is not None: + key, _, value = [s.strip() for s in line[len(kind):].partition('=')] + type = distutils_settings.get(key, None) + if line.startswith("cython:") and type is None: continue + if type in (list, transitive_list): + value = parse_list(value) + if key == 'define_macros': + value = [tuple(macro.split('=', 1)) + if '=' in macro else (macro, None) + for macro in value] + if type is bool_or: + value = strtobool(value) + self.values[key] = value + elif exn is not None: + for key in distutils_settings: + if key in 
('name', 'sources','np_pythran'): + continue + value = getattr(exn, key, None) + if value: + self.values[key] = value + + def merge(self, other): + if other is None: + return self + for key, value in other.values.items(): + type = distutils_settings[key] + if type is transitive_str and key not in self.values: + self.values[key] = value + elif type is transitive_list: + if key in self.values: + # Change a *copy* of the list (Trac #845) + all = self.values[key][:] + for v in value: + if v not in all: + all.append(v) + value = all + self.values[key] = value + elif type is bool_or: + self.values[key] = self.values.get(key, False) | value + return self + + def subs(self, aliases): + if aliases is None: + return self + resolved = DistutilsInfo() + for key, value in self.values.items(): + type = distutils_settings[key] + if type in [list, transitive_list]: + new_value_list = [] + for v in value: + if v in aliases: + v = aliases[v] + if isinstance(v, list): + new_value_list += v + else: + new_value_list.append(v) + value = new_value_list + else: + if value in aliases: + value = aliases[value] + resolved.values[key] = value + return resolved + + def apply(self, extension): + for key, value in self.values.items(): + type = distutils_settings[key] + if type in [list, transitive_list]: + value = getattr(extension, key) + list(value) + setattr(extension, key, value) + + +@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t, + single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t, + hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t, + k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t) +def strip_string_literals(code, prefix='__Pyx_L'): + """ + Normalizes every string literal to be of the form '__Pyx_Lxxx', + returning the normalized code and a mapping of labels to + string literals. + """ + new_code = [] + literals = {} + counter = 0 + start = q = 0 + in_quote = False + hash_mark = single_q = double_q = -1 + code_len = len(code) + quote_type = quote_len = None + + while True: + if hash_mark < q: + hash_mark = code.find('#', q) + if single_q < q: + single_q = code.find("'", q) + if double_q < q: + double_q = code.find('"', q) + q = min(single_q, double_q) + if q == -1: + q = max(single_q, double_q) + + # We're done. + if q == -1 and hash_mark == -1: + new_code.append(code[start:]) + break + + # Try to close the quote. + elif in_quote: + if code[q-1] == u'\\': + k = 2 + while q >= k and code[q-k] == u'\\': + k += 1 + if k % 2 == 0: + q += 1 + continue + if code[q] == quote_type and ( + quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])): + counter += 1 + label = "%s%s_" % (prefix, counter) + literals[label] = code[start+quote_len:q] + full_quote = code[q:q+quote_len] + new_code.append(full_quote) + new_code.append(label) + new_code.append(full_quote) + q += quote_len + in_quote = False + start = q + else: + q += 1 + + # Process comment. + elif -1 != hash_mark and (hash_mark < q or q == -1): + new_code.append(code[start:hash_mark+1]) + end = code.find('\n', hash_mark) + counter += 1 + label = "%s%s_" % (prefix, counter) + if end == -1: + end_or_none = None + else: + end_or_none = end + literals[label] = code[hash_mark+1:end_or_none] + new_code.append(label) + if end == -1: + break + start = q = end + + # Open the quote. 
+ else:
+ if code_len >= q+3 and (code[q] == code[q+1] == code[q+2]):
+ quote_len = 3
+ else:
+ quote_len = 1
+ in_quote = True
+ quote_type = code[q]
+ new_code.append(code[start:q])
+ start = q
+ q += quote_len
+
+ return "".join(new_code), literals
+
+
+# We need to allow spaces to allow for conditional compilation like
+# IF ...:
+# cimport ...
+dependency_regex = re.compile(r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|"
+ r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|"
+ r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|"
+ r"(?:^\s*include +['\"]([^'\"]+)['\"])", re.M)
+dependency_after_from_regex = re.compile(
+ r"(?:^\s+\(([0-9a-zA-Z_., ]*)\)[#\n])|"
+ r"(?:^\s+([0-9a-zA-Z_., ]*)[#\n])",
+ re.M)
+
+
+def normalize_existing(base_path, rel_paths):
+ return normalize_existing0(os.path.dirname(base_path), tuple(set(rel_paths)))
+
+
+@cached_function
+def normalize_existing0(base_dir, rel_paths):
+ """
+ Given some base directory ``base_dir`` and a list of path names
+ ``rel_paths``, normalize each relative path name ``rel`` by
+ replacing it by ``os.path.join(base_dir, rel)`` if that file exists.
+
+ Return a couple ``(normalized, needed_base)`` where ``normalized``
+ is the list of normalized file names and ``needed_base`` is
+ ``base_dir`` if we actually needed ``base_dir``. If no paths were
+ changed (for example, if all paths were already absolute), then
+ ``needed_base`` is ``None``.
+ """
+ normalized = []
+ needed_base = None
+ for rel in rel_paths:
+ if os.path.isabs(rel):
+ normalized.append(rel)
+ continue
+ path = join_path(base_dir, rel)
+ if path_exists(path):
+ normalized.append(os.path.normpath(path))
+ needed_base = base_dir
+ else:
+ normalized.append(rel)
+ return (normalized, needed_base)
+
+
+def resolve_depends(depends, include_dirs):
+ include_dirs = tuple(include_dirs)
+ resolved = []
+ for depend in depends:
+ path = resolve_depend(depend, include_dirs)
+ if path is not None:
+ resolved.append(path)
+ return resolved
+
+
+@cached_function
+def resolve_depend(depend, include_dirs):
+ if depend[0] == '<' and depend[-1] == '>':
+ return None
+ for dir in include_dirs:
+ path = join_path(dir, depend)
+ if path_exists(path):
+ return os.path.normpath(path)
+ return None
+
+
+@cached_function
+def package(filename):
+ dir = os.path.dirname(os.path.abspath(str(filename)))
+ if dir != filename and is_package_dir(dir):
+ return package(dir) + (os.path.basename(dir),)
+ else:
+ return ()
+
+
+@cached_function
+def fully_qualified_name(filename):
+ module = os.path.splitext(os.path.basename(filename))[0]
+ return '.'.join(package(filename) + (module,))
+
+
+@cached_function
+def parse_dependencies(source_filename):
+ # Actual parsing is way too slow, so we use regular expressions.
+ # The only catch is that we must strip comments and string
+ # literals ahead of time.
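+ # Lines like the following (the file names here are only illustrative)
+ # are what dependency_regex above picks up:
+ #
+ #     from libc.math cimport sqrt
+ #     cimport numpy, cython
+ #     cdef extern from "spam.h"
+ #     include "defs.pxi"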
+ with Utils.open_source_file(source_filename, error_handling='ignore') as fh:
+ source = fh.read()
+ distutils_info = DistutilsInfo(source)
+ source, literals = strip_string_literals(source)
+ source = source.replace('\\\n', ' ').replace('\t', ' ')
+
+ # TODO: pure mode
+ cimports = []
+ includes = []
+ externs = []
+ for m in dependency_regex.finditer(source):
+ cimport_from, cimport_list, extern, include = m.groups()
+ if cimport_from:
+ cimports.append(cimport_from)
+ m_after_from = dependency_after_from_regex.search(source, pos=m.end())
+ if m_after_from:
+ multiline, one_line = m_after_from.groups()
+ subimports = multiline or one_line
+ cimports.extend("{0}.{1}".format(cimport_from, s.strip())
+ for s in subimports.split(','))
+
+ elif cimport_list:
+ cimports.extend(x.strip() for x in cimport_list.split(","))
+ elif extern:
+ externs.append(literals[extern])
+ else:
+ includes.append(literals[include])
+ return cimports, includes, externs, distutils_info
+
+
+class DependencyTree(object):
+
+ def __init__(self, context, quiet=False):
+ self.context = context
+ self.quiet = quiet
+ self._transitive_cache = {}
+
+ def parse_dependencies(self, source_filename):
+ if path_exists(source_filename):
+ source_filename = os.path.normpath(source_filename)
+ return parse_dependencies(source_filename)
+
+ @cached_method
+ def included_files(self, filename):
+ # This is messy because included files are textually included, resolving
+ # cimports (but not includes) relative to the including file.
+ all = set()
+ for include in self.parse_dependencies(filename)[1]:
+ include_path = join_path(os.path.dirname(filename), include)
+ if not path_exists(include_path):
+ include_path = self.context.find_include_file(include, None)
+ if include_path:
+ if '.' + os.path.sep in include_path:
+ include_path = os.path.normpath(include_path)
+ all.add(include_path)
+ all.update(self.included_files(include_path))
+ elif not self.quiet:
+ print("Unable to locate '%s' referenced from '%s'" % (include, filename))
+ return all
+
+ @cached_method
+ def cimports_externs_incdirs(self, filename):
+ # This is really ugly. Nested cimports are resolved with respect to the
+ # includer, but includes are resolved with respect to the includee.
+ cimports, includes, externs = self.parse_dependencies(filename)[:3]
+ cimports = set(cimports)
+ externs = set(externs)
+ incdirs = set()
+ for include in self.included_files(filename):
+ included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include)
+ cimports.update(included_cimports)
+ externs.update(included_externs)
+ incdirs.update(included_incdirs)
+ externs, incdir = normalize_existing(filename, externs)
+ if incdir:
+ incdirs.add(incdir)
+ return tuple(cimports), externs, incdirs
+
+ def cimports(self, filename):
+ return self.cimports_externs_incdirs(filename)[0]
+
+ def package(self, filename):
+ return package(filename)
+
+ def fully_qualified_name(self, filename):
+ return fully_qualified_name(filename)
+
+ @cached_method
+ def find_pxd(self, module, filename=None):
+ is_relative = module[0] == '.'
+ if is_relative and not filename:
+ raise NotImplementedError("New relative imports.")
+ if filename is not None:
+ module_path = module.split('.')
+ if is_relative:
+ module_path.pop(0) # just explicitly relative
+ package_path = list(self.package(filename))
+ while module_path and not module_path[0]:
+ try:
+ package_path.pop()
+ except IndexError:
+ return None # FIXME: error?
+ module_path.pop(0) + relative = '.'.join(package_path + module_path) + pxd = self.context.find_pxd_file(relative, None) + if pxd: + return pxd + if is_relative: + return None # FIXME: error? + return self.context.find_pxd_file(module, None) + + @cached_method + def cimported_files(self, filename): + if filename[-4:] == '.pyx' and path_exists(filename[:-4] + '.pxd'): + pxd_list = [filename[:-4] + '.pxd'] + else: + pxd_list = [] + # Cimports generates all possible combinations package.module + # when imported as from package cimport module. + for module in self.cimports(filename): + if module[:7] == 'cython.' or module == 'cython': + continue + pxd_file = self.find_pxd(module, filename) + if pxd_file is not None: + pxd_list.append(pxd_file) + return tuple(pxd_list) + + @cached_method + def immediate_dependencies(self, filename): + all = set([filename]) + all.update(self.cimported_files(filename)) + all.update(self.included_files(filename)) + return all + + def all_dependencies(self, filename): + return self.transitive_merge(filename, self.immediate_dependencies, set.union) + + @cached_method + def timestamp(self, filename): + return os.path.getmtime(filename) + + def extract_timestamp(self, filename): + return self.timestamp(filename), filename + + def newest_dependency(self, filename): + return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)]) + + def transitive_fingerprint(self, filename, module, compilation_options): + r""" + Return a fingerprint of a cython file that is about to be cythonized. + + Fingerprints are looked up in future compilations. If the fingerprint + is found, the cythonization can be skipped. The fingerprint must + incorporate everything that has an influence on the generated code. + """ + try: + m = hashlib.md5(__version__.encode('UTF-8')) + m.update(file_hash(filename).encode('UTF-8')) + for x in sorted(self.all_dependencies(filename)): + if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'): + m.update(file_hash(x).encode('UTF-8')) + # Include the module attributes that change the compilation result + # in the fingerprint. We do not iterate over module.__dict__ and + # include almost everything here as users might extend Extension + # with arbitrary (random) attributes that would lead to cache + # misses. + m.update(str(( + module.language, + getattr(module, 'py_limited_api', False), + getattr(module, 'np_pythran', False) + )).encode('UTF-8')) + + m.update(compilation_options.get_fingerprint().encode('UTF-8')) + return m.hexdigest() + except IOError: + return None + + def distutils_info0(self, filename): + info = self.parse_dependencies(filename)[3] + kwds = info.values + cimports, externs, incdirs = self.cimports_externs_incdirs(filename) + basedir = os.getcwd() + # Add dependencies on "cdef extern from ..." files + if externs: + externs = _make_relative(externs, basedir) + if 'depends' in kwds: + kwds['depends'] = list(set(kwds['depends']).union(externs)) + else: + kwds['depends'] = list(externs) + # Add include_dirs to ensure that the C compiler will find the + # "cdef extern from ..." 
files + if incdirs: + include_dirs = list(kwds.get('include_dirs', [])) + for inc in _make_relative(incdirs, basedir): + if inc not in include_dirs: + include_dirs.append(inc) + kwds['include_dirs'] = include_dirs + return info + + def distutils_info(self, filename, aliases=None, base=None): + return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge) + .subs(aliases) + .merge(base)) + + def transitive_merge(self, node, extract, merge): + try: + seen = self._transitive_cache[extract, merge] + except KeyError: + seen = self._transitive_cache[extract, merge] = {} + return self.transitive_merge_helper( + node, extract, merge, seen, {}, self.cimported_files)[0] + + def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing): + if node in seen: + return seen[node], None + deps = extract(node) + if node in stack: + return deps, node + try: + stack[node] = len(stack) + loop = None + for next in outgoing(node): + sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing) + if sub_loop is not None: + if loop is not None and stack[loop] < stack[sub_loop]: + pass + else: + loop = sub_loop + deps = merge(deps, sub_deps) + if loop == node: + loop = None + if loop is None: + seen[node] = deps + return deps, loop + finally: + del stack[node] + + +_dep_tree = None + +def create_dependency_tree(ctx=None, quiet=False): + global _dep_tree + if _dep_tree is None: + if ctx is None: + ctx = Context(["."], CompilationOptions(default_options)) + _dep_tree = DependencyTree(ctx, quiet=quiet) + return _dep_tree + + +# If this changes, change also docs/src/reference/compilation.rst +# which mentions this function +def default_create_extension(template, kwds): + if 'depends' in kwds: + include_dirs = kwds.get('include_dirs', []) + ["."] + depends = resolve_depends(kwds['depends'], include_dirs) + kwds['depends'] = sorted(set(depends + template.depends)) + + t = template.__class__ + ext = t(**kwds) + metadata = dict(distutils=kwds, module_name=kwds['name']) + return (ext, metadata) + + +# This may be useful for advanced users? +def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None, + exclude_failures=False): + if language is not None: + print('Warning: passing language={0!r} to cythonize() is deprecated. ' + 'Instead, put "# distutils: language={0}" in your .pyx or .pxd file(s)'.format(language)) + if exclude is None: + exclude = [] + if patterns is None: + return [], {} + elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable): + patterns = [patterns] + explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)]) + seen = set() + deps = create_dependency_tree(ctx, quiet=quiet) + to_exclude = set() + if not isinstance(exclude, list): + exclude = [exclude] + for pattern in exclude: + to_exclude.update(map(os.path.abspath, extended_iglob(pattern))) + + module_list = [] + module_metadata = {} + + # workaround for setuptools + if 'setuptools' in sys.modules: + Extension_distutils = sys.modules['setuptools.extension']._Extension + Extension_setuptools = sys.modules['setuptools'].Extension + else: + # dummy class, in case we do not have setuptools + Extension_distutils = Extension + class Extension_setuptools(Extension): pass + + # if no create_extension() function is defined, use a simple + # default function. 
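+ # (default_create_extension() above returns an (Extension, metadata)
+ # pair; a user-supplied ctx.options.create_extension hook is expected
+ # to follow the same contract.)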
+ create_extension = ctx.options.create_extension or default_create_extension + + for pattern in patterns: + if isinstance(pattern, str): + filepattern = pattern + template = Extension(pattern, []) # Fake Extension without sources + name = '*' + base = None + ext_language = language + elif isinstance(pattern, (Extension_distutils, Extension_setuptools)): + cython_sources = [s for s in pattern.sources + if os.path.splitext(s)[1] in ('.py', '.pyx')] + if cython_sources: + filepattern = cython_sources[0] + if len(cython_sources) > 1: + print("Warning: Multiple cython sources found for extension '%s': %s\n" + "See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html " + "for sharing declarations among Cython files." % (pattern.name, cython_sources)) + else: + # ignore non-cython modules + module_list.append(pattern) + continue + template = pattern + name = template.name + base = DistutilsInfo(exn=template) + ext_language = None # do not override whatever the Extension says + else: + msg = str("pattern is not of type str nor subclass of Extension (%s)" + " but of type %s and class %s" % (repr(Extension), + type(pattern), + pattern.__class__)) + raise TypeError(msg) + + for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern): + if os.path.abspath(file) in to_exclude: + continue + module_name = deps.fully_qualified_name(file) + if '*' in name: + if module_name in explicit_modules: + continue + elif name: + module_name = name + + Utils.raise_error_if_module_name_forbidden(module_name) + + if module_name not in seen: + try: + kwds = deps.distutils_info(file, aliases, base).values + except Exception: + if exclude_failures: + continue + raise + if base is not None: + for key, value in base.values.items(): + if key not in kwds: + kwds[key] = value + + kwds['name'] = module_name + + sources = [file] + [m for m in template.sources if m != filepattern] + if 'sources' in kwds: + # allow users to add .c files etc. + for source in kwds['sources']: + source = encode_filename_in_py2(source) + if source not in sources: + sources.append(source) + kwds['sources'] = sources + + if ext_language and 'language' not in kwds: + kwds['language'] = ext_language + + np_pythran = kwds.pop('np_pythran', False) + + # Create the new extension + m, metadata = create_extension(template, kwds) + m.np_pythran = np_pythran or getattr(m, 'np_pythran', False) + if m.np_pythran: + update_pythran_extension(m) + module_list.append(m) + + # Store metadata (this will be written as JSON in the + # generated C file but otherwise has no purpose) + module_metadata[module_name] = metadata + + if file not in m.sources: + # Old setuptools unconditionally replaces .pyx with .c/.cpp + target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c') + try: + m.sources.remove(target_file) + except ValueError: + # never seen this in the wild, but probably better to warn about this unexpected case + print("Warning: Cython source file not found in sources list, adding %s" % file) + m.sources.insert(0, file) + seen.add(name) + return module_list, module_metadata + + +# This is the user-exposed entry point. +def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False, language=None, + exclude_failures=False, **options): + """ + Compile a set of source modules into C/C++ files and return a list of distutils + Extension objects for them. 
+
+ :param module_list: As module list, pass either a glob pattern, a list of glob
+ patterns or a list of Extension objects. The latter
+ allows you to configure the extensions separately
+ through the normal distutils options.
+ You can also pass Extension objects that have
+ glob patterns as their sources. Then, cythonize
+ will resolve the pattern and create a
+ copy of the Extension for every matching file.
+
+ :param exclude: When passing glob patterns as ``module_list``, you can exclude certain
+ module names explicitly by passing them into the ``exclude`` option.
+
+ :param nthreads: The number of concurrent builds for parallel compilation
+ (requires the ``multiprocessing`` module).
+
+ :param aliases: If you want to use compiler directives like ``# distutils: ...`` but
+ can only know at compile time (when running the ``setup.py``) which values
+ to use, you can use aliases and pass a dictionary mapping those aliases
+ to Python strings when calling :func:`cythonize`. As an example, say you
+ want to use the compiler
+ directive ``# distutils: include_dirs = ../static_libs/include/``
+ but this path isn't always fixed and you want to find it when running
+ the ``setup.py``. You can then do ``# distutils: include_dirs = MY_HEADERS``,
+ find the value of ``MY_HEADERS`` in the ``setup.py``, put it in a Python
+ variable called ``foo`` as a string, and then call
+ ``cythonize(..., aliases={'MY_HEADERS': foo})``.
+
+ :param quiet: If True, Cython won't print error, warning, or status messages during the
+ compilation.
+
+ :param force: Forces the recompilation of the Cython modules, even if the timestamps
+ don't indicate that a recompilation is necessary.
+
+ :param language: To globally enable C++ mode, you can pass ``language='c++'``. Otherwise, this
+ will be determined at a per-file level based on compiler directives. This
+ affects only modules found based on file names. Extension instances passed
+ into :func:`cythonize` will not be changed. It is recommended to use the
+ compiler directive ``# distutils: language = c++`` rather than this option.
+
+ :param exclude_failures: For a broad 'try to compile' mode that ignores compilation
+ failures and simply excludes the failed extensions,
+ pass ``exclude_failures=True``. Note that this only
+ really makes sense for compiling ``.py`` files which can also
+ be used without compilation.
+
+ :param annotate: If ``True``, will produce a HTML file for each of the ``.pyx`` or ``.py``
+ files compiled. The HTML file gives an indication
+ of how much Python interaction there is in
+ each of the source code lines, compared to plain C code.
+ It also allows you to see the C/C++ code
+ generated for each line of Cython code. This report is invaluable when
+ optimizing a function for speed,
+ and for determining when to :ref:`release the GIL <nogil>`:
+ in general, a ``nogil`` block may contain only "white" code.
+ See examples in :ref:`determining_where_to_add_types` or
+ :ref:`primes`.
+
+ :param compiler_directives: Allows you to set compiler directives in the ``setup.py`` like this:
+ ``compiler_directives={'embedsignature': True}``.
+ See :ref:`compiler-directives`.
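+
+ A minimal, illustrative ``setup.py`` sketch (the package name and glob
+ pattern are hypothetical)::
+
+     from setuptools import setup
+     from Cython.Build import cythonize
+
+     setup(
+         name='spam',
+         ext_modules=cythonize("spam/*.pyx", nthreads=4, annotate=True),
+     )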
+ """ + if exclude is None: + exclude = [] + if 'include_path' not in options: + options['include_path'] = ['.'] + if 'common_utility_include_dir' in options: + safe_makedirs(options['common_utility_include_dir']) + + if pythran is None: + pythran_options = None + else: + pythran_options = CompilationOptions(**options) + pythran_options.cplus = True + pythran_options.np_pythran = True + + c_options = CompilationOptions(**options) + cpp_options = CompilationOptions(**options); cpp_options.cplus = True + ctx = c_options.create_context() + options = c_options + module_list, module_metadata = create_extension_list( + module_list, + exclude=exclude, + ctx=ctx, + quiet=quiet, + exclude_failures=exclude_failures, + language=language, + aliases=aliases) + deps = create_dependency_tree(ctx, quiet=quiet) + build_dir = getattr(options, 'build_dir', None) + + def copy_to_build_dir(filepath, root=os.getcwd()): + filepath_abs = os.path.abspath(filepath) + if os.path.isabs(filepath): + filepath = filepath_abs + if filepath_abs.startswith(root): + # distutil extension depends are relative to cwd + mod_dir = join_path(build_dir, + os.path.dirname(_relpath(filepath, root))) + copy_once_if_newer(filepath_abs, mod_dir) + + modules_by_cfile = collections.defaultdict(list) + to_compile = [] + for m in module_list: + if build_dir: + for dep in m.depends: + copy_to_build_dir(dep) + + cy_sources = [ + source for source in m.sources + if os.path.splitext(source)[1] in ('.pyx', '.py')] + if len(cy_sources) == 1: + # normal "special" case: believe the Extension module name to allow user overrides + full_module_name = m.name + else: + # infer FQMN from source files + full_module_name = None + + new_sources = [] + for source in m.sources: + base, ext = os.path.splitext(source) + if ext in ('.pyx', '.py'): + if m.np_pythran: + c_file = base + '.cpp' + options = pythran_options + elif m.language == 'c++': + c_file = base + '.cpp' + options = cpp_options + else: + c_file = base + '.c' + options = c_options + + # setup for out of place build directory if enabled + if build_dir: + if os.path.isabs(c_file): + warnings.warn("build_dir has no effect for absolute source paths") + c_file = os.path.join(build_dir, c_file) + dir = os.path.dirname(c_file) + safe_makedirs_once(dir) + + if os.path.exists(c_file): + c_timestamp = os.path.getmtime(c_file) + else: + c_timestamp = -1 + + # Priority goes first to modified files, second to direct + # dependents, and finally to indirect dependents. + if c_timestamp < deps.timestamp(source): + dep_timestamp, dep = deps.timestamp(source), source + priority = 0 + else: + dep_timestamp, dep = deps.newest_dependency(source) + priority = 2 - (dep in deps.immediate_dependencies(source)) + if force or c_timestamp < dep_timestamp: + if not quiet and not force: + if source == dep: + print("Compiling %s because it changed." % source) + else: + print("Compiling %s because it depends on %s." 
% (source, dep))
+ if not force and options.cache:
+ fingerprint = deps.transitive_fingerprint(source, m, options)
+ else:
+ fingerprint = None
+ to_compile.append((
+ priority, source, c_file, fingerprint, quiet,
+ options, not exclude_failures, module_metadata.get(m.name),
+ full_module_name))
+ new_sources.append(c_file)
+ modules_by_cfile[c_file].append(m)
+ else:
+ new_sources.append(source)
+ if build_dir:
+ copy_to_build_dir(source)
+ m.sources = new_sources
+
+ if options.cache:
+ if not os.path.exists(options.cache):
+ os.makedirs(options.cache)
+ to_compile.sort()
+ # Drop "priority" component of "to_compile" entries and add a
+ # simple progress indicator.
+ N = len(to_compile)
+ progress_fmt = "[{0:%d}/{1}] " % len(str(N))
+ for i in range(N):
+ progress = progress_fmt.format(i+1, N)
+ to_compile[i] = to_compile[i][1:] + (progress,)
+
+ if N <= 1:
+ nthreads = 0
+ if nthreads:
+ # Requires multiprocessing (or Python >= 2.6)
+ try:
+ import multiprocessing
+ pool = multiprocessing.Pool(
+ nthreads, initializer=_init_multiprocessing_helper)
+ except (ImportError, OSError):
+ print("multiprocessing required for parallel cythonization")
+ nthreads = 0
+ else:
+ # This is a bit more involved than it should be, because KeyboardInterrupts
+ # break the multiprocessing workers when using a normal pool.map().
+ # See, for example:
+ # http://noswap.com/blog/python-multiprocessing-keyboardinterrupt
+ try:
+ result = pool.map_async(cythonize_one_helper, to_compile, chunksize=1)
+ pool.close()
+ while not result.ready():
+ try:
+ result.get(99999) # seconds
+ except multiprocessing.TimeoutError:
+ pass
+ except KeyboardInterrupt:
+ pool.terminate()
+ raise
+ pool.join()
+ if not nthreads:
+ for args in to_compile:
+ cythonize_one(*args)
+
+ if exclude_failures:
+ failed_modules = set()
+ for c_file, modules in modules_by_cfile.items():
+ if not os.path.exists(c_file):
+ failed_modules.update(modules)
+ elif os.path.getsize(c_file) < 200:
+ f = io_open(c_file, 'r', encoding='iso8859-1')
+ try:
+ if f.read(len('#error ')) == '#error ':
+ # dead compilation result
+ failed_modules.update(modules)
+ finally:
+ f.close()
+ if failed_modules:
+ for module in failed_modules:
+ module_list.remove(module)
+ print("Failed compilations: %s" % ', '.join(sorted([
+ module.name for module in failed_modules])))
+
+ if options.cache:
+ cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100))
+ # cythonize() is often followed by the (non-Python-buffered)
+ # compiler output, flush now to avoid interleaving output.
+ sys.stdout.flush()
+ return module_list
+
+
+if os.environ.get('XML_RESULTS'):
+ compile_result_dir = os.environ['XML_RESULTS']
+ def record_results(func):
+ def with_record(*args):
+ t = time.time()
+ success = True
+ try:
+ try:
+ func(*args)
+ except:
+ success = False
+ finally:
+ t = time.time() - t
+ module = fully_qualified_name(args[0])
+ name = "cythonize." + module
+ failures = 1 - success
+ if success:
+ failure_item = ""
+ else:
+ failure_item = "failure"
+ output = open(os.path.join(compile_result_dir, name + ".xml"), "w")
+ output.write("""
+ <?xml version="1.0" ?>
+ <testsuite name="%(name)s" errors="0" failures="%(failures)s" tests="1" time="%(t)s">
+ <testcase classname="%(name)s" name="cythonize">
+ %(failure_item)s
+ </testcase>
+ </testsuite>
+ """.strip() % locals())
+ output.close()
+ return with_record
+else:
+ def record_results(func):
+ return func
+
+
+# TODO: Share context?
Issue: pyx processing leaks into pxd module +@record_results +def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None, + raise_on_failure=True, embedded_metadata=None, full_module_name=None, + progress=""): + from ..Compiler.Main import compile_single, default_options + from ..Compiler.Errors import CompileError, PyrexError + + if fingerprint: + if not os.path.exists(options.cache): + safe_makedirs(options.cache) + # Cython-generated c files are highly compressible. + # (E.g. a compression ratio of about 10 for Sage). + fingerprint_file_base = join_path( + options.cache, "%s-%s" % (os.path.basename(c_file), fingerprint)) + gz_fingerprint_file = fingerprint_file_base + gzip_ext + zip_fingerprint_file = fingerprint_file_base + '.zip' + if os.path.exists(gz_fingerprint_file) or os.path.exists(zip_fingerprint_file): + if not quiet: + print("%sFound compiled %s in cache" % (progress, pyx_file)) + if os.path.exists(gz_fingerprint_file): + os.utime(gz_fingerprint_file, None) + with contextlib.closing(gzip_open(gz_fingerprint_file, 'rb')) as g: + with contextlib.closing(open(c_file, 'wb')) as f: + shutil.copyfileobj(g, f) + else: + os.utime(zip_fingerprint_file, None) + dirname = os.path.dirname(c_file) + with contextlib.closing(zipfile.ZipFile(zip_fingerprint_file)) as z: + for artifact in z.namelist(): + z.extract(artifact, os.path.join(dirname, artifact)) + return + if not quiet: + print("%sCythonizing %s" % (progress, pyx_file)) + if options is None: + options = CompilationOptions(default_options) + options.output_file = c_file + options.embedded_metadata = embedded_metadata + + any_failures = 0 + try: + result = compile_single(pyx_file, options, full_module_name=full_module_name) + if result.num_errors > 0: + any_failures = 1 + except (EnvironmentError, PyrexError) as e: + sys.stderr.write('%s\n' % e) + any_failures = 1 + # XXX + import traceback + traceback.print_exc() + except Exception: + if raise_on_failure: + raise + import traceback + traceback.print_exc() + any_failures = 1 + if any_failures: + if raise_on_failure: + raise CompileError(None, pyx_file) + elif os.path.exists(c_file): + os.remove(c_file) + elif fingerprint: + artifacts = list(filter(None, [ + getattr(result, attr, None) + for attr in ('c_file', 'h_file', 'api_file', 'i_file')])) + if len(artifacts) == 1: + fingerprint_file = gz_fingerprint_file + with contextlib.closing(open(c_file, 'rb')) as f: + with contextlib.closing(gzip_open(fingerprint_file + '.tmp', 'wb')) as g: + shutil.copyfileobj(f, g) + else: + fingerprint_file = zip_fingerprint_file + with contextlib.closing(zipfile.ZipFile( + fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip: + for artifact in artifacts: + zip.write(artifact, os.path.basename(artifact)) + os.rename(fingerprint_file + '.tmp', fingerprint_file) + + +def cythonize_one_helper(m): + import traceback + try: + return cythonize_one(*m) + except Exception: + traceback.print_exc() + raise + + +def _init_multiprocessing_helper(): + # KeyboardInterrupt kills workers, so don't let them get it + import signal + signal.signal(signal.SIGINT, signal.SIG_IGN) + + +def cleanup_cache(cache, target_size, ratio=.85): + try: + p = subprocess.Popen(['du', '-s', '-k', os.path.abspath(cache)], stdout=subprocess.PIPE) + res = p.wait() + if res == 0: + total_size = 1024 * int(p.stdout.read().strip().split()[0]) + if total_size < target_size: + return + except (OSError, ValueError): + pass + total_size = 0 + all = [] + for file in os.listdir(cache): + path = join_path(cache, file) + s = 
os.stat(path) + total_size += s.st_size + all.append((s.st_atime, s.st_size, path)) + if total_size > target_size: + for time, size, file in reversed(sorted(all)): + os.unlink(file) + total_size -= size + if total_size < target_size * ratio: + break diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Distutils.py b/venv/lib/python3.8/site-packages/Cython/Build/Distutils.py new file mode 100644 index 0000000..3efcc0d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Distutils.py @@ -0,0 +1 @@ +from Cython.Distutils.build_ext import build_ext diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Inline.py b/venv/lib/python3.8/site-packages/Cython/Build/Inline.py new file mode 100644 index 0000000..fdd38e2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Inline.py @@ -0,0 +1,369 @@ +from __future__ import absolute_import + +import sys, os, re, inspect +import imp + +try: + import hashlib +except ImportError: + import md5 as hashlib + +from distutils.core import Distribution, Extension +from distutils.command.build_ext import build_ext + +import Cython +from ..Compiler.Main import Context, CompilationOptions, default_options + +from ..Compiler.ParseTreeTransforms import (CythonTransform, + SkipDeclarations, AnalyseDeclarationsTransform, EnvTransform) +from ..Compiler.TreeFragment import parse_from_strings +from ..Compiler.StringEncoding import _unicode +from .Dependencies import strip_string_literals, cythonize, cached_function +from ..Compiler import Pipeline, Nodes +from ..Utils import get_cython_cache_dir +import cython as cython_module + +IS_PY3 = sys.version_info >= (3, 0) + +# A utility function to convert user-supplied ASCII strings to unicode. +if sys.version_info[0] < 3: + def to_unicode(s): + if isinstance(s, bytes): + return s.decode('ascii') + else: + return s +else: + to_unicode = lambda x: x + +if sys.version_info[:2] < (3, 3): + import imp + def load_dynamic(name, module_path): + return imp.load_dynamic(name, module_path) +else: + from importlib.machinery import ExtensionFileLoader + def load_dynamic(name, module_path): + return ExtensionFileLoader(name, module_path).load_module() + +class UnboundSymbols(EnvTransform, SkipDeclarations): + def __init__(self): + CythonTransform.__init__(self, None) + self.unbound = set() + def visit_NameNode(self, node): + if not self.current_env().lookup(node.name): + self.unbound.add(node.name) + return node + def __call__(self, node): + super(UnboundSymbols, self).__call__(node) + return self.unbound + + +@cached_function +def unbound_symbols(code, context=None): + code = to_unicode(code) + if context is None: + context = Context([], default_options) + from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform + tree = parse_from_strings('(tree fragment)', code) + for phase in Pipeline.create_pipeline(context, 'pyx'): + if phase is None: + continue + tree = phase(tree) + if isinstance(phase, AnalyseDeclarationsTransform): + break + try: + import builtins + except ImportError: + import __builtin__ as builtins + return tuple(UnboundSymbols()(tree) - set(dir(builtins))) + + +def unsafe_type(arg, context=None): + py_type = type(arg) + if py_type is int: + return 'long' + else: + return safe_type(arg, context) + + +def safe_type(arg, context=None): + py_type = type(arg) + if py_type in (list, tuple, dict, str): + return py_type.__name__ + elif py_type is complex: + return 'double complex' + elif py_type is float: + return 'double' + elif py_type is bool: + return 'bint' + elif 'numpy' in 
sys.modules and isinstance(arg, sys.modules['numpy'].ndarray): + return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim) + else: + for base_type in py_type.__mro__: + if base_type.__module__ in ('__builtin__', 'builtins'): + return 'object' + module = context.find_module(base_type.__module__, need_pxd=False) + if module: + entry = module.lookup(base_type.__name__) + if entry.is_type: + return '%s.%s' % (base_type.__module__, base_type.__name__) + return 'object' + + +def _get_build_extension(): + dist = Distribution() + # Ensure the build respects distutils configuration by parsing + # the configuration files + config_files = dist.find_config_files() + dist.parse_config_files(config_files) + build_extension = build_ext(dist) + build_extension.finalize_options() + return build_extension + + +@cached_function +def _create_context(cython_include_dirs): + return Context(list(cython_include_dirs), default_options) + + +_cython_inline_cache = {} +_cython_inline_default_context = _create_context(('.',)) + +def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None): + for symbol in unbound_symbols: + if symbol not in kwds: + if locals is None or globals is None: + calling_frame = inspect.currentframe().f_back.f_back.f_back + if locals is None: + locals = calling_frame.f_locals + if globals is None: + globals = calling_frame.f_globals + if symbol in locals: + kwds[symbol] = locals[symbol] + elif symbol in globals: + kwds[symbol] = globals[symbol] + else: + print("Couldn't find %r" % symbol) + +def _inline_key(orig_code, arg_sigs, language_level): + key = orig_code, arg_sigs, sys.version_info, sys.executable, language_level, Cython.__version__ + return hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest() + +def cython_inline(code, get_type=unsafe_type, + lib_dir=os.path.join(get_cython_cache_dir(), 'inline'), + cython_include_dirs=None, cython_compiler_directives=None, + force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds): + + if get_type is None: + get_type = lambda x: 'object' + ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context + + cython_compiler_directives = dict(cython_compiler_directives or {}) + if language_level is None and 'language_level' not in cython_compiler_directives: + language_level = '3str' + if language_level is not None: + cython_compiler_directives['language_level'] = language_level + + # Fast path if this has been called in this session. + _unbound_symbols = _cython_inline_cache.get(code) + if _unbound_symbols is not None: + _populate_unbound(kwds, _unbound_symbols, locals, globals) + args = sorted(kwds.items()) + arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args]) + key_hash = _inline_key(code, arg_sigs, language_level) + invoke = _cython_inline_cache.get((code, arg_sigs, key_hash)) + if invoke is not None: + arg_list = [arg[1] for arg in args] + return invoke(*arg_list) + + orig_code = code + code = to_unicode(code) + code, literals = strip_string_literals(code) + code = strip_common_indent(code) + if locals is None: + locals = inspect.currentframe().f_back.f_back.f_locals + if globals is None: + globals = inspect.currentframe().f_back.f_back.f_globals + try: + _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code) + _populate_unbound(kwds, _unbound_symbols, locals, globals) + except AssertionError: + if not quiet: + # Parsing from strings not fully supported (e.g. cimports). 
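+ # In that case the unbound symbols are not auto-populated from the
+ # calling frame, so any required values must be passed explicitly as
+ # keyword arguments to cython_inline().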
+ print("Could not parse code as a string (to extract unbound symbols).") + + cimports = [] + for name, arg in list(kwds.items()): + if arg is cython_module: + cimports.append('\ncimport cython as %s' % name) + del kwds[name] + arg_names = sorted(kwds) + arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names]) + key_hash = _inline_key(orig_code, arg_sigs, language_level) + module_name = "_cython_inline_" + key_hash + + if module_name in sys.modules: + module = sys.modules[module_name] + + else: + build_extension = None + if cython_inline.so_ext is None: + # Figure out and cache current extension suffix + build_extension = _get_build_extension() + cython_inline.so_ext = build_extension.get_ext_filename('') + + module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext) + + if not os.path.exists(lib_dir): + os.makedirs(lib_dir) + if force or not os.path.isfile(module_path): + cflags = [] + c_include_dirs = [] + qualified = re.compile(r'([.\w]+)[.]') + for type, _ in arg_sigs: + m = qualified.match(type) + if m: + cimports.append('\ncimport %s' % m.groups()[0]) + # one special case + if m.groups()[0] == 'numpy': + import numpy + c_include_dirs.append(numpy.get_include()) + # cflags.append('-Wno-unused') + module_body, func_body = extract_func_code(code) + params = ', '.join(['%s %s' % a for a in arg_sigs]) + module_code = """ +%(module_body)s +%(cimports)s +def __invoke(%(params)s): +%(func_body)s + return locals() + """ % {'cimports': '\n'.join(cimports), + 'module_body': module_body, + 'params': params, + 'func_body': func_body } + for key, value in literals.items(): + module_code = module_code.replace(key, value) + pyx_file = os.path.join(lib_dir, module_name + '.pyx') + fh = open(pyx_file, 'w') + try: + fh.write(module_code) + finally: + fh.close() + extension = Extension( + name = module_name, + sources = [pyx_file], + include_dirs = c_include_dirs, + extra_compile_args = cflags) + if build_extension is None: + build_extension = _get_build_extension() + build_extension.extensions = cythonize( + [extension], + include_path=cython_include_dirs or ['.'], + compiler_directives=cython_compiler_directives, + quiet=quiet) + build_extension.build_temp = os.path.dirname(pyx_file) + build_extension.build_lib = lib_dir + build_extension.run() + + module = load_dynamic(module_name, module_path) + + _cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke + arg_list = [kwds[arg] for arg in arg_names] + return module.__invoke(*arg_list) + +# Cached suffix used by cython_inline above. 
None should get +# overridden with actual value upon the first cython_inline invocation +cython_inline.so_ext = None + +_find_non_space = re.compile('[^ ]').search + + +def strip_common_indent(code): + min_indent = None + lines = code.splitlines() + for line in lines: + match = _find_non_space(line) + if not match: + continue # blank + indent = match.start() + if line[indent] == '#': + continue # comment + if min_indent is None or min_indent > indent: + min_indent = indent + for ix, line in enumerate(lines): + match = _find_non_space(line) + if not match or not line or line[indent:indent+1] == '#': + continue + lines[ix] = line[min_indent:] + return '\n'.join(lines) + + +module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))') +def extract_func_code(code): + module = [] + function = [] + current = function + code = code.replace('\t', ' ') + lines = code.split('\n') + for line in lines: + if not line.startswith(' '): + if module_statement.match(line): + current = module + else: + current = function + current.append(line) + return '\n'.join(module), ' ' + '\n '.join(function) + + +try: + from inspect import getcallargs +except ImportError: + def getcallargs(func, *arg_values, **kwd_values): + all = {} + args, varargs, kwds, defaults = inspect.getargspec(func) + if varargs is not None: + all[varargs] = arg_values[len(args):] + for name, value in zip(args, arg_values): + all[name] = value + for name, value in list(kwd_values.items()): + if name in args: + if name in all: + raise TypeError("Duplicate argument %s" % name) + all[name] = kwd_values.pop(name) + if kwds is not None: + all[kwds] = kwd_values + elif kwd_values: + raise TypeError("Unexpected keyword arguments: %s" % list(kwd_values)) + if defaults is None: + defaults = () + first_default = len(args) - len(defaults) + for ix, name in enumerate(args): + if name not in all: + if ix >= first_default: + all[name] = defaults[ix - first_default] + else: + raise TypeError("Missing argument: %s" % name) + return all + + +def get_body(source): + ix = source.index(':') + if source[:5] == 'lambda': + return "return %s" % source[ix+1:] + else: + return source[ix+1:] + + +# Lots to be done here... It would be especially cool if compiled functions +# could invoke each other quickly. +class RuntimeCompiledFunction(object): + + def __init__(self, f): + self._f = f + self._body = get_body(inspect.getsource(f)) + + def __call__(self, *args, **kwds): + all = getcallargs(self._f, *args, **kwds) + if IS_PY3: + return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all) + else: + return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all) diff --git a/venv/lib/python3.8/site-packages/Cython/Build/IpythonMagic.py b/venv/lib/python3.8/site-packages/Cython/Build/IpythonMagic.py new file mode 100644 index 0000000..7abb97e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/IpythonMagic.py @@ -0,0 +1,565 @@ +# -*- coding: utf-8 -*- +""" +===================== +Cython related magics +===================== + +Magic command interface for interactive work with Cython + +.. note:: + + The ``Cython`` package needs to be installed separately. It + can be obtained using ``easy_install`` or ``pip``. + +Usage +===== + +To enable the magics below, execute ``%load_ext cython``. 
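+
+For instance, once the extension is loaded, a cell such as the following
+(illustrative only) is compiled on the fly and its names are injected into
+the user's namespace::
+
+    %%cython
+    def f(x):
+        return 2.0*x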
+ +``%%cython`` + +{CYTHON_DOC} + +``%%cython_inline`` + +{CYTHON_INLINE_DOC} + +``%%cython_pyximport`` + +{CYTHON_PYXIMPORT_DOC} + +Author: +* Brian Granger + +Code moved from IPython and adapted by: +* Martín Gaitán + +Parts of this code were taken from Cython.inline. +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011, IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file ipython-COPYING.rst, distributed with this software. +#----------------------------------------------------------------------------- + +from __future__ import absolute_import, print_function + +import imp +import io +import os +import re +import sys +import time +import copy +import distutils.log +import textwrap + +IO_ENCODING = sys.getfilesystemencoding() +IS_PY2 = sys.version_info[0] < 3 + +try: + reload +except NameError: # Python 3 + from imp import reload + +try: + import hashlib +except ImportError: + import md5 as hashlib + +from distutils.core import Distribution, Extension +from distutils.command.build_ext import build_ext + +from IPython.core import display +from IPython.core import magic_arguments +from IPython.core.magic import Magics, magics_class, cell_magic +try: + from IPython.paths import get_ipython_cache_dir +except ImportError: + # older IPython version + from IPython.utils.path import get_ipython_cache_dir +from IPython.utils.text import dedent + +from ..Shadow import __version__ as cython_version +from ..Compiler.Errors import CompileError +from .Inline import cython_inline +from .Dependencies import cythonize + + +PGO_CONFIG = { + 'gcc': { + 'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'], + 'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'], + }, + # blind copy from 'configure' script in CPython 3.7 + 'icc': { + 'gen': ['-prof-gen'], + 'use': ['-prof-use'], + } +} +PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc'] + + +if IS_PY2: + def encode_fs(name): + return name if isinstance(name, bytes) else name.encode(IO_ENCODING) +else: + def encode_fs(name): + return name + + +@magics_class +class CythonMagics(Magics): + + def __init__(self, shell): + super(CythonMagics, self).__init__(shell) + self._reloads = {} + self._code_cache = {} + self._pyximport_installed = False + + def _import_all(self, module): + mdict = module.__dict__ + if '__all__' in mdict: + keys = mdict['__all__'] + else: + keys = [k for k in mdict if not k.startswith('_')] + + for k in keys: + try: + self.shell.push({k: mdict[k]}) + except KeyError: + msg = "'module' object has no attribute '%s'" % k + raise AttributeError(msg) + + @cell_magic + def cython_inline(self, line, cell): + """Compile and run a Cython code cell using Cython.inline. + + This magic simply passes the body of the cell to Cython.inline + and returns the result. If the variables `a` and `b` are defined + in the user's namespace, here is a simple example that returns + their sum:: + + %%cython_inline + return a+b + + For most purposes, we recommend the usage of the `%%cython` magic. + """ + locs = self.shell.user_global_ns + globs = self.shell.user_ns + return cython_inline(cell, locals=locs, globals=globs) + + @cell_magic + def cython_pyximport(self, line, cell): + """Compile and import a Cython code cell using pyximport. + + The contents of the cell are written to a `.pyx` file in the current + working directory, which is then imported using `pyximport`. 
This + magic requires a module name to be passed:: + + %%cython_pyximport modulename + def f(x): + return 2.0*x + + The compiled module is then imported and all of its symbols are + injected into the user's namespace. For most purposes, we recommend + the usage of the `%%cython` magic. + """ + module_name = line.strip() + if not module_name: + raise ValueError('module name must be given') + fname = module_name + '.pyx' + with io.open(fname, 'w', encoding='utf-8') as f: + f.write(cell) + if 'pyximport' not in sys.modules or not self._pyximport_installed: + import pyximport + pyximport.install() + self._pyximport_installed = True + if module_name in self._reloads: + module = self._reloads[module_name] + # Note: reloading extension modules is not actually supported + # (requires PEP-489 reinitialisation support). + # Don't know why this should ever have worked as it reads here. + # All we really need to do is to update the globals below. + #reload(module) + else: + __import__(module_name) + module = sys.modules[module_name] + self._reloads[module_name] = module + self._import_all(module) + + @magic_arguments.magic_arguments() + @magic_arguments.argument( + '-a', '--annotate', action='store_true', default=False, + help="Produce a colorized HTML version of the source." + ) + @magic_arguments.argument( + '-+', '--cplus', action='store_true', default=False, + help="Output a C++ rather than C file." + ) + @magic_arguments.argument( + '-3', dest='language_level', action='store_const', const=3, default=None, + help="Select Python 3 syntax." + ) + @magic_arguments.argument( + '-2', dest='language_level', action='store_const', const=2, default=None, + help="Select Python 2 syntax." + ) + @magic_arguments.argument( + '-f', '--force', action='store_true', default=False, + help="Force the compilation of a new module, even if the source has been " + "previously compiled." + ) + @magic_arguments.argument( + '-c', '--compile-args', action='append', default=[], + help="Extra flags to pass to compiler via the `extra_compile_args` " + "Extension flag (can be specified multiple times)." + ) + @magic_arguments.argument( + '--link-args', action='append', default=[], + help="Extra flags to pass to linker via the `extra_link_args` " + "Extension flag (can be specified multiple times)." + ) + @magic_arguments.argument( + '-l', '--lib', action='append', default=[], + help="Add a library to link the extension against (can be specified " + "multiple times)." + ) + @magic_arguments.argument( + '-n', '--name', + help="Specify a name for the Cython module." + ) + @magic_arguments.argument( + '-L', dest='library_dirs', metavar='dir', action='append', default=[], + help="Add a path to the list of library directories (can be specified " + "multiple times)." + ) + @magic_arguments.argument( + '-I', '--include', action='append', default=[], + help="Add a path to the list of include directories (can be specified " + "multiple times)." + ) + @magic_arguments.argument( + '-S', '--src', action='append', default=[], + help="Add a path to the list of src files (can be specified " + "multiple times)." + ) + @magic_arguments.argument( + '--pgo', dest='pgo', action='store_true', default=False, + help=("Enable profile guided optimisation in the C compiler. 
" + "Compiles the cell twice and executes it in between to generate a runtime profile.") + ) + @magic_arguments.argument( + '--verbose', dest='quiet', action='store_false', default=True, + help=("Print debug information like generated .c/.cpp file location " + "and exact gcc/g++ command invoked.") + ) + @cell_magic + def cython(self, line, cell): + """Compile and import everything from a Cython code cell. + + The contents of the cell are written to a `.pyx` file in the + directory `IPYTHONDIR/cython` using a filename with the hash of the + code. This file is then cythonized and compiled. The resulting module + is imported and all of its symbols are injected into the user's + namespace. The usage is similar to that of `%%cython_pyximport` but + you don't have to pass a module name:: + + %%cython + def f(x): + return 2.0*x + + To compile OpenMP codes, pass the required `--compile-args` + and `--link-args`. For example with gcc:: + + %%cython --compile-args=-fopenmp --link-args=-fopenmp + ... + + To enable profile guided optimisation, pass the ``--pgo`` option. + Note that the cell itself needs to take care of establishing a suitable + profile when executed. This can be done by implementing the functions to + optimise, and then calling them directly in the same cell on some realistic + training data like this:: + + %%cython --pgo + def critical_function(data): + for item in data: + ... + + # execute function several times to build profile + from somewhere import some_typical_data + for _ in range(100): + critical_function(some_typical_data) + + In Python 3.5 and later, you can distinguish between the profile and + non-profile runs as follows:: + + if "_pgo_" in __name__: + ... # execute critical code here + """ + args = magic_arguments.parse_argstring(self.cython, line) + code = cell if cell.endswith('\n') else cell + '\n' + lib_dir = os.path.join(get_ipython_cache_dir(), 'cython') + key = (code, line, sys.version_info, sys.executable, cython_version) + + if not os.path.exists(lib_dir): + os.makedirs(lib_dir) + + if args.pgo: + key += ('pgo',) + if args.force: + # Force a new module name by adding the current time to the + # key which is hashed to determine the module name. + key += (time.time(),) + + if args.name: + module_name = str(args.name) # no-op in Py3 + else: + module_name = "_cython_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest() + html_file = os.path.join(lib_dir, module_name + '.html') + module_path = os.path.join(lib_dir, module_name + self.so_ext) + + have_module = os.path.isfile(module_path) + need_cythonize = args.pgo or not have_module + + if args.annotate: + if not os.path.isfile(html_file): + need_cythonize = True + + extension = None + if need_cythonize: + extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet) + if extensions is None: + # Compilation failed and printed error message + return None + assert len(extensions) == 1 + extension = extensions[0] + self._code_cache[key] = module_name + + if args.pgo: + self._profile_pgo_wrapper(extension, lib_dir) + + try: + self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None, + quiet=args.quiet) + except distutils.errors.CompileError: + # Build failed and printed error message + return None + + module = imp.load_dynamic(module_name, module_path) + self._import_all(module) + + if args.annotate: + try: + with io.open(html_file, encoding='utf-8') as f: + annotated_html = f.read() + except IOError as e: + # File could not be opened. 
Most likely the user has a version + # of Cython before 0.15.1 (when `cythonize` learned the + # `force` keyword argument) and has already compiled this + # exact source without annotation. + print('Cython completed successfully but the annotated ' + 'source could not be read.', file=sys.stderr) + print(e, file=sys.stderr) + else: + return display.HTML(self.clean_annotated_html(annotated_html)) + + def _profile_pgo_wrapper(self, extension, lib_dir): + """ + Generate a .c file for a separate extension module that calls the + module init function of the original module. This makes sure that the + PGO profiler sees the correct .o file of the final module, but it still + allows us to import the module under a different name for profiling, + before recompiling it into the PGO optimised module. Overwriting and + reimporting the same shared library is not portable. + """ + extension = copy.copy(extension) # shallow copy, do not modify sources in place! + module_name = extension.name + pgo_module_name = '_pgo_' + module_name + pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c') + with io.open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(u""" + #include "Python.h" + #if PY_MAJOR_VERSION < 3 + extern PyMODINIT_FUNC init%(module_name)s(void); + PyMODINIT_FUNC init%(pgo_module_name)s(void); /*proto*/ + PyMODINIT_FUNC init%(pgo_module_name)s(void) { + PyObject *sys_modules; + init%(module_name)s(); if (PyErr_Occurred()) return; + sys_modules = PyImport_GetModuleDict(); /* borrowed, no exception, "never" fails */ + if (sys_modules) { + PyObject *module = PyDict_GetItemString(sys_modules, "%(module_name)s"); if (!module) return; + PyDict_SetItemString(sys_modules, "%(pgo_module_name)s", module); + Py_DECREF(module); + } + } + #else + extern PyMODINIT_FUNC PyInit_%(module_name)s(void); + PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/ + PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) { + return PyInit_%(module_name)s(); + } + #endif + """ % {'module_name': module_name, 'pgo_module_name': pgo_module_name})) + + extension.sources = extension.sources + [pgo_wrapper_c_file] # do not modify in place! 
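+ # (copy.copy() above makes a shallow copy, so the sources list is still
+ # shared with the original extension; concatenating into a new list keeps
+ # the original module's sources untouched.)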
+ extension.name = pgo_module_name + + self._build_extension(extension, lib_dir, pgo_step_name='gen') + + # import and execute module code to generate profile + so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext) + imp.load_dynamic(pgo_module_name, so_module_path) + + def _cythonize(self, module_name, code, lib_dir, args, quiet=True): + pyx_file = os.path.join(lib_dir, module_name + '.pyx') + pyx_file = encode_fs(pyx_file) + + c_include_dirs = args.include + c_src_files = list(map(str, args.src)) + if 'numpy' in code: + import numpy + c_include_dirs.append(numpy.get_include()) + with io.open(pyx_file, 'w', encoding='utf-8') as f: + f.write(code) + extension = Extension( + name=module_name, + sources=[pyx_file] + c_src_files, + include_dirs=c_include_dirs, + library_dirs=args.library_dirs, + extra_compile_args=args.compile_args, + extra_link_args=args.link_args, + libraries=args.lib, + language='c++' if args.cplus else 'c', + ) + try: + opts = dict( + quiet=quiet, + annotate=args.annotate, + force=True, + ) + if args.language_level is not None: + assert args.language_level in (2, 3) + opts['language_level'] = args.language_level + elif sys.version_info[0] >= 3: + opts['language_level'] = 3 + return cythonize([extension], **opts) + except CompileError: + return None + + def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True): + build_extension = self._get_build_extension( + extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name) + old_threshold = None + try: + if not quiet: + old_threshold = distutils.log.set_threshold(distutils.log.DEBUG) + build_extension.run() + finally: + if not quiet and old_threshold is not None: + distutils.log.set_threshold(old_threshold) + + def _add_pgo_flags(self, build_extension, step_name, temp_dir): + compiler_type = build_extension.compiler.compiler_type + if compiler_type == 'unix': + compiler_cmd = build_extension.compiler.compiler_so + # TODO: we could try to call "[cmd] --version" for better insights + if not compiler_cmd: + pass + elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]: + compiler_type = 'clang' + elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]: + compiler_type = 'icc' + elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]: + compiler_type = 'gcc' + elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]: + compiler_type = 'gcc' + config = PGO_CONFIG.get(compiler_type) + orig_flags = [] + if config and step_name in config: + flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]] + for extension in build_extension.extensions: + orig_flags.append((extension.extra_compile_args, extension.extra_link_args)) + extension.extra_compile_args = extension.extra_compile_args + flags + extension.extra_link_args = extension.extra_link_args + flags + else: + print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type), + file=sys.stderr) + return orig_flags + + @property + def so_ext(self): + """The extension suffix for compiled modules.""" + try: + return self._so_ext + except AttributeError: + self._so_ext = self._get_build_extension().get_ext_filename('') + return self._so_ext + + def _clear_distutils_mkpath_cache(self): + """clear distutils mkpath cache + + prevents distutils from skipping re-creation of dirs that have been removed + """ + try: + from distutils.dir_util import _path_created + except ImportError: + pass + else: + _path_created.clear() + + def _get_build_extension(self, extension=None, lib_dir=None, 
temp_dir=None, + pgo_step_name=None, _build_ext=build_ext): + self._clear_distutils_mkpath_cache() + dist = Distribution() + config_files = dist.find_config_files() + try: + config_files.remove('setup.cfg') + except ValueError: + pass + dist.parse_config_files(config_files) + + if not temp_dir: + temp_dir = lib_dir + add_pgo_flags = self._add_pgo_flags + + if pgo_step_name: + base_build_ext = _build_ext + class _build_ext(_build_ext): + def build_extensions(self): + add_pgo_flags(self, pgo_step_name, temp_dir) + base_build_ext.build_extensions(self) + + build_extension = _build_ext(dist) + build_extension.finalize_options() + if temp_dir: + temp_dir = encode_fs(temp_dir) + build_extension.build_temp = temp_dir + if lib_dir: + lib_dir = encode_fs(lib_dir) + build_extension.build_lib = lib_dir + if extension is not None: + build_extension.extensions = [extension] + return build_extension + + @staticmethod + def clean_annotated_html(html): + """Clean up the annotated HTML source. + + Strips the link to the generated C or C++ file, which we do not + present to the user. + """ + r = re.compile('
<p>
Raw output: (.*)') + html = '\n'.join(l for l in html.splitlines() if not r.match(l)) + return html + +__doc__ = __doc__.format( + # rST doesn't see the -+ flag as part of an option list, so we + # hide it from the module-level docstring. + CYTHON_DOC=dedent(CythonMagics.cython.__doc__\ + .replace('-+, --cplus', '--cplus ')), + CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__), + CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__), +) diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestCyCache.py b/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestCyCache.py new file mode 100644 index 0000000..a3224b4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestCyCache.py @@ -0,0 +1,106 @@ +import difflib +import glob +import gzip +import os +import tempfile + +import Cython.Build.Dependencies +import Cython.Utils +from Cython.TestUtils import CythonTest + + +class TestCyCache(CythonTest): + + def setUp(self): + CythonTest.setUp(self) + self.temp_dir = tempfile.mkdtemp( + prefix='cycache-test', + dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None) + self.src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir) + self.cache_dir = tempfile.mkdtemp(prefix='cache', dir=self.temp_dir) + + def cache_files(self, file_glob): + return glob.glob(os.path.join(self.cache_dir, file_glob)) + + def fresh_cythonize(self, *args, **kwargs): + Cython.Utils.clear_function_caches() + Cython.Build.Dependencies._dep_tree = None # discard method caches + Cython.Build.Dependencies.cythonize(*args, **kwargs) + + def test_cycache_switch(self): + content1 = 'value = 1\n' + content2 = 'value = 2\n' + a_pyx = os.path.join(self.src_dir, 'a.pyx') + a_c = a_pyx[:-4] + '.c' + + open(a_pyx, 'w').write(content1) + self.fresh_cythonize(a_pyx, cache=self.cache_dir) + self.fresh_cythonize(a_pyx, cache=self.cache_dir) + self.assertEqual(1, len(self.cache_files('a.c*'))) + a_contents1 = open(a_c).read() + os.unlink(a_c) + + open(a_pyx, 'w').write(content2) + self.fresh_cythonize(a_pyx, cache=self.cache_dir) + a_contents2 = open(a_c).read() + os.unlink(a_c) + + self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!') + self.assertEqual(2, len(self.cache_files('a.c*'))) + + open(a_pyx, 'w').write(content1) + self.fresh_cythonize(a_pyx, cache=self.cache_dir) + self.assertEqual(2, len(self.cache_files('a.c*'))) + a_contents = open(a_c).read() + self.assertEqual( + a_contents, a_contents1, + msg='\n'.join(list(difflib.unified_diff( + a_contents.split('\n'), a_contents1.split('\n')))[:10])) + + def test_cycache_uses_cache(self): + a_pyx = os.path.join(self.src_dir, 'a.pyx') + a_c = a_pyx[:-4] + '.c' + open(a_pyx, 'w').write('pass') + self.fresh_cythonize(a_pyx, cache=self.cache_dir) + a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0]) + gzip.GzipFile(a_cache, 'wb').write('fake stuff'.encode('ascii')) + os.unlink(a_c) + self.fresh_cythonize(a_pyx, cache=self.cache_dir) + a_contents = open(a_c).read() + self.assertEqual(a_contents, 'fake stuff', + 'Unexpected contents: %s...' 
% a_contents[:100]) + + def test_multi_file_output(self): + a_pyx = os.path.join(self.src_dir, 'a.pyx') + a_c = a_pyx[:-4] + '.c' + a_h = a_pyx[:-4] + '.h' + a_api_h = a_pyx[:-4] + '_api.h' + open(a_pyx, 'w').write('cdef public api int foo(int x): return x\n') + self.fresh_cythonize(a_pyx, cache=self.cache_dir) + expected = [a_c, a_h, a_api_h] + for output in expected: + self.assertTrue(os.path.exists(output), output) + os.unlink(output) + self.fresh_cythonize(a_pyx, cache=self.cache_dir) + for output in expected: + self.assertTrue(os.path.exists(output), output) + + def test_options_invalidation(self): + hash_pyx = os.path.join(self.src_dir, 'options.pyx') + hash_c = hash_pyx[:-len('.pyx')] + '.c' + + open(hash_pyx, 'w').write('pass') + self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False) + self.assertEqual(1, len(self.cache_files('options.c*'))) + + os.unlink(hash_c) + self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=True) + self.assertEqual(2, len(self.cache_files('options.c*'))) + + os.unlink(hash_c) + self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False) + self.assertEqual(2, len(self.cache_files('options.c*'))) + + os.unlink(hash_c) + self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True) + self.assertEqual(2, len(self.cache_files('options.c*'))) diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestInline.py b/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestInline.py new file mode 100644 index 0000000..5ef9fec --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestInline.py @@ -0,0 +1,96 @@ +import os, tempfile +from Cython.Shadow import inline +from Cython.Build.Inline import safe_type +from Cython.TestUtils import CythonTest + +try: + import numpy + has_numpy = True +except: + has_numpy = False + +test_kwds = dict(force=True, quiet=True) + +global_value = 100 + +class TestInline(CythonTest): + def setUp(self): + CythonTest.setUp(self) + self.test_kwds = dict(test_kwds) + if os.path.isdir('TEST_TMP'): + lib_dir = os.path.join('TEST_TMP','inline') + else: + lib_dir = tempfile.mkdtemp(prefix='cython_inline_') + self.test_kwds['lib_dir'] = lib_dir + + def test_simple(self): + self.assertEquals(inline("return 1+2", **self.test_kwds), 3) + + def test_types(self): + self.assertEquals(inline(""" + cimport cython + return cython.typeof(a), cython.typeof(b) + """, a=1.0, b=[], **self.test_kwds), ('double', 'list object')) + + def test_locals(self): + a = 1 + b = 2 + self.assertEquals(inline("return a+b", **self.test_kwds), 3) + + def test_globals(self): + self.assertEquals(inline("return global_value + 1", **self.test_kwds), global_value + 1) + + def test_no_return(self): + self.assertEquals(inline(""" + a = 1 + cdef double b = 2 + cdef c = [] + """, **self.test_kwds), dict(a=1, b=2.0, c=[])) + + def test_def_node(self): + foo = inline("def foo(x): return x * x", **self.test_kwds)['foo'] + self.assertEquals(foo(7), 49) + + def test_class_ref(self): + class Type(object): + pass + tp = inline("Type")['Type'] + self.assertEqual(tp, Type) + + def test_pure(self): + import cython as cy + b = inline(""" + b = cy.declare(float, a) + c = cy.declare(cy.pointer(cy.float), &b) + return b + """, a=3, **self.test_kwds) + self.assertEquals(type(b), float) + + def test_compiler_directives(self): + self.assertEqual( + inline('return sum(x)', + x=[1, 2, 3], + cython_compiler_directives={'boundscheck': False}), + 6 + ) + + def test_lang_version(self): + # GH-3419. 
Caching for inline code didn't always respect compiler directives. + inline_divcode = "def f(int a, int b): return a/b" + self.assertEqual( + inline(inline_divcode, language_level=2)['f'](5,2), + 2 + ) + self.assertEqual( + inline(inline_divcode, language_level=3)['f'](5,2), + 2.5 + ) + + if has_numpy: + + def test_numpy(self): + import numpy + a = numpy.ndarray((10, 20)) + a[0,0] = 10 + self.assertEquals(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]') + self.assertEquals(inline("return a[0,0]", a=a, **self.test_kwds), 10.0) diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestIpythonMagic.py b/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestIpythonMagic.py new file mode 100644 index 0000000..d9d8322 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestIpythonMagic.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# tag: ipython + +"""Tests for the Cython magics extension.""" + +from __future__ import absolute_import + +import os +import sys +from contextlib import contextmanager +from Cython.Build import IpythonMagic +from Cython.TestUtils import CythonTest + +try: + import IPython.testing.globalipapp +except ImportError: + # Disable tests and fake helpers for initialisation below. + def skip_if_not_installed(_): + return None +else: + def skip_if_not_installed(c): + return c + +try: + # disable IPython history thread before it gets started to avoid having to clean it up + from IPython.core.history import HistoryManager + HistoryManager.enabled = False +except ImportError: + pass + +code = u"""\ +def f(x): + return 2*x +""" + +cython3_code = u"""\ +def f(int x): + return 2 / x + +def call(x): + return f(*(x,)) +""" + +pgo_cython3_code = cython3_code + u"""\ +def main(): + for _ in range(100): call(5) +main() +""" + + +if sys.platform == 'win32': + # not using IPython's decorators here because they depend on "nose" + try: + from unittest import skip as skip_win32 + except ImportError: + # poor dev's silent @unittest.skip() + def skip_win32(dummy): + def _skip_win32(func): + return None + return _skip_win32 +else: + def skip_win32(dummy): + def _skip_win32(func): + def wrapper(*args, **kwargs): + func(*args, **kwargs) + return wrapper + return _skip_win32 + + +@skip_if_not_installed +class TestIPythonMagic(CythonTest): + + @classmethod + def setUpClass(cls): + CythonTest.setUpClass() + cls._ip = IPython.testing.globalipapp.get_ipython() + + def setUp(self): + CythonTest.setUp(self) + self._ip.extension_manager.load_extension('cython') + + def test_cython_inline(self): + ip = self._ip + ip.ex('a=10; b=20') + result = ip.run_cell_magic('cython_inline', '', 'return a+b') + self.assertEqual(result, 30) + + @skip_win32('Skip on Windows') + def test_cython_pyximport(self): + ip = self._ip + module_name = '_test_cython_pyximport' + ip.run_cell_magic('cython_pyximport', module_name, code) + ip.ex('g = f(10)') + self.assertEqual(ip.user_ns['g'], 20.0) + ip.run_cell_magic('cython_pyximport', module_name, code) + ip.ex('h = f(-10)') + self.assertEqual(ip.user_ns['h'], -20.0) + try: + os.remove(module_name + '.pyx') + except OSError: + pass + + def test_cython(self): + ip = self._ip + ip.run_cell_magic('cython', '', code) + ip.ex('g = f(10)') + self.assertEqual(ip.user_ns['g'], 20.0) + + def test_cython_name(self): + # The Cython module named 'mymodule' defines the function f. + ip = self._ip + ip.run_cell_magic('cython', '--name=mymodule', code) + # This module can now be imported in the interactive namespace. 
+ ip.ex('import mymodule; g = mymodule.f(10)') + self.assertEqual(ip.user_ns['g'], 20.0) + + def test_cython_language_level(self): + # The Cython cell defines the functions f() and call(). + ip = self._ip + ip.run_cell_magic('cython', '', cython3_code) + ip.ex('g = f(10); h = call(10)') + if sys.version_info[0] < 3: + self.assertEqual(ip.user_ns['g'], 2 // 10) + self.assertEqual(ip.user_ns['h'], 2 // 10) + else: + self.assertEqual(ip.user_ns['g'], 2.0 / 10.0) + self.assertEqual(ip.user_ns['h'], 2.0 / 10.0) + + def test_cython3(self): + # The Cython cell defines the functions f() and call(). + ip = self._ip + ip.run_cell_magic('cython', '-3', cython3_code) + ip.ex('g = f(10); h = call(10)') + self.assertEqual(ip.user_ns['g'], 2.0 / 10.0) + self.assertEqual(ip.user_ns['h'], 2.0 / 10.0) + + def test_cython2(self): + # The Cython cell defines the functions f() and call(). + ip = self._ip + ip.run_cell_magic('cython', '-2', cython3_code) + ip.ex('g = f(10); h = call(10)') + self.assertEqual(ip.user_ns['g'], 2 // 10) + self.assertEqual(ip.user_ns['h'], 2 // 10) + + @skip_win32('Skip on Windows') + def test_cython3_pgo(self): + # The Cython cell defines the functions f() and call(). + ip = self._ip + ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code) + ip.ex('g = f(10); h = call(10); main()') + self.assertEqual(ip.user_ns['g'], 2.0 / 10.0) + self.assertEqual(ip.user_ns['h'], 2.0 / 10.0) + + @skip_win32('Skip on Windows') + def test_extlibs(self): + ip = self._ip + code = u""" +from libc.math cimport sin +x = sin(0.0) + """ + ip.user_ns['x'] = 1 + ip.run_cell_magic('cython', '-l m', code) + self.assertEqual(ip.user_ns['x'], 0) + + + def test_cython_verbose(self): + ip = self._ip + ip.run_cell_magic('cython', '--verbose', code) + ip.ex('g = f(10)') + self.assertEqual(ip.user_ns['g'], 20.0) + + def test_cython_verbose_thresholds(self): + @contextmanager + def mock_distutils(): + class MockLog: + DEBUG = 1 + INFO = 2 + thresholds = [INFO] + + def set_threshold(self, val): + self.thresholds.append(val) + return self.thresholds[-2] + + + new_log = MockLog() + old_log = IpythonMagic.distutils.log + try: + IpythonMagic.distutils.log = new_log + yield new_log + finally: + IpythonMagic.distutils.log = old_log + + ip = self._ip + with mock_distutils() as verbose_log: + ip.run_cell_magic('cython', '--verbose', code) + ip.ex('g = f(10)') + self.assertEqual(ip.user_ns['g'], 20.0) + self.assertEquals([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO], + verbose_log.thresholds) + + with mock_distutils() as normal_log: + ip.run_cell_magic('cython', '', code) + ip.ex('g = f(10)') + self.assertEqual(ip.user_ns['g'], 20.0) + self.assertEquals([normal_log.INFO], normal_log.thresholds) diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestStripLiterals.py b/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestStripLiterals.py new file mode 100644 index 0000000..a7572a5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Tests/TestStripLiterals.py @@ -0,0 +1,57 @@ +from Cython.Build.Dependencies import strip_string_literals + +from Cython.TestUtils import CythonTest + +class TestStripLiterals(CythonTest): + + def t(self, before, expected): + actual, literals = strip_string_literals(before, prefix="_L") + self.assertEqual(expected, actual) + for key, value in literals.items(): + actual = actual.replace(key, value) + self.assertEqual(before, actual) + + def test_empty(self): + self.t("", "") + + def test_single_quote(self): + self.t("'x'", "'_L1_'") + + def 
test_double_quote(self): + self.t('"x"', '"_L1_"') + + def test_nested_quotes(self): + self.t(""" '"' "'" """, """ '_L1_' "_L2_" """) + + def test_triple_quote(self): + self.t(" '''a\n''' ", " '''_L1_''' ") + + def test_backslash(self): + self.t(r"'a\'b'", "'_L1_'") + self.t(r"'a\\'", "'_L1_'") + self.t(r"'a\\\'b'", "'_L1_'") + + def test_unicode(self): + self.t("u'abc'", "u'_L1_'") + + def test_raw(self): + self.t(r"r'abc\\'", "r'_L1_'") + + def test_raw_unicode(self): + self.t(r"ru'abc\\'", "ru'_L1_'") + + def test_comment(self): + self.t("abc # foo", "abc #_L1_") + + def test_comment_and_quote(self): + self.t("abc # 'x'", "abc #_L1_") + self.t("'abc#'", "'_L1_'") + + def test_include(self): + self.t("include 'a.pxi' # something here", + "include '_L1_' #_L2_") + + def test_extern(self): + self.t("cdef extern from 'a.h': # comment", + "cdef extern from '_L1_': #_L2_") + diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/__init__.py b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__init__.py new file mode 100644 index 0000000..fa81ada --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__init__.py @@ -0,0 +1 @@ +# empty file diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestCyCache.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestCyCache.cpython-38.pyc new file mode 100644 index 0000000..691bef7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestCyCache.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestInline.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestInline.cpython-38.pyc new file mode 100644 index 0000000..1f2171b Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestInline.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestIpythonMagic.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestIpythonMagic.cpython-38.pyc new file mode 100644 index 0000000..c5599a6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestIpythonMagic.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestStripLiterals.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestStripLiterals.cpython-38.pyc new file mode 100644 index 0000000..4a68487 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/TestStripLiterals.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..e4d4e3c Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/Tests/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/__init__.py b/venv/lib/python3.8/site-packages/Cython/Build/__init__.py new file mode 100644 index 0000000..d6f3986 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Build/__init__.py @@ -0,0 +1,2 @@ +from .Dependencies import cythonize +from .Distutils import build_ext diff --git a/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/BuildExecutable.cpython-38.pyc 
b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/BuildExecutable.cpython-38.pyc new file mode 100644 index 0000000..bc55b2d Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/BuildExecutable.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Cythonize.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Cythonize.cpython-38.pyc new file mode 100644 index 0000000..af4f383 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Cythonize.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Dependencies.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Dependencies.cpython-38.pyc new file mode 100644 index 0000000..4411cd2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Dependencies.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Distutils.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Distutils.cpython-38.pyc new file mode 100644 index 0000000..817f0e9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Distutils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Inline.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Inline.cpython-38.pyc new file mode 100644 index 0000000..410a99f Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/Inline.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/IpythonMagic.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/IpythonMagic.cpython-38.pyc new file mode 100644 index 0000000..11c5875 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/IpythonMagic.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..a440c4f Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Build/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/CodeWriter.py b/venv/lib/python3.8/site-packages/Cython/CodeWriter.py new file mode 100644 index 0000000..2e4646a --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/CodeWriter.py @@ -0,0 +1,816 @@ +""" +Serializes a Cython code tree to Cython code. This is primarily useful for +debugging and testing purposes. + +The output is in a strict format, no whitespace or comments from the input +is preserved (and it could not be as it is not present in the code tree). 
+""" + +from __future__ import absolute_import, print_function + +from .Compiler.Visitor import TreeVisitor +from .Compiler.ExprNodes import * + + +class LinesResult(object): + def __init__(self): + self.lines = [] + self.s = u"" + + def put(self, s): + self.s += s + + def newline(self): + self.lines.append(self.s) + self.s = u"" + + def putline(self, s): + self.put(s) + self.newline() + +class DeclarationWriter(TreeVisitor): + + indent_string = u" " + + def __init__(self, result=None): + super(DeclarationWriter, self).__init__() + if result is None: + result = LinesResult() + self.result = result + self.numindents = 0 + self.tempnames = {} + self.tempblockindex = 0 + + def write(self, tree): + self.visit(tree) + return self.result + + def indent(self): + self.numindents += 1 + + def dedent(self): + self.numindents -= 1 + + def startline(self, s=u""): + self.result.put(self.indent_string * self.numindents + s) + + def put(self, s): + self.result.put(s) + + def putline(self, s): + self.result.putline(self.indent_string * self.numindents + s) + + def endline(self, s=u""): + self.result.putline(s) + + def line(self, s): + self.startline(s) + self.endline() + + def comma_separated_list(self, items, output_rhs=False): + if len(items) > 0: + for item in items[:-1]: + self.visit(item) + if output_rhs and item.default is not None: + self.put(u" = ") + self.visit(item.default) + self.put(u", ") + self.visit(items[-1]) + + def visit_Node(self, node): + raise AssertionError("Node not handled by serializer: %r" % node) + + def visit_ModuleNode(self, node): + self.visitchildren(node) + + def visit_StatListNode(self, node): + self.visitchildren(node) + + def visit_CDefExternNode(self, node): + if node.include_file is None: + file = u'*' + else: + file = u'"%s"' % node.include_file + self.putline(u"cdef extern from %s:" % file) + self.indent() + self.visit(node.body) + self.dedent() + + def visit_CPtrDeclaratorNode(self, node): + self.put('*') + self.visit(node.base) + + def visit_CReferenceDeclaratorNode(self, node): + self.put('&') + self.visit(node.base) + + def visit_CArrayDeclaratorNode(self, node): + self.visit(node.base) + self.put(u'[') + if node.dimension is not None: + self.visit(node.dimension) + self.put(u']') + + def visit_CArrayDeclaratorNode(self, node): + self.visit(node.base) + self.put(u'[') + if node.dimension is not None: + self.visit(node.dimension) + self.put(u']') + + def visit_CFuncDeclaratorNode(self, node): + # TODO: except, gil, etc. 
+ self.visit(node.base) + self.put(u'(') + self.comma_separated_list(node.args) + self.endline(u')') + + def visit_CNameDeclaratorNode(self, node): + self.put(node.name) + + def visit_CSimpleBaseTypeNode(self, node): + # See Parsing.p_sign_and_longness + if node.is_basic_c_type: + self.put(("unsigned ", "", "signed ")[node.signed]) + if node.longness < 0: + self.put("short " * -node.longness) + elif node.longness > 0: + self.put("long " * node.longness) + self.put(node.name) + + def visit_CComplexBaseTypeNode(self, node): + self.put(u'(') + self.visit(node.base_type) + self.visit(node.declarator) + self.put(u')') + + def visit_CNestedBaseTypeNode(self, node): + self.visit(node.base_type) + self.put(u'.') + self.put(node.name) + + def visit_TemplatedTypeNode(self, node): + self.visit(node.base_type_node) + self.put(u'[') + self.comma_separated_list(node.positional_args + node.keyword_args.key_value_pairs) + self.put(u']') + + def visit_CVarDefNode(self, node): + self.startline(u"cdef ") + self.visit(node.base_type) + self.put(u" ") + self.comma_separated_list(node.declarators, output_rhs=True) + self.endline() + + def visit_container_node(self, node, decl, extras, attributes): + # TODO: visibility + self.startline(decl) + if node.name: + self.put(u' ') + self.put(node.name) + if node.cname is not None: + self.put(u' "%s"' % node.cname) + if extras: + self.put(extras) + self.endline(':') + self.indent() + if not attributes: + self.putline('pass') + else: + for attribute in attributes: + self.visit(attribute) + self.dedent() + + def visit_CStructOrUnionDefNode(self, node): + if node.typedef_flag: + decl = u'ctypedef ' + else: + decl = u'cdef ' + if node.visibility == 'public': + decl += u'public ' + if node.packed: + decl += u'packed ' + decl += node.kind + self.visit_container_node(node, decl, None, node.attributes) + + def visit_CppClassNode(self, node): + extras = "" + if node.templates: + extras = u"[%s]" % ", ".join(node.templates) + if node.base_classes: + extras += "(%s)" % ", ".join(node.base_classes) + self.visit_container_node(node, u"cdef cppclass", extras, node.attributes) + + def visit_CEnumDefNode(self, node): + self.visit_container_node(node, u"cdef enum", None, node.items) + + def visit_CEnumDefItemNode(self, node): + self.startline(node.name) + if node.cname: + self.put(u' "%s"' % node.cname) + if node.value: + self.put(u" = ") + self.visit(node.value) + self.endline() + + def visit_CClassDefNode(self, node): + assert not node.module_name + if node.decorators: + for decorator in node.decorators: + self.visit(decorator) + self.startline(u"cdef class ") + self.put(node.class_name) + if node.base_class_name: + self.put(u"(") + if node.base_class_module: + self.put(node.base_class_module) + self.put(u".") + self.put(node.base_class_name) + self.put(u")") + self.endline(u":") + self.indent() + self.visit(node.body) + self.dedent() + + def visit_CTypeDefNode(self, node): + self.startline(u"ctypedef ") + self.visit(node.base_type) + self.put(u" ") + self.visit(node.declarator) + self.endline() + + def visit_FuncDefNode(self, node): + self.startline(u"def %s(" % node.name) + self.comma_separated_list(node.args) + self.endline(u"):") + self.indent() + self.visit(node.body) + self.dedent() + + def visit_CArgDeclNode(self, node): + if node.base_type.name is not None: + self.visit(node.base_type) + self.put(u" ") + self.visit(node.declarator) + if node.default is not None: + self.put(u" = ") + self.visit(node.default) + + def visit_CImportStatNode(self, node): + self.startline(u"cimport 
") + self.put(node.module_name) + if node.as_name: + self.put(u" as ") + self.put(node.as_name) + self.endline() + + def visit_FromCImportStatNode(self, node): + self.startline(u"from ") + self.put(node.module_name) + self.put(u" cimport ") + first = True + for pos, name, as_name, kind in node.imported_names: + assert kind is None + if first: + first = False + else: + self.put(u", ") + self.put(name) + if as_name: + self.put(u" as ") + self.put(as_name) + self.endline() + + def visit_NameNode(self, node): + self.put(node.name) + + def visit_IntNode(self, node): + self.put(node.value) + + def visit_NoneNode(self, node): + self.put(u"None") + + def visit_NotNode(self, node): + self.put(u"(not ") + self.visit(node.operand) + self.put(u")") + + def visit_DecoratorNode(self, node): + self.startline("@") + self.visit(node.decorator) + self.endline() + + def visit_BinopNode(self, node): + self.visit(node.operand1) + self.put(u" %s " % node.operator) + self.visit(node.operand2) + + def visit_AttributeNode(self, node): + self.visit(node.obj) + self.put(u".%s" % node.attribute) + + def visit_BoolNode(self, node): + self.put(str(node.value)) + + # FIXME: represent string nodes correctly + def visit_StringNode(self, node): + value = node.value + if value.encoding is not None: + value = value.encode(value.encoding) + self.put(repr(value)) + + def visit_PassStatNode(self, node): + self.startline(u"pass") + self.endline() + +class CodeWriter(DeclarationWriter): + + def visit_SingleAssignmentNode(self, node): + self.startline() + self.visit(node.lhs) + self.put(u" = ") + self.visit(node.rhs) + self.endline() + + def visit_CascadedAssignmentNode(self, node): + self.startline() + for lhs in node.lhs_list: + self.visit(lhs) + self.put(u" = ") + self.visit(node.rhs) + self.endline() + + def visit_PrintStatNode(self, node): + self.startline(u"print ") + self.comma_separated_list(node.arg_tuple.args) + if not node.append_newline: + self.put(u",") + self.endline() + + def visit_ForInStatNode(self, node): + self.startline(u"for ") + self.visit(node.target) + self.put(u" in ") + self.visit(node.iterator.sequence) + self.endline(u":") + self.indent() + self.visit(node.body) + self.dedent() + if node.else_clause is not None: + self.line(u"else:") + self.indent() + self.visit(node.else_clause) + self.dedent() + + def visit_IfStatNode(self, node): + # The IfClauseNode is handled directly without a separate match + # for clariy. + self.startline(u"if ") + self.visit(node.if_clauses[0].condition) + self.endline(":") + self.indent() + self.visit(node.if_clauses[0].body) + self.dedent() + for clause in node.if_clauses[1:]: + self.startline("elif ") + self.visit(clause.condition) + self.endline(":") + self.indent() + self.visit(clause.body) + self.dedent() + if node.else_clause is not None: + self.line("else:") + self.indent() + self.visit(node.else_clause) + self.dedent() + + def visit_SequenceNode(self, node): + self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm... 
+ + def visit_SimpleCallNode(self, node): + self.visit(node.function) + self.put(u"(") + self.comma_separated_list(node.args) + self.put(")") + + def visit_GeneralCallNode(self, node): + self.visit(node.function) + self.put(u"(") + posarg = node.positional_args + if isinstance(posarg, AsTupleNode): + self.visit(posarg.arg) + else: + self.comma_separated_list(posarg.args) # TupleNode.args + if node.keyword_args: + if isinstance(node.keyword_args, DictNode): + for i, (name, value) in enumerate(node.keyword_args.key_value_pairs): + if i > 0: + self.put(', ') + self.visit(name) + self.put('=') + self.visit(value) + else: + raise Exception("Not implemented yet") + self.put(u")") + + def visit_ExprStatNode(self, node): + self.startline() + self.visit(node.expr) + self.endline() + + def visit_InPlaceAssignmentNode(self, node): + self.startline() + self.visit(node.lhs) + self.put(u" %s= " % node.operator) + self.visit(node.rhs) + self.endline() + + def visit_WithStatNode(self, node): + self.startline() + self.put(u"with ") + self.visit(node.manager) + if node.target is not None: + self.put(u" as ") + self.visit(node.target) + self.endline(u":") + self.indent() + self.visit(node.body) + self.dedent() + + def visit_TryFinallyStatNode(self, node): + self.line(u"try:") + self.indent() + self.visit(node.body) + self.dedent() + self.line(u"finally:") + self.indent() + self.visit(node.finally_clause) + self.dedent() + + def visit_TryExceptStatNode(self, node): + self.line(u"try:") + self.indent() + self.visit(node.body) + self.dedent() + for x in node.except_clauses: + self.visit(x) + if node.else_clause is not None: + self.visit(node.else_clause) + + def visit_ExceptClauseNode(self, node): + self.startline(u"except") + if node.pattern is not None: + self.put(u" ") + self.visit(node.pattern) + if node.target is not None: + self.put(u", ") + self.visit(node.target) + self.endline(":") + self.indent() + self.visit(node.body) + self.dedent() + + def visit_ReturnStatNode(self, node): + self.startline("return ") + self.visit(node.value) + self.endline() + + def visit_ReraiseStatNode(self, node): + self.line("raise") + + def visit_ImportNode(self, node): + self.put(u"(import %s)" % node.module_name.value) + + def visit_TempsBlockNode(self, node): + """ + Temporaries are output like $1_1', where the first number is + an index of the TempsBlockNode and the second number is an index + of the temporary which that block allocates. 
+ """ + idx = 0 + for handle in node.temps: + self.tempnames[handle] = "$%d_%d" % (self.tempblockindex, idx) + idx += 1 + self.tempblockindex += 1 + self.visit(node.body) + + def visit_TempRefNode(self, node): + self.put(self.tempnames[node.handle]) + + +class PxdWriter(DeclarationWriter): + def __call__(self, node): + print(u'\n'.join(self.write(node).lines)) + return node + + def visit_CFuncDefNode(self, node): + if 'inline' in node.modifiers: + return + if node.overridable: + self.startline(u'cpdef ') + else: + self.startline(u'cdef ') + if node.visibility != 'private': + self.put(node.visibility) + self.put(u' ') + if node.api: + self.put(u'api ') + self.visit(node.declarator) + + def visit_StatNode(self, node): + pass + + +class ExpressionWriter(TreeVisitor): + + def __init__(self, result=None): + super(ExpressionWriter, self).__init__() + if result is None: + result = u"" + self.result = result + self.precedence = [0] + + def write(self, tree): + self.visit(tree) + return self.result + + def put(self, s): + self.result += s + + def remove(self, s): + if self.result.endswith(s): + self.result = self.result[:-len(s)] + + def comma_separated_list(self, items): + if len(items) > 0: + for item in items[:-1]: + self.visit(item) + self.put(u", ") + self.visit(items[-1]) + + def visit_Node(self, node): + raise AssertionError("Node not handled by serializer: %r" % node) + + def visit_NameNode(self, node): + self.put(node.name) + + def visit_NoneNode(self, node): + self.put(u"None") + + def visit_EllipsisNode(self, node): + self.put(u"...") + + def visit_BoolNode(self, node): + self.put(str(node.value)) + + def visit_ConstNode(self, node): + self.put(str(node.value)) + + def visit_ImagNode(self, node): + self.put(node.value) + self.put(u"j") + + def emit_string(self, node, prefix=u""): + repr_val = repr(node.value) + if repr_val[0] in 'ub': + repr_val = repr_val[1:] + self.put(u"%s%s" % (prefix, repr_val)) + + def visit_BytesNode(self, node): + self.emit_string(node, u"b") + + def visit_StringNode(self, node): + self.emit_string(node) + + def visit_UnicodeNode(self, node): + self.emit_string(node, u"u") + + def emit_sequence(self, node, parens=(u"", u"")): + open_paren, close_paren = parens + items = node.subexpr_nodes() + self.put(open_paren) + self.comma_separated_list(items) + self.put(close_paren) + + def visit_ListNode(self, node): + self.emit_sequence(node, u"[]") + + def visit_TupleNode(self, node): + self.emit_sequence(node, u"()") + + def visit_SetNode(self, node): + if len(node.subexpr_nodes()) > 0: + self.emit_sequence(node, u"{}") + else: + self.put(u"set()") + + def visit_DictNode(self, node): + self.emit_sequence(node, u"{}") + + def visit_DictItemNode(self, node): + self.visit(node.key) + self.put(u": ") + self.visit(node.value) + + unop_precedence = { + 'not': 3, '!': 3, + '+': 11, '-': 11, '~': 11, + } + binop_precedence = { + 'or': 1, + 'and': 2, + # unary: 'not': 3, '!': 3, + 'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4, + '|': 5, + '^': 6, + '&': 7, + '<<': 8, '>>': 8, + '+': 9, '-': 9, + '*': 10, '@': 10, '/': 10, '//': 10, '%': 10, + # unary: '+': 11, '-': 11, '~': 11 + '**': 12, + } + + def operator_enter(self, new_prec): + old_prec = self.precedence[-1] + if old_prec > new_prec: + self.put(u"(") + self.precedence.append(new_prec) + + def operator_exit(self): + old_prec, new_prec = self.precedence[-2:] + if old_prec > new_prec: + self.put(u")") + self.precedence.pop() + + def visit_NotNode(self, node): + op = 'not' + prec 
= self.unop_precedence[op] + self.operator_enter(prec) + self.put(u"not ") + self.visit(node.operand) + self.operator_exit() + + def visit_UnopNode(self, node): + op = node.operator + prec = self.unop_precedence[op] + self.operator_enter(prec) + self.put(u"%s" % node.operator) + self.visit(node.operand) + self.operator_exit() + + def visit_BinopNode(self, node): + op = node.operator + prec = self.binop_precedence.get(op, 0) + self.operator_enter(prec) + self.visit(node.operand1) + self.put(u" %s " % op.replace('_', ' ')) + self.visit(node.operand2) + self.operator_exit() + + def visit_BoolBinopNode(self, node): + self.visit_BinopNode(node) + + def visit_PrimaryCmpNode(self, node): + self.visit_BinopNode(node) + + def visit_IndexNode(self, node): + self.visit(node.base) + self.put(u"[") + if isinstance(node.index, TupleNode): + self.emit_sequence(node.index) + else: + self.visit(node.index) + self.put(u"]") + + def visit_SliceIndexNode(self, node): + self.visit(node.base) + self.put(u"[") + if node.start: + self.visit(node.start) + self.put(u":") + if node.stop: + self.visit(node.stop) + if node.slice: + self.put(u":") + self.visit(node.slice) + self.put(u"]") + + def visit_SliceNode(self, node): + if not node.start.is_none: + self.visit(node.start) + self.put(u":") + if not node.stop.is_none: + self.visit(node.stop) + if not node.step.is_none: + self.put(u":") + self.visit(node.step) + + def visit_CondExprNode(self, node): + self.visit(node.true_val) + self.put(u" if ") + self.visit(node.test) + self.put(u" else ") + self.visit(node.false_val) + + def visit_AttributeNode(self, node): + self.visit(node.obj) + self.put(u".%s" % node.attribute) + + def visit_SimpleCallNode(self, node): + self.visit(node.function) + self.put(u"(") + self.comma_separated_list(node.args) + self.put(")") + + def emit_pos_args(self, node): + if node is None: + return + if isinstance(node, AddNode): + self.emit_pos_args(node.operand1) + self.emit_pos_args(node.operand2) + elif isinstance(node, TupleNode): + for expr in node.subexpr_nodes(): + self.visit(expr) + self.put(u", ") + elif isinstance(node, AsTupleNode): + self.put("*") + self.visit(node.arg) + self.put(u", ") + else: + self.visit(node) + self.put(u", ") + + def emit_kwd_args(self, node): + if node is None: + return + if isinstance(node, MergedDictNode): + for expr in node.subexpr_nodes(): + self.emit_kwd_args(expr) + elif isinstance(node, DictNode): + for expr in node.subexpr_nodes(): + self.put(u"%s=" % expr.key.value) + self.visit(expr.value) + self.put(u", ") + else: + self.put(u"**") + self.visit(node) + self.put(u", ") + + def visit_GeneralCallNode(self, node): + self.visit(node.function) + self.put(u"(") + self.emit_pos_args(node.positional_args) + self.emit_kwd_args(node.keyword_args) + self.remove(u", ") + self.put(")") + + def emit_comprehension(self, body, target, + sequence, condition, + parens=(u"", u"")): + open_paren, close_paren = parens + self.put(open_paren) + self.visit(body) + self.put(u" for ") + self.visit(target) + self.put(u" in ") + self.visit(sequence) + if condition: + self.put(u" if ") + self.visit(condition) + self.put(close_paren) + + def visit_ComprehensionAppendNode(self, node): + self.visit(node.expr) + + def visit_DictComprehensionAppendNode(self, node): + self.visit(node.key_expr) + self.put(u": ") + self.visit(node.value_expr) + + def visit_ComprehensionNode(self, node): + tpmap = {'list': u"[]", 'dict': u"{}", 'set': u"{}"} + parens = tpmap[node.type.py_type_name()] + body = node.loop.body + target = node.loop.target + 
sequence = node.loop.iterator.sequence + condition = None + if hasattr(body, 'if_clauses'): + # type(body) is Nodes.IfStatNode + condition = body.if_clauses[0].condition + body = body.if_clauses[0].body + self.emit_comprehension(body, target, sequence, condition, parens) + + def visit_GeneratorExpressionNode(self, node): + body = node.loop.body + target = node.loop.target + sequence = node.loop.iterator.sequence + condition = None + if hasattr(body, 'if_clauses'): + # type(body) is Nodes.IfStatNode + condition = body.if_clauses[0].condition + body = body.if_clauses[0].body.expr.arg + elif hasattr(body, 'expr'): + # type(body) is Nodes.ExprStatNode + body = body.expr.arg + self.emit_comprehension(body, target, sequence, condition, u"()") diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/AnalysedTreeTransforms.py b/venv/lib/python3.8/site-packages/Cython/Compiler/AnalysedTreeTransforms.py new file mode 100644 index 0000000..07bf31f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/AnalysedTreeTransforms.py @@ -0,0 +1,99 @@ +from __future__ import absolute_import + +from .Visitor import ScopeTrackingTransform +from .Nodes import StatListNode, SingleAssignmentNode, CFuncDefNode, DefNode +from .ExprNodes import DictNode, DictItemNode, NameNode, UnicodeNode +from .PyrexTypes import py_object_type +from .StringEncoding import EncodedString +from . import Symtab + +class AutoTestDictTransform(ScopeTrackingTransform): + # Handles autotestdict directive + + blacklist = ['__cinit__', '__dealloc__', '__richcmp__', + '__nonzero__', '__bool__', + '__len__', '__contains__'] + + def visit_ModuleNode(self, node): + if node.is_pxd: + return node + self.scope_type = 'module' + self.scope_node = node + + if not self.current_directives['autotestdict']: + return node + self.all_docstrings = self.current_directives['autotestdict.all'] + self.cdef_docstrings = self.all_docstrings or self.current_directives['autotestdict.cdef'] + + assert isinstance(node.body, StatListNode) + + # First see if __test__ is already created + if u'__test__' in node.scope.entries: + # Do nothing + return node + + pos = node.pos + + self.tests = [] + self.testspos = node.pos + + test_dict_entry = node.scope.declare_var(EncodedString(u'__test__'), + py_object_type, + pos, + visibility='public') + create_test_dict_assignment = SingleAssignmentNode(pos, + lhs=NameNode(pos, name=EncodedString(u'__test__'), + entry=test_dict_entry), + rhs=DictNode(pos, key_value_pairs=self.tests)) + self.visitchildren(node) + node.body.stats.append(create_test_dict_assignment) + return node + + def add_test(self, testpos, path, doctest): + pos = self.testspos + keystr = u'%s (line %d)' % (path, testpos[1]) + key = UnicodeNode(pos, value=EncodedString(keystr)) + value = UnicodeNode(pos, value=doctest) + self.tests.append(DictItemNode(pos, key=key, value=value)) + + def visit_ExprNode(self, node): + # expressions cannot contain functions and lambda expressions + # do not have a docstring + return node + + def visit_FuncDefNode(self, node): + if not node.doc or (isinstance(node, DefNode) and node.fused_py_func): + return node + if not self.cdef_docstrings: + if isinstance(node, CFuncDefNode) and not node.py_func: + return node + if not self.all_docstrings and '>>>' not in node.doc: + return node + + pos = self.testspos + if self.scope_type == 'module': + path = node.entry.name + elif self.scope_type in ('pyclass', 'cclass'): + if isinstance(node, CFuncDefNode): + if node.py_func is not None: + name = node.py_func.name + else: + 
name = node.entry.name + else: + name = node.name + if self.scope_type == 'cclass' and name in self.blacklist: + return node + if self.scope_type == 'pyclass': + class_name = self.scope_node.name + else: + class_name = self.scope_node.class_name + if isinstance(node.entry.scope, Symtab.PropertyScope): + property_method_name = node.entry.scope.name + path = "%s.%s.%s" % (class_name, node.entry.scope.name, + node.entry.name) + else: + path = "%s.%s" % (class_name, node.entry.name) + else: + assert False + self.add_test(node.pos, path, node.doc) + return node diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Annotate.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Annotate.py new file mode 100644 index 0000000..5feac02 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Annotate.py @@ -0,0 +1,317 @@ +# Note: Work in progress + +from __future__ import absolute_import + +import os +import os.path +import re +import codecs +import textwrap +from datetime import datetime +from functools import partial +from collections import defaultdict +from xml.sax.saxutils import escape as html_escape +try: + from StringIO import StringIO +except ImportError: + from io import StringIO # does not support writing 'str' in Py2 + +from . import Version +from .Code import CCodeWriter +from .. import Utils + + +class AnnotationCCodeWriter(CCodeWriter): + + def __init__(self, create_from=None, buffer=None, copy_formatting=True): + CCodeWriter.__init__(self, create_from, buffer, copy_formatting=copy_formatting) + if create_from is None: + self.annotation_buffer = StringIO() + self.last_annotated_pos = None + # annotations[filename][line] -> [(column, AnnotationItem)*] + self.annotations = defaultdict(partial(defaultdict, list)) + # code[filename][line] -> str + self.code = defaultdict(partial(defaultdict, str)) + # scopes[filename][line] -> set(scopes) + self.scopes = defaultdict(partial(defaultdict, set)) + else: + # When creating an insertion point, keep references to the same database + self.annotation_buffer = create_from.annotation_buffer + self.annotations = create_from.annotations + self.code = create_from.code + self.scopes = create_from.scopes + self.last_annotated_pos = create_from.last_annotated_pos + + def create_new(self, create_from, buffer, copy_formatting): + return AnnotationCCodeWriter(create_from, buffer, copy_formatting) + + def write(self, s): + CCodeWriter.write(self, s) + self.annotation_buffer.write(s) + + def mark_pos(self, pos, trace=True): + if pos is not None: + CCodeWriter.mark_pos(self, pos, trace) + if self.funcstate and self.funcstate.scope: + # lambdas and genexprs can result in multiple scopes per line => keep them in a set + self.scopes[pos[0].filename][pos[1]].add(self.funcstate.scope) + if self.last_annotated_pos: + source_desc, line, _ = self.last_annotated_pos + pos_code = self.code[source_desc.filename] + pos_code[line] += self.annotation_buffer.getvalue() + self.annotation_buffer = StringIO() + self.last_annotated_pos = pos + + def annotate(self, pos, item): + self.annotations[pos[0].filename][pos[1]].append((pos[2], item)) + + def _css(self): + """css template will later allow to choose a colormap""" + css = [self._css_template] + for i in range(255): + color = u"FFFF%02x" % int(255/(1+i/10.0)) + css.append('.cython.score-%d {background-color: #%s;}' % (i, color)) + try: + from pygments.formatters import HtmlFormatter + except ImportError: + pass + else: + css.append(HtmlFormatter().get_style_defs('.cython')) + return '\n'.join(css) + + 
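+    # The score-N classes generated above shade annotated lines from white
+    # (score-0, #FFFFff) towards yellow as the Python-interaction score
+    # grows; e.g. score-10 yields int(255/(1+10/10.0)) == 127, i.e. #FFFF7f.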
_css_template = textwrap.dedent(""" + body.cython { font-family: courier; font-size: 12; } + + .cython.tag { } + .cython.line { margin: 0em } + .cython.code { font-size: 9; color: #444444; display: none; margin: 0px 0px 0px 8px; border-left: 8px none; } + + .cython.line .run { background-color: #B0FFB0; } + .cython.line .mis { background-color: #FFB0B0; } + .cython.code.run { border-left: 8px solid #B0FFB0; } + .cython.code.mis { border-left: 8px solid #FFB0B0; } + + .cython.code .py_c_api { color: red; } + .cython.code .py_macro_api { color: #FF7000; } + .cython.code .pyx_c_api { color: #FF3000; } + .cython.code .pyx_macro_api { color: #FF7000; } + .cython.code .refnanny { color: #FFA000; } + .cython.code .trace { color: #FFA000; } + .cython.code .error_goto { color: #FFA000; } + + .cython.code .coerce { color: #008000; border: 1px dotted #008000 } + .cython.code .py_attr { color: #FF0000; font-weight: bold; } + .cython.code .c_attr { color: #0000FF; } + .cython.code .py_call { color: #FF0000; font-weight: bold; } + .cython.code .c_call { color: #0000FF; } + """) + + # on-click toggle function to show/hide C source code + _onclick_attr = ' onclick="{0}"'.format(( + "(function(s){" + " s.display = s.display === 'block' ? 'none' : 'block'" + "})(this.nextElementSibling.style)" + ).replace(' ', '') # poor dev's JS minification + ) + + def save_annotation(self, source_filename, target_filename, coverage_xml=None): + with Utils.open_source_file(source_filename) as f: + code = f.read() + generated_code = self.code.get(source_filename, {}) + c_file = Utils.decode_filename(os.path.basename(target_filename)) + html_filename = os.path.splitext(target_filename)[0] + ".html" + + with codecs.open(html_filename, "w", encoding="UTF-8") as out_buffer: + out_buffer.write(self._save_annotation(code, generated_code, c_file, source_filename, coverage_xml)) + + def _save_annotation_header(self, c_file, source_filename, coverage_timestamp=None): + coverage_info = '' + if coverage_timestamp: + coverage_info = u' with coverage data from {timestamp}'.format( + timestamp=datetime.fromtimestamp(int(coverage_timestamp) // 1000)) + + outlist = [ + textwrap.dedent(u'''\ + + + + + + Cython: {filename} + + + +

+                <p><span style="border-bottom: solid 1px grey;">Generated by Cython {watermark}</span>{more_info}</p>
+                <p>
+                    <span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
+                    Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
+                </p>
+ ''').format(css=self._css(), watermark=Version.watermark, + filename=os.path.basename(source_filename) if source_filename else '', + more_info=coverage_info) + ] + if c_file: + outlist.append(u'

<p>Raw output: <a href="%s">%s</a></p>
\n' % (c_file, c_file)) + return outlist + + def _save_annotation_footer(self): + return (u'\n',) + + def _save_annotation(self, code, generated_code, c_file=None, source_filename=None, coverage_xml=None): + """ + lines : original cython source code split by lines + generated_code : generated c code keyed by line number in original file + target filename : name of the file in which to store the generated html + c_file : filename in which the c_code has been written + """ + if coverage_xml is not None and source_filename: + coverage_timestamp = coverage_xml.get('timestamp', '').strip() + covered_lines = self._get_line_coverage(coverage_xml, source_filename) + else: + coverage_timestamp = covered_lines = None + annotation_items = dict(self.annotations[source_filename]) + scopes = dict(self.scopes[source_filename]) + + outlist = [] + outlist.extend(self._save_annotation_header(c_file, source_filename, coverage_timestamp)) + outlist.extend(self._save_annotation_body(code, generated_code, annotation_items, scopes, covered_lines)) + outlist.extend(self._save_annotation_footer()) + return ''.join(outlist) + + def _get_line_coverage(self, coverage_xml, source_filename): + coverage_data = None + for entry in coverage_xml.iterfind('.//class'): + if not entry.get('filename'): + continue + if (entry.get('filename') == source_filename or + os.path.abspath(entry.get('filename')) == source_filename): + coverage_data = entry + break + elif source_filename.endswith(entry.get('filename')): + coverage_data = entry # but we might still find a better match... + if coverage_data is None: + return None + return dict( + (int(line.get('number')), int(line.get('hits'))) + for line in coverage_data.iterfind('lines/line') + ) + + def _htmlify_code(self, code): + try: + from pygments import highlight + from pygments.lexers import CythonLexer + from pygments.formatters import HtmlFormatter + except ImportError: + # no Pygments, just escape the code + return html_escape(code) + + html_code = highlight( + code, CythonLexer(stripnl=False, stripall=False), + HtmlFormatter(nowrap=True)) + return html_code + + def _save_annotation_body(self, cython_code, generated_code, annotation_items, scopes, covered_lines=None): + outlist = [u'
<div class="cython">']
+        pos_comment_marker = u'/* \N{HORIZONTAL ELLIPSIS} */\n'
+        new_calls_map = dict(
+            (name, 0) for name in
+            'refnanny trace py_macro_api py_c_api pyx_macro_api pyx_c_api error_goto'.split()
+        ).copy
+
+        self.mark_pos(None)
+
+        def annotate(match):
+            group_name = match.lastgroup
+            calls[group_name] += 1
+            return u"<span class='%s'>%s</span>" % (
+                group_name, match.group(group_name))
+
+        lines = self._htmlify_code(cython_code).splitlines()
+        lineno_width = len(str(len(lines)))
+        if not covered_lines:
+            covered_lines = None
+
+        for k, line in enumerate(lines, 1):
+            try:
+                c_code = generated_code[k]
+            except KeyError:
+                c_code = ''
+            else:
+                c_code = _replace_pos_comment(pos_comment_marker, c_code)
+                if c_code.startswith(pos_comment_marker):
+                    c_code = c_code[len(pos_comment_marker):]
+                c_code = html_escape(c_code)
+
+            calls = new_calls_map()
+            c_code = _parse_code(annotate, c_code)
+            score = (5 * calls['py_c_api'] + 2 * calls['pyx_c_api'] +
+                     calls['py_macro_api'] + calls['pyx_macro_api'])
+
+            if c_code:
+                onclick = self._onclick_attr
+                expandsymbol = '+'
+            else:
+                onclick = ''
+                expandsymbol = '&#xA0;'
+
+            covered = ''
+            if covered_lines is not None and k in covered_lines:
+                hits = covered_lines[k]
+                if hits is not None:
+                    covered = 'run' if hits else 'mis'
+
+            outlist.append(
+                u'<pre class="cython line score-{score}"{onclick}>'
+                # generate line number with expand symbol in front,
+                # and the right number of digits
+                u'{expandsymbol}<span class="{covered}">{line:0{lineno_width}d}</span>: {code}</pre>
\n'.format( + score=score, + expandsymbol=expandsymbol, + covered=covered, + lineno_width=lineno_width, + line=k, + code=line.rstrip(), + onclick=onclick, + )) + if c_code: + outlist.append(u"
<pre class='cython code score-{score} {covered}'{onclick}>{code}</pre>
".format( + score=score, covered=covered, code=c_code)) + outlist.append(u"
") + return outlist + + +_parse_code = re.compile(( + br'(?P__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|' + br'(?P__Pyx_Trace[A-Za-z]+)|' + br'(?:' + br'(?P__Pyx_[A-Z][A-Z_]+)|' + br'(?P(?:__Pyx_[A-Z][a-z_][A-Za-z_]*)|__pyx_convert_[A-Za-z_]*)|' + br'(?PPy[A-Z][a-z]+_[A-Z][A-Z_]+)|' + br'(?PPy[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)' + br')(?=\()|' # look-ahead to exclude subsequent '(' from replacement + br'(?P(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))' +).decode('ascii')).sub + + +_replace_pos_comment = re.compile( + # this matches what Cython generates as code line marker comment + br'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n'.decode('ascii'), + re.M +).sub + + +class AnnotationItem(object): + + def __init__(self, style, text, tag="", size=0): + self.style = style + self.text = text + self.tag = tag + self.size = size + + def start(self): + return u"%s" % (self.style, self.text, self.tag) + + def end(self): + return self.size, u"" diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/AutoDocTransforms.py b/venv/lib/python3.8/site-packages/Cython/Compiler/AutoDocTransforms.py new file mode 100644 index 0000000..d3c0a1d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/AutoDocTransforms.py @@ -0,0 +1,214 @@ +from __future__ import absolute_import, print_function + +from .Visitor import CythonTransform +from .StringEncoding import EncodedString +from . import Options +from . import PyrexTypes, ExprNodes +from ..CodeWriter import ExpressionWriter + + +class AnnotationWriter(ExpressionWriter): + + def visit_Node(self, node): + self.put(u"") + + def visit_LambdaNode(self, node): + # XXX Should we do better? + self.put("") + + +class EmbedSignature(CythonTransform): + + def __init__(self, context): + super(EmbedSignature, self).__init__(context) + self.class_name = None + self.class_node = None + + def _fmt_expr(self, node): + writer = AnnotationWriter() + result = writer.write(node) + # print(type(node).__name__, '-->', result) + return result + + def _fmt_arg(self, arg): + if arg.type is PyrexTypes.py_object_type or arg.is_self_arg: + doc = arg.name + else: + doc = arg.type.declaration_code(arg.name, for_display=1) + + if arg.annotation: + annotation = self._fmt_expr(arg.annotation) + doc = doc + (': %s' % annotation) + if arg.default: + default = self._fmt_expr(arg.default) + doc = doc + (' = %s' % default) + elif arg.default: + default = self._fmt_expr(arg.default) + doc = doc + ('=%s' % default) + return doc + + def _fmt_star_arg(self, arg): + arg_doc = arg.name + if arg.annotation: + annotation = self._fmt_expr(arg.annotation) + arg_doc = arg_doc + (': %s' % annotation) + return arg_doc + + def _fmt_arglist(self, args, + npargs=0, pargs=None, + nkargs=0, kargs=None, + hide_self=False): + arglist = [] + for arg in args: + if not hide_self or not arg.entry.is_self_arg: + arg_doc = self._fmt_arg(arg) + arglist.append(arg_doc) + if pargs: + arg_doc = self._fmt_star_arg(pargs) + arglist.insert(npargs, '*%s' % arg_doc) + elif nkargs: + arglist.insert(npargs, '*') + if kargs: + arg_doc = self._fmt_star_arg(kargs) + arglist.append('**%s' % arg_doc) + return arglist + + def _fmt_ret_type(self, ret): + if ret is PyrexTypes.py_object_type: + return None + else: + return ret.declaration_code("", for_display=1) + + def _fmt_signature(self, cls_name, func_name, args, + npargs=0, pargs=None, + nkargs=0, kargs=None, + return_expr=None, + return_type=None, hide_self=False): + arglist = self._fmt_arglist(args, + npargs, pargs, + nkargs, kargs, + hide_self=hide_self) + arglist_doc 
= ', '.join(arglist) + func_doc = '%s(%s)' % (func_name, arglist_doc) + if cls_name: + func_doc = '%s.%s' % (cls_name, func_doc) + ret_doc = None + if return_expr: + ret_doc = self._fmt_expr(return_expr) + elif return_type: + ret_doc = self._fmt_ret_type(return_type) + if ret_doc: + func_doc = '%s -> %s' % (func_doc, ret_doc) + return func_doc + + def _embed_signature(self, signature, node_doc): + if node_doc: + return "%s\n%s" % (signature, node_doc) + else: + return signature + + def __call__(self, node): + if not Options.docstrings: + return node + else: + return super(EmbedSignature, self).__call__(node) + + def visit_ClassDefNode(self, node): + oldname = self.class_name + oldclass = self.class_node + self.class_node = node + try: + # PyClassDefNode + self.class_name = node.name + except AttributeError: + # CClassDefNode + self.class_name = node.class_name + self.visitchildren(node) + self.class_name = oldname + self.class_node = oldclass + return node + + def visit_LambdaNode(self, node): + # lambda expressions so not have signature or inner functions + return node + + def visit_DefNode(self, node): + if not self.current_directives['embedsignature']: + return node + + is_constructor = False + hide_self = False + if node.entry.is_special: + is_constructor = self.class_node and node.name == '__init__' + if not is_constructor: + return node + class_name, func_name = None, self.class_name + hide_self = True + else: + class_name, func_name = self.class_name, node.name + + nkargs = getattr(node, 'num_kwonly_args', 0) + npargs = len(node.args) - nkargs + signature = self._fmt_signature( + class_name, func_name, node.args, + npargs, node.star_arg, + nkargs, node.starstar_arg, + return_expr=node.return_type_annotation, + return_type=None, hide_self=hide_self) + if signature: + if is_constructor: + doc_holder = self.class_node.entry.type.scope + else: + doc_holder = node.entry + + if doc_holder.doc is not None: + old_doc = doc_holder.doc + elif not is_constructor and getattr(node, 'py_func', None) is not None: + old_doc = node.py_func.entry.doc + else: + old_doc = None + new_doc = self._embed_signature(signature, old_doc) + doc_holder.doc = EncodedString(new_doc) + if not is_constructor and getattr(node, 'py_func', None) is not None: + node.py_func.entry.doc = EncodedString(new_doc) + return node + + def visit_CFuncDefNode(self, node): + if not self.current_directives['embedsignature']: + return node + if not node.overridable: # not cpdef FOO(...): + return node + + signature = self._fmt_signature( + self.class_name, node.declarator.base.name, + node.declarator.args, + return_type=node.return_type) + if signature: + if node.entry.doc is not None: + old_doc = node.entry.doc + elif getattr(node, 'py_func', None) is not None: + old_doc = node.py_func.entry.doc + else: + old_doc = None + new_doc = self._embed_signature(signature, old_doc) + node.entry.doc = EncodedString(new_doc) + if hasattr(node, 'py_func') and node.py_func is not None: + node.py_func.entry.doc = EncodedString(new_doc) + return node + + def visit_PropertyNode(self, node): + if not self.current_directives['embedsignature']: + return node + + entry = node.entry + if entry.visibility == 'public': + # property synthesised from a cdef public attribute + type_name = entry.type.declaration_code("", for_display=1) + if not entry.type.is_pyobject: + type_name = "'%s'" % type_name + elif entry.type.is_extension_type: + type_name = entry.type.module_name + '.' 
+ type_name + signature = '%s: %s' % (entry.name, type_name) + new_doc = self._embed_signature(signature, entry.doc) + entry.doc = EncodedString(new_doc) + return node diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Buffer.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Buffer.py new file mode 100644 index 0000000..04385b4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Buffer.py @@ -0,0 +1,738 @@ +from __future__ import absolute_import + +from .Visitor import CythonTransform +from .ModuleNode import ModuleNode +from .Errors import CompileError +from .UtilityCode import CythonUtilityCode +from .Code import UtilityCode, TempitaUtilityCode + +from . import Options +from . import Interpreter +from . import PyrexTypes +from . import Naming +from . import Symtab + +def dedent(text, reindent=0): + from textwrap import dedent + text = dedent(text) + if reindent > 0: + indent = " " * reindent + text = '\n'.join([indent + x for x in text.split('\n')]) + return text + +class IntroduceBufferAuxiliaryVars(CythonTransform): + + # + # Entry point + # + + buffers_exists = False + using_memoryview = False + + def __call__(self, node): + assert isinstance(node, ModuleNode) + self.max_ndim = 0 + result = super(IntroduceBufferAuxiliaryVars, self).__call__(node) + if self.buffers_exists: + use_bufstruct_declare_code(node.scope) + use_py2_buffer_functions(node.scope) + + return result + + + # + # Basic operations for transforms + # + def handle_scope(self, node, scope): + # For all buffers, insert extra variables in the scope. + # The variables are also accessible from the buffer_info + # on the buffer entry + scope_items = scope.entries.items() + bufvars = [entry for name, entry in scope_items if entry.type.is_buffer] + if len(bufvars) > 0: + bufvars.sort(key=lambda entry: entry.name) + self.buffers_exists = True + + memviewslicevars = [entry for name, entry in scope_items if entry.type.is_memoryviewslice] + if len(memviewslicevars) > 0: + self.buffers_exists = True + + + for (name, entry) in scope_items: + if name == 'memoryview' and isinstance(entry.utility_code_definition, CythonUtilityCode): + self.using_memoryview = True + break + del scope_items + + if isinstance(node, ModuleNode) and len(bufvars) > 0: + # for now...note that pos is wrong + raise CompileError(node.pos, "Buffer vars not allowed in module scope") + for entry in bufvars: + if entry.type.dtype.is_ptr: + raise CompileError(node.pos, "Buffers with pointer types not yet supported.") + + name = entry.name + buftype = entry.type + if buftype.ndim > Options.buffer_max_dims: + raise CompileError(node.pos, + "Buffer ndims exceeds Options.buffer_max_dims = %d" % Options.buffer_max_dims) + if buftype.ndim > self.max_ndim: + self.max_ndim = buftype.ndim + + # Declare auxiliary vars + def decvar(type, prefix): + cname = scope.mangle(prefix, name) + aux_var = scope.declare_var(name=None, cname=cname, + type=type, pos=node.pos) + if entry.is_arg: + aux_var.used = True # otherwise, NameNode will mark whether it is used + + return aux_var + + auxvars = ((PyrexTypes.c_pyx_buffer_nd_type, Naming.pybuffernd_prefix), + (PyrexTypes.c_pyx_buffer_type, Naming.pybufferstruct_prefix)) + pybuffernd, rcbuffer = [decvar(type, prefix) for (type, prefix) in auxvars] + + entry.buffer_aux = Symtab.BufferAux(pybuffernd, rcbuffer) + + scope.buffer_entries = bufvars + self.scope = scope + + def visit_ModuleNode(self, node): + self.handle_scope(node, node.scope) + self.visitchildren(node) + return node + + def visit_FuncDefNode(self, 
node): + self.handle_scope(node, node.local_scope) + self.visitchildren(node) + return node + +# +# Analysis +# +buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast") # ordered! +buffer_defaults = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False} +buffer_positional_options_count = 1 # anything beyond this needs keyword argument + +ERR_BUF_OPTION_UNKNOWN = '"%s" is not a buffer option' +ERR_BUF_TOO_MANY = 'Too many buffer options' +ERR_BUF_DUP = '"%s" buffer option already supplied' +ERR_BUF_MISSING = '"%s" missing' +ERR_BUF_MODE = 'Only allowed buffer modes are: "c", "fortran", "full", "strided" (as a compile-time string)' +ERR_BUF_NDIM = 'ndim must be a non-negative integer' +ERR_BUF_DTYPE = 'dtype must be "object", numeric type or a struct' +ERR_BUF_BOOL = '"%s" must be a boolean' + +def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, need_complete=True): + """ + Must be called during type analysis, as analyse is called + on the dtype argument. + + posargs and dictargs should consist of a list and a dict + of tuples (value, pos). Defaults should be a dict of values. + + Returns a dict containing all the options a buffer can have and + its value (with the positions stripped). + """ + if defaults is None: + defaults = buffer_defaults + + posargs, dictargs = Interpreter.interpret_compiletime_options( + posargs, dictargs, type_env=env, type_args=(0, 'dtype')) + + if len(posargs) > buffer_positional_options_count: + raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY) + + options = {} + for name, (value, pos) in dictargs.items(): + if not name in buffer_options: + raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name) + options[name] = value + + for name, (value, pos) in zip(buffer_options, posargs): + if not name in buffer_options: + raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name) + if name in options: + raise CompileError(pos, ERR_BUF_DUP % name) + options[name] = value + + # Check that they are all there and copy defaults + for name in buffer_options: + if not name in options: + try: + options[name] = defaults[name] + except KeyError: + if need_complete: + raise CompileError(globalpos, ERR_BUF_MISSING % name) + + dtype = options.get("dtype") + if dtype and dtype.is_extension_type: + raise CompileError(globalpos, ERR_BUF_DTYPE) + + ndim = options.get("ndim") + if ndim and (not isinstance(ndim, int) or ndim < 0): + raise CompileError(globalpos, ERR_BUF_NDIM) + + mode = options.get("mode") + if mode and not (mode in ('full', 'strided', 'c', 'fortran')): + raise CompileError(globalpos, ERR_BUF_MODE) + + def assert_bool(name): + x = options.get(name) + if not isinstance(x, bool): + raise CompileError(globalpos, ERR_BUF_BOOL % name) + + assert_bool('negative_indices') + assert_bool('cast') + + return options + + +# +# Code generation +# + +class BufferEntry(object): + def __init__(self, entry): + self.entry = entry + self.type = entry.type + self.cname = entry.buffer_aux.buflocal_nd_var.cname + self.buf_ptr = "%s.rcbuffer->pybuffer.buf" % self.cname + self.buf_ptr_type = entry.type.buffer_ptr_type + self.init_attributes() + + def init_attributes(self): + self.shape = self.get_buf_shapevars() + self.strides = self.get_buf_stridevars() + self.suboffsets = self.get_buf_suboffsetvars() + + def get_buf_suboffsetvars(self): + return self._for_all_ndim("%s.diminfo[%d].suboffsets") + + def get_buf_stridevars(self): + return self._for_all_ndim("%s.diminfo[%d].strides") + + def get_buf_shapevars(self): + return 
self._for_all_ndim("%s.diminfo[%d].shape") + + def _for_all_ndim(self, s): + return [s % (self.cname, i) for i in range(self.type.ndim)] + + def generate_buffer_lookup_code(self, code, index_cnames): + # Create buffer lookup and return it + # This is done via utility macros/inline functions, which vary + # according to the access mode used. + params = [] + nd = self.type.ndim + mode = self.type.mode + if mode == 'full': + for i, s, o in zip(index_cnames, + self.get_buf_stridevars(), + self.get_buf_suboffsetvars()): + params.append(i) + params.append(s) + params.append(o) + funcname = "__Pyx_BufPtrFull%dd" % nd + funcgen = buf_lookup_full_code + else: + if mode == 'strided': + funcname = "__Pyx_BufPtrStrided%dd" % nd + funcgen = buf_lookup_strided_code + elif mode == 'c': + funcname = "__Pyx_BufPtrCContig%dd" % nd + funcgen = buf_lookup_c_code + elif mode == 'fortran': + funcname = "__Pyx_BufPtrFortranContig%dd" % nd + funcgen = buf_lookup_fortran_code + else: + assert False + for i, s in zip(index_cnames, self.get_buf_stridevars()): + params.append(i) + params.append(s) + + # Make sure the utility code is available + if funcname not in code.globalstate.utility_codes: + code.globalstate.utility_codes.add(funcname) + protocode = code.globalstate['utility_code_proto'] + defcode = code.globalstate['utility_code_def'] + funcgen(protocode, defcode, name=funcname, nd=nd) + + buf_ptr_type_code = self.buf_ptr_type.empty_declaration_code() + ptrcode = "%s(%s, %s, %s)" % (funcname, buf_ptr_type_code, self.buf_ptr, + ", ".join(params)) + return ptrcode + + +def get_flags(buffer_aux, buffer_type): + flags = 'PyBUF_FORMAT' + mode = buffer_type.mode + if mode == 'full': + flags += '| PyBUF_INDIRECT' + elif mode == 'strided': + flags += '| PyBUF_STRIDES' + elif mode == 'c': + flags += '| PyBUF_C_CONTIGUOUS' + elif mode == 'fortran': + flags += '| PyBUF_F_CONTIGUOUS' + else: + assert False + if buffer_aux.writable_needed: flags += "| PyBUF_WRITABLE" + return flags + +def used_buffer_aux_vars(entry): + buffer_aux = entry.buffer_aux + buffer_aux.buflocal_nd_var.used = True + buffer_aux.rcbuf_var.used = True + +def put_unpack_buffer_aux_into_scope(buf_entry, code): + # Generate code to copy the needed struct info into local + # variables. 
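+ # Illustrative sketch only: for a hypothetical 2-dim strided buffer "v", + # the C emitted by the loop below looks roughly like + # __pyx_pybuffernd_v.diminfo[0].strides = __pyx_pybuffernd_v.rcbuffer->pybuffer.strides[0]; + # __pyx_pybuffernd_v.diminfo[0].shape = __pyx_pybuffernd_v.rcbuffer->pybuffer.shape[0]; + # (and likewise for dimension 1; 'full' mode copies suboffsets as well).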
+ buffer_aux, mode = buf_entry.buffer_aux, buf_entry.type.mode + pybuffernd_struct = buffer_aux.buflocal_nd_var.cname + + fldnames = ['strides', 'shape'] + if mode == 'full': + fldnames.append('suboffsets') + + ln = [] + for i in range(buf_entry.type.ndim): + for fldname in fldnames: + ln.append("%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];" % \ + (pybuffernd_struct, i, fldname, + pybuffernd_struct, fldname, i)) + code.putln(' '.join(ln)) + +def put_init_vars(entry, code): + bufaux = entry.buffer_aux + pybuffernd_struct = bufaux.buflocal_nd_var.cname + pybuffer_struct = bufaux.rcbuf_var.cname + # init pybuffer_struct + code.putln("%s.pybuffer.buf = NULL;" % pybuffer_struct) + code.putln("%s.refcount = 0;" % pybuffer_struct) + # init the buffer object + # code.put_init_var_to_py_none(entry) + # init the pybuffernd_struct + code.putln("%s.data = NULL;" % pybuffernd_struct) + code.putln("%s.rcbuffer = &%s;" % (pybuffernd_struct, pybuffer_struct)) + + +def put_acquire_arg_buffer(entry, code, pos): + buffer_aux = entry.buffer_aux + getbuffer = get_getbuffer_call(code, entry.cname, buffer_aux, entry.type) + + # Acquire any new buffer + code.putln("{") + code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % entry.type.dtype.struct_nesting_depth()) + code.putln(code.error_goto_if("%s == -1" % getbuffer, pos)) + code.putln("}") + # An exception raised in arg parsing cannot be caught, so no + # need to care about the buffer then. + put_unpack_buffer_aux_into_scope(entry, code) + + +def put_release_buffer_code(code, entry): + code.globalstate.use_utility_code(acquire_utility_code) + code.putln("__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);" % entry.buffer_aux.buflocal_nd_var.cname) + + +def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type): + ndim = buffer_type.ndim + cast = int(buffer_type.cast) + flags = get_flags(buffer_aux, buffer_type) + pybuffernd_struct = buffer_aux.buflocal_nd_var.cname + + dtype_typeinfo = get_type_information_cname(code, buffer_type.dtype) + + code.globalstate.use_utility_code(acquire_utility_code) + return ("__Pyx_GetBufferAndValidate(&%(pybuffernd_struct)s.rcbuffer->pybuffer, " + "(PyObject*)%(obj_cname)s, &%(dtype_typeinfo)s, %(flags)s, %(ndim)d, " + "%(cast)d, __pyx_stack)" % locals()) + + +def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry, + is_initialized, pos, code): + """ + Generate code for reassigning a buffer variable. This only deals with getting + the buffer auxiliary structure and variables set up correctly; the assignment + itself and the refcounting are the responsibility of the caller. + + However, the assignment operation may throw an exception so that the reassignment + never happens. + + Depending on the circumstances there are two possible outcomes: + - Old buffer released, new acquired, rhs assigned to lhs + - Old buffer released, new acquired which fails, reacquire old lhs buffer + (which may or may not succeed).
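+ + A rough sketch of the C emitted for the fallback path (identifiers here + are illustrative, not the exact generated names): + + __Pyx_SafeReleaseBuffer(&lhs.rcbuffer->pybuffer); + retcode = getbuffer(rhs); + if (unlikely(retcode < 0)) { + /* try to reacquire the old buffer before raising */ + if (unlikely(getbuffer(lhs) == -1)) __Pyx_RaiseBufferFallbackError(); + }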
+ """ + + buffer_aux, buffer_type = buf_entry.buffer_aux, buf_entry.type + pybuffernd_struct = buffer_aux.buflocal_nd_var.cname + flags = get_flags(buffer_aux, buffer_type) + + code.putln("{") # Set up necessary stack for getbuffer + code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth()) + + getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below + + if is_initialized: + # Release any existing buffer + code.putln('__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);' % pybuffernd_struct) + # Acquire + retcode_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) + code.putln("%s = %s;" % (retcode_cname, getbuffer % rhs_cname)) + code.putln('if (%s) {' % (code.unlikely("%s < 0" % retcode_cname))) + # If acquisition failed, attempt to reacquire the old buffer + # before raising the exception. A failure of reacquisition + # will cause the reacquisition exception to be reported, one + # can consider working around this later. + exc_temps = tuple(code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False) + for _ in range(3)) + code.putln('PyErr_Fetch(&%s, &%s, &%s);' % exc_temps) + code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % lhs_cname))) + code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % exc_temps) # Do not refnanny these! + code.globalstate.use_utility_code(raise_buffer_fallback_code) + code.putln('__Pyx_RaiseBufferFallbackError();') + code.putln('} else {') + code.putln('PyErr_Restore(%s, %s, %s);' % exc_temps) + code.putln('}') + code.putln('%s = %s = %s = 0;' % exc_temps) + for t in exc_temps: + code.funcstate.release_temp(t) + code.putln('}') + # Unpack indices + put_unpack_buffer_aux_into_scope(buf_entry, code) + code.putln(code.error_goto_if_neg(retcode_cname, pos)) + code.funcstate.release_temp(retcode_cname) + else: + # Our entry had no previous value, so set to None when acquisition fails. + # In this case, auxiliary vars should be set up right in initialization to a zero-buffer, + # so it suffices to set the buf field to NULL. + code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % rhs_cname))) + code.putln('%s = %s; __Pyx_INCREF(Py_None); %s.rcbuffer->pybuffer.buf = NULL;' % + (lhs_cname, + PyrexTypes.typecast(buffer_type, PyrexTypes.py_object_type, "Py_None"), + pybuffernd_struct)) + code.putln(code.error_goto(pos)) + code.put('} else {') + # Unpack indices + put_unpack_buffer_aux_into_scope(buf_entry, code) + code.putln('}') + + code.putln("}") # Release stack + + +def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives, + pos, code, negative_indices, in_nogil_context): + """ + Generates code to process indices and calculate an offset into + a buffer. Returns a C string which gives a pointer which can be + read from or written to at will (it is an expression so caller should + store it in a temporary if it is used more than once). + + As the bounds checking can have any number of combinations of unsigned + arguments, smart optimizations etc. we insert it directly in the function + body. The lookup however is delegated to a inline function that is instantiated + once per ndim (lookup with suboffsets tend to get quite complicated). + + entry is a BufferEntry + """ + negative_indices = directives['wraparound'] and negative_indices + + if directives['boundscheck']: + # Check bounds and fix negative indices. + # We allocate a temporary which is initialized to -1, meaning OK (!). 
+ # If an error occurs, the temp is set to the index dimension the + # error is occurring at. + failed_dim_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) + code.putln("%s = -1;" % failed_dim_temp) + for dim, (signed, cname, shape) in enumerate(zip(index_signeds, index_cnames, entry.get_buf_shapevars())): + if signed != 0: + # not unsigned, deal with negative index + code.putln("if (%s < 0) {" % cname) + if negative_indices: + code.putln("%s += %s;" % (cname, shape)) + code.putln("if (%s) %s = %d;" % ( + code.unlikely("%s < 0" % cname), + failed_dim_temp, dim)) + else: + code.putln("%s = %d;" % (failed_dim_temp, dim)) + code.put("} else ") + # check bounds in positive direction + if signed != 0: + cast = "" + else: + cast = "(size_t)" + code.putln("if (%s) %s = %d;" % ( + code.unlikely("%s >= %s%s" % (cname, cast, shape)), + failed_dim_temp, dim)) + + if in_nogil_context: + code.globalstate.use_utility_code(raise_indexerror_nogil) + func = '__Pyx_RaiseBufferIndexErrorNogil' + else: + code.globalstate.use_utility_code(raise_indexerror_code) + func = '__Pyx_RaiseBufferIndexError' + + code.putln("if (%s) {" % code.unlikely("%s != -1" % failed_dim_temp)) + code.putln('%s(%s);' % (func, failed_dim_temp)) + code.putln(code.error_goto(pos)) + code.putln('}') + code.funcstate.release_temp(failed_dim_temp) + elif negative_indices: + # Only fix negative indices. + for signed, cname, shape in zip(index_signeds, index_cnames, entry.get_buf_shapevars()): + if signed != 0: + code.putln("if (%s < 0) %s += %s;" % (cname, cname, shape)) + + return entry.generate_buffer_lookup_code(code, index_cnames) + + +def use_bufstruct_declare_code(env): + env.use_utility_code(buffer_struct_declare_code) + + +def buf_lookup_full_code(proto, defin, name, nd): + """ + Generates a buffer lookup function for the right number + of dimensions. The function gives back a void* at the right location. + """ + # _i_ndex, _s_tride, sub_o_ffset + macroargs = ", ".join(["i%d, s%d, o%d" % (i, i, i) for i in range(nd)]) + proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs)) + + funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)]) + proto.putln("static CYTHON_INLINE void* %s_imp(void* buf, %s);" % (name, funcargs)) + defin.putln(dedent(""" + static CYTHON_INLINE void* %s_imp(void* buf, %s) { + char* ptr = (char*)buf; + """) % (name, funcargs) + "".join([dedent("""\ + ptr += s%d * i%d; + if (o%d >= 0) ptr = *((char**)ptr) + o%d; + """) % (i, i, i, i) for i in range(nd)] + ) + "\nreturn ptr;\n}") + + +def buf_lookup_strided_code(proto, defin, name, nd): + """ + Generates a buffer lookup function for the right number + of dimensions. The function gives back a void* at the right location. + """ + # _i_ndex, _s_tride + args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)]) + offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd)]) + proto.putln("#define %s(type, buf, %s) (type)((char*)buf + %s)" % (name, args, offset)) + + +def buf_lookup_c_code(proto, defin, name, nd): + """ + Similar to strided lookup, but can assume that the last dimension + doesn't need a stride multiplication, since a C-contiguous buffer + stores its last dimension packed. Still we keep the same signature for now.
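+ + For example, the nd == 2 case below expands to the macro (shown here + for illustration, derived from the putln call that follows): + + #define __Pyx_BufPtrCContig2d(type, buf, i0, s0, i1, s1) ((type)((char*)buf + i0 * s0) + i1)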
+ """ + if nd == 1: + proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name) + else: + args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)]) + offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd - 1)]) + proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, nd - 1)) + + +def buf_lookup_fortran_code(proto, defin, name, nd): + """ + Like C lookup, but the first index is optimized instead. + """ + if nd == 1: + proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name) + else: + args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)]) + offset = " + ".join(["i%d * s%d" % (i, i) for i in range(1, nd)]) + proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, 0)) + + +def use_py2_buffer_functions(env): + env.use_utility_code(GetAndReleaseBufferUtilityCode()) + + +class GetAndReleaseBufferUtilityCode(object): + # Emulation of PyObject_GetBuffer and PyBuffer_Release for Python 2. + # For >= 2.6 we do double mode -- use the new buffer interface on objects + # which has the right tp_flags set, but emulation otherwise. + + requires = None + is_cython_utility = False + + def __init__(self): + pass + + def __eq__(self, other): + return isinstance(other, GetAndReleaseBufferUtilityCode) + + def __hash__(self): + return 24342342 + + def get_tree(self, **kwargs): pass + + def put_code(self, output): + code = output['utility_code_def'] + proto_code = output['utility_code_proto'] + env = output.module_node.scope + cython_scope = env.context.cython_scope + + # Search all types for __getbuffer__ overloads + types = [] + visited_scopes = set() + def find_buffer_types(scope): + if scope in visited_scopes: + return + visited_scopes.add(scope) + for m in scope.cimported_modules: + find_buffer_types(m) + for e in scope.type_entries: + if isinstance(e.utility_code_definition, CythonUtilityCode): + continue + t = e.type + if t.is_extension_type: + if scope is cython_scope and not e.used: + continue + release = get = None + for x in t.scope.pyfunc_entries: + if x.name == u"__getbuffer__": get = x.func_cname + elif x.name == u"__releasebuffer__": release = x.func_cname + if get: + types.append((t.typeptr_cname, get, release)) + + find_buffer_types(env) + + util_code = TempitaUtilityCode.load( + "GetAndReleaseBuffer", from_file="Buffer.c", + context=dict(types=types)) + + proto = util_code.format_code(util_code.proto) + impl = util_code.format_code( + util_code.inject_string_constants(util_code.impl, output)[1]) + + proto_code.putln(proto) + code.putln(impl) + + +def mangle_dtype_name(dtype): + # Use prefixes to separate user defined types from builtins + # (consider "typedef float unsigned_int") + if dtype.is_pyobject: + return "object" + elif dtype.is_ptr: + return "ptr" + else: + if dtype.is_typedef or dtype.is_struct_or_union: + prefix = "nn_" + else: + prefix = "" + return prefix + dtype.specialization_name() + +def get_type_information_cname(code, dtype, maxdepth=None): + """ + Output the run-time type information (__Pyx_TypeInfo) for given dtype, + and return the name of the type info struct. + + Structs with two floats of the same size are encoded as complex numbers. + One can separate between complex numbers declared as struct or with native + encoding by inspecting to see if the fields field of the type is + filled in. 
+ """ + namesuffix = mangle_dtype_name(dtype) + name = "__Pyx_TypeInfo_%s" % namesuffix + structinfo_name = "__Pyx_StructFields_%s" % namesuffix + + if dtype.is_error: return "" + + # It's critical that walking the type info doesn't use more stack + # depth than dtype.struct_nesting_depth() returns, so use an assertion for this + if maxdepth is None: maxdepth = dtype.struct_nesting_depth() + if maxdepth <= 0: + assert False + + if name not in code.globalstate.utility_codes: + code.globalstate.utility_codes.add(name) + typecode = code.globalstate['typeinfo'] + + arraysizes = [] + if dtype.is_array: + while dtype.is_array: + arraysizes.append(dtype.size) + dtype = dtype.base_type + + complex_possible = dtype.is_struct_or_union and dtype.can_be_complex() + + declcode = dtype.empty_declaration_code() + if dtype.is_simple_buffer_dtype(): + structinfo_name = "NULL" + elif dtype.is_struct: + fields = dtype.scope.var_entries + # Must pre-call all used types in order not to recurse utility code + # writing. + assert len(fields) > 0 + types = [get_type_information_cname(code, f.type, maxdepth - 1) + for f in fields] + typecode.putln("static __Pyx_StructField %s[] = {" % structinfo_name, safe=True) + for f, typeinfo in zip(fields, types): + typecode.putln(' {&%s, "%s", offsetof(%s, %s)},' % + (typeinfo, f.name, dtype.empty_declaration_code(), f.cname), safe=True) + typecode.putln(' {NULL, NULL, 0}', safe=True) + typecode.putln("};", safe=True) + else: + assert False + + rep = str(dtype) + + flags = "0" + is_unsigned = "0" + if dtype is PyrexTypes.c_char_type: + is_unsigned = "IS_UNSIGNED(%s)" % declcode + typegroup = "'H'" + elif dtype.is_int: + is_unsigned = "IS_UNSIGNED(%s)" % declcode + typegroup = "%s ? 'U' : 'I'" % is_unsigned + elif complex_possible or dtype.is_complex: + typegroup = "'C'" + elif dtype.is_float: + typegroup = "'R'" + elif dtype.is_struct: + typegroup = "'S'" + if dtype.packed: + flags = "__PYX_BUF_FLAGS_PACKED_STRUCT" + elif dtype.is_pyobject: + typegroup = "'O'" + else: + assert False, dtype + + typeinfo = ('static __Pyx_TypeInfo %s = ' + '{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };') + tup = (name, rep, structinfo_name, declcode, + ', '.join([str(x) for x in arraysizes]) or '0', len(arraysizes), + typegroup, is_unsigned, flags) + typecode.putln(typeinfo % tup, safe=True) + + return name + +def load_buffer_utility(util_code_name, context=None, **kwargs): + if context is None: + return UtilityCode.load(util_code_name, "Buffer.c", **kwargs) + else: + return TempitaUtilityCode.load(util_code_name, "Buffer.c", context=context, **kwargs) + +context = dict(max_dims=Options.buffer_max_dims) +buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare", context=context) +buffer_formats_declare_code = load_buffer_utility("BufferFormatStructs") + +# Utility function to set the right exception +# The caller should immediately goto_error +raise_indexerror_code = load_buffer_utility("BufferIndexError") +raise_indexerror_nogil = load_buffer_utility("BufferIndexErrorNogil") +raise_buffer_fallback_code = load_buffer_utility("BufferFallbackError") + +acquire_utility_code = load_buffer_utility("BufferGetAndValidate", context=context) +buffer_format_check_code = load_buffer_utility("BufferFormatCheck", context=context) + +# See utility code BufferFormatFromTypeInfo +_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat") diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Builtin.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Builtin.py new file mode 
100644 index 0000000..1bdc643 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Builtin.py @@ -0,0 +1,449 @@ +# +# Builtin Definitions +# + +from __future__ import absolute_import + +from .Symtab import BuiltinScope, StructOrUnionScope +from .Code import UtilityCode +from .TypeSlots import Signature +from . import PyrexTypes +from . import Options + + +# C-level implementations of builtin types, functions and methods + +iter_next_utility_code = UtilityCode.load("IterNext", "ObjectHandling.c") +getattr_utility_code = UtilityCode.load("GetAttr", "ObjectHandling.c") +getattr3_utility_code = UtilityCode.load("GetAttr3", "Builtins.c") +pyexec_utility_code = UtilityCode.load("PyExec", "Builtins.c") +pyexec_globals_utility_code = UtilityCode.load("PyExecGlobals", "Builtins.c") +globals_utility_code = UtilityCode.load("Globals", "Builtins.c") + +builtin_utility_code = { + 'StopAsyncIteration': UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"), +} + + +# mapping from builtins to their C-level equivalents + +class _BuiltinOverride(object): + def __init__(self, py_name, args, ret_type, cname, py_equiv="*", + utility_code=None, sig=None, func_type=None, + is_strict_signature=False, builtin_return_type=None): + self.py_name, self.cname, self.py_equiv = py_name, cname, py_equiv + self.args, self.ret_type = args, ret_type + self.func_type, self.sig = func_type, sig + self.builtin_return_type = builtin_return_type + self.is_strict_signature = is_strict_signature + self.utility_code = utility_code + + def build_func_type(self, sig=None, self_arg=None): + if sig is None: + sig = Signature(self.args, self.ret_type) + sig.exception_check = False # not needed for the current builtins + func_type = sig.function_type(self_arg) + if self.is_strict_signature: + func_type.is_strict_signature = True + if self.builtin_return_type: + func_type.return_type = builtin_types[self.builtin_return_type] + return func_type + + +class BuiltinAttribute(object): + def __init__(self, py_name, cname=None, field_type=None, field_type_name=None): + self.py_name = py_name + self.cname = cname or py_name + self.field_type_name = field_type_name # can't do the lookup before the type is declared! 
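+ # (the lookup happens lazily in declare_in_type below, once the builtin scope exists)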
+ self.field_type = field_type + + def declare_in_type(self, self_type): + if self.field_type_name is not None: + # lazy type lookup + field_type = builtin_scope.lookup(self.field_type_name).type + else: + field_type = self.field_type or PyrexTypes.py_object_type + entry = self_type.scope.declare(self.py_name, self.cname, field_type, None, 'private') + entry.is_variable = True + + +class BuiltinFunction(_BuiltinOverride): + def declare_in_scope(self, scope): + func_type, sig = self.func_type, self.sig + if func_type is None: + func_type = self.build_func_type(sig) + scope.declare_builtin_cfunction(self.py_name, func_type, self.cname, + self.py_equiv, self.utility_code) + + +class BuiltinMethod(_BuiltinOverride): + def declare_in_type(self, self_type): + method_type, sig = self.func_type, self.sig + if method_type is None: + # override 'self' type (first argument) + self_arg = PyrexTypes.CFuncTypeArg("", self_type, None) + self_arg.not_none = True + self_arg.accept_builtin_subtypes = True + method_type = self.build_func_type(sig, self_arg) + self_type.scope.declare_builtin_cfunction( + self.py_name, method_type, self.cname, utility_code=self.utility_code) + + +builtin_function_table = [ + # name, args, return, C API func, py equiv = "*" + BuiltinFunction('abs', "d", "d", "fabs", + is_strict_signature = True), + BuiltinFunction('abs', "f", "f", "fabsf", + is_strict_signature = True), + BuiltinFunction('abs', "i", "i", "abs", + is_strict_signature = True), + BuiltinFunction('abs', "l", "l", "labs", + is_strict_signature = True), + BuiltinFunction('abs', None, None, "__Pyx_abs_longlong", + utility_code = UtilityCode.load("abs_longlong", "Builtins.c"), + func_type = PyrexTypes.CFuncType( + PyrexTypes.c_longlong_type, [ + PyrexTypes.CFuncTypeArg("arg", PyrexTypes.c_longlong_type, None) + ], + is_strict_signature = True, nogil=True)), + ] + list( + BuiltinFunction('abs', None, None, "/*abs_{0}*/".format(t.specialization_name()), + func_type = PyrexTypes.CFuncType( + t, + [PyrexTypes.CFuncTypeArg("arg", t, None)], + is_strict_signature = True, nogil=True)) + for t in (PyrexTypes.c_uint_type, PyrexTypes.c_ulong_type, PyrexTypes.c_ulonglong_type) + ) + list( + BuiltinFunction('abs', None, None, "__Pyx_c_abs{0}".format(t.funcsuffix), + func_type = PyrexTypes.CFuncType( + t.real_type, [ + PyrexTypes.CFuncTypeArg("arg", t, None) + ], + is_strict_signature = True, nogil=True)) + for t in (PyrexTypes.c_float_complex_type, + PyrexTypes.c_double_complex_type, + PyrexTypes.c_longdouble_complex_type) + ) + [ + BuiltinFunction('abs', "O", "O", "__Pyx_PyNumber_Absolute", + utility_code=UtilityCode.load("py_abs", "Builtins.c")), + #('all', "", "", ""), + #('any', "", "", ""), + #('ascii', "", "", ""), + #('bin', "", "", ""), + BuiltinFunction('callable', "O", "b", "__Pyx_PyCallable_Check", + utility_code = UtilityCode.load("CallableCheck", "ObjectHandling.c")), + #('chr', "", "", ""), + #('cmp', "", "", "", ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result) + #('compile', "", "", ""), # PyObject* Py_CompileString( char *str, char *filename, int start) + BuiltinFunction('delattr', "OO", "r", "PyObject_DelAttr"), + BuiltinFunction('dir', "O", "O", "PyObject_Dir"), + BuiltinFunction('divmod', "OO", "O", "PyNumber_Divmod"), + BuiltinFunction('exec', "O", "O", "__Pyx_PyExecGlobals", + utility_code = pyexec_globals_utility_code), + BuiltinFunction('exec', "OO", "O", "__Pyx_PyExec2", + utility_code = pyexec_utility_code), + BuiltinFunction('exec', "OOO", "O", "__Pyx_PyExec3", + utility_code = 
pyexec_utility_code), + #('eval', "", "", ""), + #('execfile', "", "", ""), + #('filter', "", "", ""), + BuiltinFunction('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr", + utility_code=getattr3_utility_code), # Pyrex legacy + BuiltinFunction('getattr', "OOO", "O", "__Pyx_GetAttr3", + utility_code=getattr3_utility_code), + BuiltinFunction('getattr', "OO", "O", "__Pyx_GetAttr", + utility_code=getattr_utility_code), + BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr", + utility_code = UtilityCode.load("HasAttr", "Builtins.c")), + BuiltinFunction('hash', "O", "h", "PyObject_Hash"), + #('hex', "", "", ""), + #('id', "", "", ""), + #('input', "", "", ""), + BuiltinFunction('intern', "O", "O", "__Pyx_Intern", + utility_code = UtilityCode.load("Intern", "Builtins.c")), + BuiltinFunction('isinstance', "OO", "b", "PyObject_IsInstance"), + BuiltinFunction('issubclass', "OO", "b", "PyObject_IsSubclass"), + BuiltinFunction('iter', "OO", "O", "PyCallIter_New"), + BuiltinFunction('iter', "O", "O", "PyObject_GetIter"), + BuiltinFunction('len', "O", "z", "PyObject_Length"), + BuiltinFunction('locals', "", "O", "__pyx_locals"), + #('map', "", "", ""), + #('max', "", "", ""), + #('min', "", "", ""), + BuiltinFunction('next', "O", "O", "__Pyx_PyIter_Next", + utility_code = iter_next_utility_code), # not available in Py2 => implemented here + BuiltinFunction('next', "OO", "O", "__Pyx_PyIter_Next2", + utility_code = iter_next_utility_code), # not available in Py2 => implemented here + #('oct', "", "", ""), + #('open', "ss", "O", "PyFile_FromString"), # not in Py3 +] + [ + BuiltinFunction('ord', None, None, "__Pyx_long_cast", + func_type=PyrexTypes.CFuncType( + PyrexTypes.c_long_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)], + is_strict_signature=True)) + for c_type in [PyrexTypes.c_py_ucs4_type, PyrexTypes.c_py_unicode_type] +] + [ + BuiltinFunction('ord', None, None, "__Pyx_uchar_cast", + func_type=PyrexTypes.CFuncType( + PyrexTypes.c_uchar_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)], + is_strict_signature=True)) + for c_type in [PyrexTypes.c_char_type, PyrexTypes.c_schar_type, PyrexTypes.c_uchar_type] +] + [ + BuiltinFunction('ord', None, None, "__Pyx_PyObject_Ord", + utility_code=UtilityCode.load_cached("object_ord", "Builtins.c"), + func_type=PyrexTypes.CFuncType( + PyrexTypes.c_long_type, [ + PyrexTypes.CFuncTypeArg("c", PyrexTypes.py_object_type, None) + ], + exception_value="(long)(Py_UCS4)-1")), + BuiltinFunction('pow', "OOO", "O", "PyNumber_Power"), + BuiltinFunction('pow', "OO", "O", "__Pyx_PyNumber_Power2", + utility_code = UtilityCode.load("pow2", "Builtins.c")), + #('range', "", "", ""), + #('raw_input', "", "", ""), + #('reduce', "", "", ""), + BuiltinFunction('reload', "O", "O", "PyImport_ReloadModule"), + BuiltinFunction('repr', "O", "O", "PyObject_Repr", builtin_return_type='str'), + #('round', "", "", ""), + BuiltinFunction('setattr', "OOO", "r", "PyObject_SetAttr"), + #('sum', "", "", ""), + #('sorted', "", "", ""), + #('type', "O", "O", "PyObject_Type"), + #('unichr', "", "", ""), + #('unicode', "", "", ""), + #('vars', "", "", ""), + #('zip', "", "", ""), + # Can't do these easily until we have builtin type entries. + #('typecheck', "OO", "i", "PyObject_TypeCheck", False), + #('issubtype', "OO", "i", "PyType_IsSubtype", False), + + # Put in namespace append optimization. + BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"), + + # This is conditionally looked up based on a compiler directive. 
+ BuiltinFunction('__Pyx_Globals', "", "O", "__Pyx_Globals", + utility_code=globals_utility_code), +] + + +# Builtin types +# bool +# buffer +# classmethod +# dict +# enumerate +# file +# float +# int +# list +# long +# object +# property +# slice +# staticmethod +# super +# str +# tuple +# type +# xrange + +builtin_types_table = [ + + ("type", "PyType_Type", []), + +# This conflicts with the C++ bool type, and unfortunately +# C++ is too liberal about PyObject* <-> bool conversions, +# resulting in unintuitive runtime behavior and segfaults. +# ("bool", "PyBool_Type", []), + + ("int", "PyInt_Type", []), + ("long", "PyLong_Type", []), + ("float", "PyFloat_Type", []), + + ("complex", "PyComplex_Type", [BuiltinAttribute('cval', field_type_name = 'Py_complex'), + BuiltinAttribute('real', 'cval.real', field_type = PyrexTypes.c_double_type), + BuiltinAttribute('imag', 'cval.imag', field_type = PyrexTypes.c_double_type), + ]), + + ("basestring", "PyBaseString_Type", [ + BuiltinMethod("join", "TO", "T", "__Pyx_PyBaseString_Join", + utility_code=UtilityCode.load("StringJoin", "StringTools.c")), + ]), + ("bytearray", "PyByteArray_Type", [ + ]), + ("bytes", "PyBytes_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), + BuiltinMethod("join", "TO", "O", "__Pyx_PyBytes_Join", + utility_code=UtilityCode.load("StringJoin", "StringTools.c")), + ]), + ("str", "PyString_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), + BuiltinMethod("join", "TO", "O", "__Pyx_PyString_Join", + builtin_return_type='basestring', + utility_code=UtilityCode.load("StringJoin", "StringTools.c")), + ]), + ("unicode", "PyUnicode_Type", [BuiltinMethod("__contains__", "TO", "b", "PyUnicode_Contains"), + BuiltinMethod("join", "TO", "T", "PyUnicode_Join"), + ]), + + ("tuple", "PyTuple_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), + ]), + + ("list", "PyList_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), + BuiltinMethod("insert", "TzO", "r", "PyList_Insert"), + BuiltinMethod("reverse", "T", "r", "PyList_Reverse"), + BuiltinMethod("append", "TO", "r", "__Pyx_PyList_Append", + utility_code=UtilityCode.load("ListAppend", "Optimize.c")), + BuiltinMethod("extend", "TO", "r", "__Pyx_PyList_Extend", + utility_code=UtilityCode.load("ListExtend", "Optimize.c")), + ]), + + ("dict", "PyDict_Type", [BuiltinMethod("__contains__", "TO", "b", "PyDict_Contains"), + BuiltinMethod("has_key", "TO", "b", "PyDict_Contains"), + BuiltinMethod("items", "T", "O", "__Pyx_PyDict_Items", + utility_code=UtilityCode.load("py_dict_items", "Builtins.c")), + BuiltinMethod("keys", "T", "O", "__Pyx_PyDict_Keys", + utility_code=UtilityCode.load("py_dict_keys", "Builtins.c")), + BuiltinMethod("values", "T", "O", "__Pyx_PyDict_Values", + utility_code=UtilityCode.load("py_dict_values", "Builtins.c")), + BuiltinMethod("iteritems", "T", "O", "__Pyx_PyDict_IterItems", + utility_code=UtilityCode.load("py_dict_iteritems", "Builtins.c")), + BuiltinMethod("iterkeys", "T", "O", "__Pyx_PyDict_IterKeys", + utility_code=UtilityCode.load("py_dict_iterkeys", "Builtins.c")), + BuiltinMethod("itervalues", "T", "O", "__Pyx_PyDict_IterValues", + utility_code=UtilityCode.load("py_dict_itervalues", "Builtins.c")), + BuiltinMethod("viewitems", "T", "O", "__Pyx_PyDict_ViewItems", + utility_code=UtilityCode.load("py_dict_viewitems", "Builtins.c")), + BuiltinMethod("viewkeys", "T", "O", "__Pyx_PyDict_ViewKeys", + utility_code=UtilityCode.load("py_dict_viewkeys", "Builtins.c")), + 
BuiltinMethod("viewvalues", "T", "O", "__Pyx_PyDict_ViewValues", + utility_code=UtilityCode.load("py_dict_viewvalues", "Builtins.c")), + BuiltinMethod("clear", "T", "r", "__Pyx_PyDict_Clear", + utility_code=UtilityCode.load("py_dict_clear", "Optimize.c")), + BuiltinMethod("copy", "T", "T", "PyDict_Copy")]), + + ("slice", "PySlice_Type", [BuiltinAttribute('start'), + BuiltinAttribute('stop'), + BuiltinAttribute('step'), + ]), +# ("file", "PyFile_Type", []), # not in Py3 + + ("set", "PySet_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"), + BuiltinMethod("clear", "T", "r", "PySet_Clear"), + # discard() and remove() have a special treatment for unhashable values + BuiltinMethod("discard", "TO", "r", "__Pyx_PySet_Discard", + utility_code=UtilityCode.load("py_set_discard", "Optimize.c")), + BuiltinMethod("remove", "TO", "r", "__Pyx_PySet_Remove", + utility_code=UtilityCode.load("py_set_remove", "Optimize.c")), + # update is actually variadic (see Github issue #1645) +# BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update", +# utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")), + BuiltinMethod("add", "TO", "r", "PySet_Add"), + BuiltinMethod("pop", "T", "O", "PySet_Pop")]), + ("frozenset", "PyFrozenSet_Type", []), + ("Exception", "((PyTypeObject*)PyExc_Exception)[0]", []), + ("StopAsyncIteration", "((PyTypeObject*)__Pyx_PyExc_StopAsyncIteration)[0]", []), +] + + +types_that_construct_their_instance = set([ + # some builtin types do not always return an instance of + # themselves - these do: + 'type', 'bool', 'long', 'float', 'complex', + 'bytes', 'unicode', 'bytearray', + 'tuple', 'list', 'dict', 'set', 'frozenset' + # 'str', # only in Py3.x + # 'file', # only in Py2.x +]) + + +builtin_structs_table = [ + ('Py_buffer', 'Py_buffer', + [("buf", PyrexTypes.c_void_ptr_type), + ("obj", PyrexTypes.py_object_type), + ("len", PyrexTypes.c_py_ssize_t_type), + ("itemsize", PyrexTypes.c_py_ssize_t_type), + ("readonly", PyrexTypes.c_bint_type), + ("ndim", PyrexTypes.c_int_type), + ("format", PyrexTypes.c_char_ptr_type), + ("shape", PyrexTypes.c_py_ssize_t_ptr_type), + ("strides", PyrexTypes.c_py_ssize_t_ptr_type), + ("suboffsets", PyrexTypes.c_py_ssize_t_ptr_type), + ("smalltable", PyrexTypes.CArrayType(PyrexTypes.c_py_ssize_t_type, 2)), + ("internal", PyrexTypes.c_void_ptr_type), + ]), + ('Py_complex', 'Py_complex', + [('real', PyrexTypes.c_double_type), + ('imag', PyrexTypes.c_double_type), + ]) +] + +# set up builtin scope + +builtin_scope = BuiltinScope() + +def init_builtin_funcs(): + for bf in builtin_function_table: + bf.declare_in_scope(builtin_scope) + +builtin_types = {} + +def init_builtin_types(): + global builtin_types + for name, cname, methods in builtin_types_table: + utility = builtin_utility_code.get(name) + if name == 'frozenset': + objstruct_cname = 'PySetObject' + elif name == 'bytearray': + objstruct_cname = 'PyByteArrayObject' + elif name == 'bool': + objstruct_cname = None + elif name == 'Exception': + objstruct_cname = "PyBaseExceptionObject" + elif name == 'StopAsyncIteration': + objstruct_cname = "PyBaseExceptionObject" + else: + objstruct_cname = 'Py%sObject' % name.capitalize() + the_type = builtin_scope.declare_builtin_type(name, cname, utility, objstruct_cname) + builtin_types[name] = the_type + for method in methods: + method.declare_in_type(the_type) + +def init_builtin_structs(): + for name, cname, attribute_types in builtin_structs_table: + scope = StructOrUnionScope(name) + for attribute_name, attribute_type in attribute_types: + 
scope.declare_var(attribute_name, attribute_type, None, + attribute_name, allow_pyobject=True) + builtin_scope.declare_struct_or_union( + name, "struct", scope, 1, None, cname = cname) + + +def init_builtins(): + init_builtin_structs() + init_builtin_types() + init_builtin_funcs() + + builtin_scope.declare_var( + '__debug__', PyrexTypes.c_const_type(PyrexTypes.c_bint_type), + pos=None, cname='(!Py_OptimizeFlag)', is_cdef=True) + + global list_type, tuple_type, dict_type, set_type, frozenset_type + global bytes_type, str_type, unicode_type, basestring_type, slice_type + global float_type, bool_type, type_type, complex_type, bytearray_type + type_type = builtin_scope.lookup('type').type + list_type = builtin_scope.lookup('list').type + tuple_type = builtin_scope.lookup('tuple').type + dict_type = builtin_scope.lookup('dict').type + set_type = builtin_scope.lookup('set').type + frozenset_type = builtin_scope.lookup('frozenset').type + slice_type = builtin_scope.lookup('slice').type + bytes_type = builtin_scope.lookup('bytes').type + str_type = builtin_scope.lookup('str').type + unicode_type = builtin_scope.lookup('unicode').type + basestring_type = builtin_scope.lookup('basestring').type + bytearray_type = builtin_scope.lookup('bytearray').type + float_type = builtin_scope.lookup('float').type + bool_type = builtin_scope.lookup('bool').type + complex_type = builtin_scope.lookup('complex').type + + +init_builtins() diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/CmdLine.py b/venv/lib/python3.8/site-packages/Cython/Compiler/CmdLine.py new file mode 100644 index 0000000..e89e45a --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/CmdLine.py @@ -0,0 +1,224 @@ +# +# Cython - Command Line Parsing +# + +from __future__ import absolute_import + +import os +import sys +from . import Options + +usage = """\ +Cython (http://cython.org) is a compiler for code written in the +Cython language. Cython is based on Pyrex by Greg Ewing. + +Usage: cython [options] sourcefile.{pyx,py} ... + +Options: + -V, --version Display version number of cython compiler + -l, --create-listing Write error messages to a listing file + -I, --include-dir <directory> Search for include files in named directory + (multiple include directories are allowed). + -o, --output-file <filename> Specify name of generated C file + -t, --timestamps Only compile newer source files + -f, --force Compile all source files (overrides implied -t) + -v, --verbose Be verbose, print file names on multiple compilation + -p, --embed-positions If specified, the positions in Cython files of each + function definition are embedded in its docstring. + --cleanup <level> Release interned objects on python exit, for memory debugging. + Level indicates aggressiveness, default 0 releases nothing. + -w, --working <directory> Sets the working directory for Cython (the directory modules + are searched from) + --gdb Output debug information for cygdb + --gdb-outdir <directory> Specify gdb debug information output directory. Implies --gdb. + + -D, --no-docstrings Strip docstrings from the compiled module. + -a, --annotate Produce a colorized HTML version of the source. + --annotate-coverage <cov.xml> Annotate and include coverage information from cov.xml. + --line-directives Produce #line directives pointing to the .pyx source + --cplus Output a C++ rather than C file. + --embed[=<method_name>] Generate a main() function that embeds the Python interpreter. + -2 Compile based on Python-2 syntax and code semantics. + -3 Compile based on Python-3 syntax and code semantics. + --3str Compile based on Python-3 syntax and code semantics without + assuming unicode by default for string literals under Python 2. + --lenient Change some compile time errors to runtime errors to + improve Python compatibility + --capi-reexport-cincludes Add cincluded headers to any auto-generated header files. + --fast-fail Abort the compilation on the first error + --warning-errors, -Werror Make all warnings into errors + --warning-extra, -Wextra Enable extra warnings + -X, --directive <name>=<value>[,<name=value,...] Overrides a compiler directive +""" + if options.output_file and len(sources) > 1: + sys.stderr.write( + "cython: Only one source file allowed when using -o\n") + sys.exit(1) + if len(sources) == 0 and not options.show_version: + bad_usage() + if Options.embed and len(sources) > 1: + sys.stderr.write( + "cython: Only one source file allowed when using -embed\n") + sys.exit(1) + return options, sources + diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Code.pxd b/venv/lib/python3.8/site-packages/Cython/Compiler/Code.pxd new file mode 100644 index 0000000..01f7a71 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Code.pxd @@ -0,0 +1,123 @@ + +from __future__ import absolute_import + +cimport cython +from ..StringIOTree cimport StringIOTree + + +cdef class UtilityCodeBase(object): + cpdef format_code(self, code_string, replace_empty_lines=*) + + +cdef class UtilityCode(UtilityCodeBase): + cdef public object name + cdef public object proto + cdef public object impl + cdef public object init + cdef public object cleanup + cdef public object proto_block + cdef public object requires + cdef public dict _cache + cdef public list specialize_list + cdef public object file + + cpdef none_or_sub(self, s, context) + + +cdef class FunctionState: + cdef public set names_taken + cdef public object owner + cdef public object scope + + cdef public object error_label + cdef public size_t label_counter + cdef public set labels_used + cdef public object return_label + cdef public object continue_label + cdef public object break_label + cdef public list yield_labels + + cdef public object return_from_error_cleanup_label # not used in __init__ ?
+ + cdef public object exc_vars + cdef public object current_except + cdef public bint in_try_finally + cdef public bint can_trace + cdef public bint gil_owned + + cdef public list temps_allocated + cdef public dict temps_free + cdef public dict temps_used_type + cdef public size_t temp_counter + cdef public list collect_temps_stack + + cdef public object closure_temps + cdef public bint should_declare_error_indicator + cdef public bint uses_error_indicator + + @cython.locals(n=size_t) + cpdef new_label(self, name=*) + cpdef tuple get_loop_labels(self) + cpdef set_loop_labels(self, labels) + cpdef tuple get_all_labels(self) + cpdef set_all_labels(self, labels) + cpdef start_collecting_temps(self) + cpdef stop_collecting_temps(self) + + cpdef list temps_in_use(self) + +cdef class IntConst: + cdef public object cname + cdef public object value + cdef public bint is_long + +cdef class PyObjectConst: + cdef public object cname + cdef public object type + +cdef class StringConst: + cdef public object cname + cdef public object text + cdef public object escaped_value + cdef public dict py_strings + cdef public list py_versions + + @cython.locals(intern=bint, is_str=bint, is_unicode=bint) + cpdef get_py_string_const(self, encoding, identifier=*, is_str=*, py3str_cstring=*) + +## cdef class PyStringConst: +## cdef public object cname +## cdef public object encoding +## cdef public bint is_str +## cdef public bint is_unicode +## cdef public bint intern + +#class GlobalState(object): + +#def funccontext_property(name): + +cdef class CCodeWriter(object): + cdef readonly StringIOTree buffer + cdef readonly list pyclass_stack + cdef readonly object globalstate + cdef readonly object funcstate + cdef object code_config + cdef object last_pos + cdef object last_marked_pos + cdef Py_ssize_t level + cdef public Py_ssize_t call_level # debug-only, see Nodes.py + cdef bint bol + + cpdef write(self, s) + cpdef put(self, code) + cpdef put_safe(self, code) + cpdef putln(self, code=*, bint safe=*) + @cython.final + cdef increase_indent(self) + @cython.final + cdef decrease_indent(self) + + +cdef class PyrexCodeWriter: + cdef public object f + cdef public Py_ssize_t level diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Code.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Code.py new file mode 100644 index 0000000..b9c689c --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Code.py @@ -0,0 +1,2575 @@ +# cython: language_level = 2 +# cython: auto_pickle=False +# +# Code output module +# + +from __future__ import absolute_import + +import cython +cython.declare(os=object, re=object, operator=object, textwrap=object, + Template=object, Naming=object, Options=object, StringEncoding=object, + Utils=object, SourceDescriptor=object, StringIOTree=object, + DebugFlags=object, basestring=object, defaultdict=object, + closing=object, partial=object) + +import os +import re +import shutil +import sys +import operator +import textwrap +from string import Template +from functools import partial +from contextlib import closing +from collections import defaultdict + +try: + import hashlib +except ImportError: + import md5 as hashlib + +from . import Naming +from . import Options +from . import DebugFlags +from . import StringEncoding +from . import Version +from .. 
import Utils +from .Scanning import SourceDescriptor +from ..StringIOTree import StringIOTree + +try: + from __builtin__ import basestring +except ImportError: + from builtins import str as basestring + +KEYWORDS_MUST_BE_BYTES = sys.version_info < (2, 7) + + +non_portable_builtins_map = { + # builtins that have different names in different Python versions + 'bytes' : ('PY_MAJOR_VERSION < 3', 'str'), + 'unicode' : ('PY_MAJOR_VERSION >= 3', 'str'), + 'basestring' : ('PY_MAJOR_VERSION >= 3', 'str'), + 'xrange' : ('PY_MAJOR_VERSION >= 3', 'range'), + 'raw_input' : ('PY_MAJOR_VERSION >= 3', 'input'), +} + +ctypedef_builtins_map = { + # types of builtins in "ctypedef class" statements which we don't + # import either because the names conflict with C types or because + # the type simply is not exposed. + 'py_int' : '&PyInt_Type', + 'py_long' : '&PyLong_Type', + 'py_float' : '&PyFloat_Type', + 'wrapper_descriptor' : '&PyWrapperDescr_Type', +} + +basicsize_builtins_map = { + # builtins whose type has a different tp_basicsize than sizeof(...) + 'PyTypeObject': 'PyHeapTypeObject', +} + +uncachable_builtins = [ + # Global/builtin names that cannot be cached because they may or may not + # be available at import time, for various reasons: + ## - Py3.7+ + 'breakpoint', # might deserve an implementation in Cython + ## - Py3.4+ + '__loader__', + '__spec__', + ## - Py3+ + 'BlockingIOError', + 'BrokenPipeError', + 'ChildProcessError', + 'ConnectionAbortedError', + 'ConnectionError', + 'ConnectionRefusedError', + 'ConnectionResetError', + 'FileExistsError', + 'FileNotFoundError', + 'InterruptedError', + 'IsADirectoryError', + 'ModuleNotFoundError', + 'NotADirectoryError', + 'PermissionError', + 'ProcessLookupError', + 'RecursionError', + 'ResourceWarning', + #'StopAsyncIteration', # backported + 'TimeoutError', + '__build_class__', + 'ascii', # might deserve an implementation in Cython + #'exec', # implemented in Cython + ## - Py2.7+ + 'memoryview', + ## - platform specific + 'WindowsError', + ## - others + '_', # e.g. used by gettext +] + +special_py_methods = set([ + '__cinit__', '__dealloc__', '__richcmp__', '__next__', + '__await__', '__aiter__', '__anext__', + '__getreadbuffer__', '__getwritebuffer__', '__getsegcount__', + '__getcharbuffer__', '__getbuffer__', '__releasebuffer__' +]) + +modifier_output_mapper = { + 'inline': 'CYTHON_INLINE' +}.get + + +class IncludeCode(object): + """ + An include file and/or verbatim C code to be included in the + generated sources. + """ + # attributes: + # + # pieces {order: unicode}: pieces of C code to be generated. + # For the included file, the key "order" is zero. + # For verbatim include code, the "order" is the "order" + # attribute of the original IncludeCode where this piece + # of C code was first added. This is needed to prevent + # duplication if the same include code is found through + # multiple cimports. + # location int: where to put this include in the C sources, one + # of the constants INITIAL, EARLY, LATE + # order int: sorting order (automatically set by increasing counter) + + # Constants for location. If the same include occurs with different + # locations, the earliest one takes precedence.
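+ # For example (mirroring __init__ below): a system include such as + # "#include <stdio.h>" is placed EARLY, a quoted include defaults to LATE, + # and initial=True forces INITIAL.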
+
+    INITIAL = 0
+    EARLY = 1
+    LATE = 2
+
+    counter = 1  # Counter for "order"
+
+    def __init__(self, include=None, verbatim=None, late=True, initial=False):
+        self.order = self.counter
+        type(self).counter += 1
+        self.pieces = {}
+
+        if include:
+            if include[0] == '<' and include[-1] == '>':
+                self.pieces[0] = u'#include {0}'.format(include)
+                late = False  # system include is never late
+            else:
+                self.pieces[0] = u'#include "{0}"'.format(include)
+
+        if verbatim:
+            self.pieces[self.order] = verbatim
+
+        if initial:
+            self.location = self.INITIAL
+        elif late:
+            self.location = self.LATE
+        else:
+            self.location = self.EARLY
+
+    def dict_update(self, d, key):
+        """
+        Insert `self` in dict `d` with key `key`. If that key already
+        exists, update the attributes of the existing value with `self`.
+        """
+        if key in d:
+            other = d[key]
+            other.location = min(self.location, other.location)
+            other.pieces.update(self.pieces)
+        else:
+            d[key] = self
+
+    def sortkey(self):
+        return self.order
+
+    def mainpiece(self):
+        """
+        Return the main piece of C code, corresponding to the include
+        file. If there was no include file, return None.
+        """
+        return self.pieces.get(0)
+
+    def write(self, code):
+        # Write values of self.pieces dict, sorted by the keys
+        for k in sorted(self.pieces):
+            code.putln(self.pieces[k])
+
+
+def get_utility_dir():
+    # make this a function and not global variables:
+    # http://trac.cython.org/cython_trac/ticket/475
+    Cython_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+    return os.path.join(Cython_dir, "Utility")
+
+
+class UtilityCodeBase(object):
+    """
+    Support for loading utility code from a file.
+
+    Code sections in the file can be specified as follows:
+
+        ##### MyUtility.proto #####
+
+        [proto declarations]
+
+        ##### MyUtility.init #####
+
+        [code run at module initialization]
+
+        ##### MyUtility #####
+        #@requires: MyOtherUtility
+        #@substitute: naming
+
+        [definitions]
+
+    for prototypes and implementation respectively. For non-python or
+    -cython files, forward slashes should be used as the comment character
+    instead of '#'. 5 to 30 comment characters may be used on either side.
+
+    If the @cname decorator is not used and this is a CythonUtilityCode,
+    one should pass in the 'name' keyword argument to be used for name
+    mangling of such entries.
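+
+    Usage sketch (editor's example; it relies only on the load/load_cached
+    classmethods defined below and on utility files that ship in
+    Cython/Utility):
+
+        proto, impl = UtilityCode.load_as_string(
+            "GetBuiltinName", from_file="ObjectHandling.c")
+        cached = UtilityCode.load_cached("GetBuiltinName", "ObjectHandling.c")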
+ """ + + is_cython_utility = False + _utility_cache = {} + + @classmethod + def _add_utility(cls, utility, type, lines, begin_lineno, tags=None): + if utility is None: + return + + code = '\n'.join(lines) + if tags and 'substitute' in tags and tags['substitute'] == set(['naming']): + del tags['substitute'] + try: + code = Template(code).substitute(vars(Naming)) + except (KeyError, ValueError) as e: + raise RuntimeError("Error parsing templated utility code of type '%s' at line %d: %s" % ( + type, begin_lineno, e)) + + # remember correct line numbers at least until after templating + code = '\n' * begin_lineno + code + + if type == 'proto': + utility[0] = code + elif type == 'impl': + utility[1] = code + else: + all_tags = utility[2] + if KEYWORDS_MUST_BE_BYTES: + type = type.encode('ASCII') + all_tags[type] = code + + if tags: + all_tags = utility[2] + for name, values in tags.items(): + if KEYWORDS_MUST_BE_BYTES: + name = name.encode('ASCII') + all_tags.setdefault(name, set()).update(values) + + @classmethod + def load_utilities_from_file(cls, path): + utilities = cls._utility_cache.get(path) + if utilities: + return utilities + + filename = os.path.join(get_utility_dir(), path) + _, ext = os.path.splitext(path) + if ext in ('.pyx', '.py', '.pxd', '.pxi'): + comment = '#' + strip_comments = partial(re.compile(r'^\s*#(?!\s*cython\s*:).*').sub, '') + rstrip = StringEncoding._unicode.rstrip + else: + comment = '/' + strip_comments = partial(re.compile(r'^\s*//.*|/\*[^*]*\*/').sub, '') + rstrip = partial(re.compile(r'\s+(\\?)$').sub, r'\1') + match_special = re.compile( + (r'^%(C)s{5,30}\s*(?P(?:\w|\.)+)\s*%(C)s{5,30}|' + r'^%(C)s+@(?P\w+)\s*:\s*(?P(?:\w|[.:])+)') % + {'C': comment}).match + match_type = re.compile(r'(.+)[.](proto(?:[.]\S+)?|impl|init|cleanup)$').match + + with closing(Utils.open_source_file(filename, encoding='UTF-8')) as f: + all_lines = f.readlines() + + utilities = defaultdict(lambda: [None, None, {}]) + lines = [] + tags = defaultdict(set) + utility = type = None + begin_lineno = 0 + + for lineno, line in enumerate(all_lines): + m = match_special(line) + if m: + if m.group('name'): + cls._add_utility(utility, type, lines, begin_lineno, tags) + + begin_lineno = lineno + 1 + del lines[:] + tags.clear() + + name = m.group('name') + mtype = match_type(name) + if mtype: + name, type = mtype.groups() + else: + type = 'impl' + utility = utilities[name] + else: + tags[m.group('tag')].add(m.group('value')) + lines.append('') # keep line number correct + else: + lines.append(rstrip(strip_comments(line))) + + if utility is None: + raise ValueError("Empty utility code file") + + # Don't forget to add the last utility code + cls._add_utility(utility, type, lines, begin_lineno, tags) + + utilities = dict(utilities) # un-defaultdict-ify + cls._utility_cache[path] = utilities + return utilities + + @classmethod + def load(cls, util_code_name, from_file=None, **kwargs): + """ + Load utility code from a file specified by from_file (relative to + Cython/Utility) and name util_code_name. If from_file is not given, + load it from the file util_code_name.*. There should be only one + file matched by this pattern. + """ + if '::' in util_code_name: + from_file, util_code_name = util_code_name.rsplit('::', 1) + if not from_file: + utility_dir = get_utility_dir() + prefix = util_code_name + '.' 
+            try:
+                listing = os.listdir(utility_dir)
+            except OSError:
+                # XXX the code below assumes a 'zipimport.zipimporter' instance
+                # XXX should be easy to generalize, but too lazy right now to write it
+                import zipfile
+                global __loader__
+                loader = __loader__
+                archive = loader.archive
+                with closing(zipfile.ZipFile(archive)) as fileobj:
+                    listing = [os.path.basename(name)
+                               for name in fileobj.namelist()
+                               if os.path.join(archive, name).startswith(utility_dir)]
+            files = [filename for filename in listing
+                     if filename.startswith(prefix)]
+            if not files:
+                raise ValueError("No match found for utility code " + util_code_name)
+            if len(files) > 1:
+                raise ValueError("More than one filename match found for utility code " + util_code_name)
+            from_file = files[0]
+
+        utilities = cls.load_utilities_from_file(from_file)
+        proto, impl, tags = utilities[util_code_name]
+
+        if tags:
+            orig_kwargs = kwargs.copy()
+            for name, values in tags.items():
+                if name in kwargs:
+                    continue
+                # only pass lists when we have to: most arguments expect one value or None
+                if name == 'requires':
+                    if orig_kwargs:
+                        values = [cls.load(dep, from_file, **orig_kwargs)
+                                  for dep in sorted(values)]
+                    else:
+                        # dependencies are rarely unique, so use load_cached() when we can
+                        values = [cls.load_cached(dep, from_file)
+                                  for dep in sorted(values)]
+                elif not values:
+                    values = None
+                elif len(values) == 1:
+                    values = list(values)[0]
+                kwargs[name] = values
+
+        if proto is not None:
+            kwargs['proto'] = proto
+        if impl is not None:
+            kwargs['impl'] = impl
+
+        if 'name' not in kwargs:
+            kwargs['name'] = util_code_name
+
+        if 'file' not in kwargs and from_file:
+            kwargs['file'] = from_file
+        return cls(**kwargs)
+
+    @classmethod
+    def load_cached(cls, utility_code_name, from_file=None, __cache={}):
+        """
+        Calls .load(), but using a per-type cache based on utility name and file name.
+        """
+        key = (cls, from_file, utility_code_name)
+        try:
+            return __cache[key]
+        except KeyError:
+            pass
+        code = __cache[key] = cls.load(utility_code_name, from_file)
+        return code
+
+    @classmethod
+    def load_as_string(cls, util_code_name, from_file=None, **kwargs):
+        """
+        Load a utility code as a string. Returns (proto, implementation)
+        """
+        util = cls.load(util_code_name, from_file, **kwargs)
+        proto, impl = util.proto, util.impl
+        return util.format_code(proto), util.format_code(impl)
+
+    def format_code(self, code_string, replace_empty_lines=re.compile(r'\n\n+').sub):
+        """
+        Format a code section for output.
+        """
+        if code_string:
+            code_string = replace_empty_lines('\n', code_string.strip()) + '\n\n'
+        return code_string
+
+    def __str__(self):
+        return "<%s(%s)>" % (type(self).__name__, self.name)
+
+    def get_tree(self, **kwargs):
+        pass
+
+    def __deepcopy__(self, memodict=None):
+        # No need to deep-copy utility code since it's essentially immutable.
+        return self
+
+
+class UtilityCode(UtilityCodeBase):
+    """
+    Stores utility code to add during code generation.
+
+    See GlobalState.put_utility_code.
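+
+    Construction sketch (editor's example; the helper and its C names are
+    hypothetical):
+
+        helper = UtilityCode(
+            proto="static CYTHON_INLINE int __pyx_is_even(long n);",
+            impl="static CYTHON_INLINE int __pyx_is_even(long n)"
+                 " { return (n % 2) == 0; }",
+        )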
+ + hashes/equals by instance + + proto C prototypes + impl implementation code + init code to call on module initialization + requires utility code dependencies + proto_block the place in the resulting file where the prototype should + end up + name name of the utility code (or None) + file filename of the utility code file this utility was loaded + from (or None) + """ + + def __init__(self, proto=None, impl=None, init=None, cleanup=None, requires=None, + proto_block='utility_code_proto', name=None, file=None): + # proto_block: Which code block to dump prototype in. See GlobalState. + self.proto = proto + self.impl = impl + self.init = init + self.cleanup = cleanup + self.requires = requires + self._cache = {} + self.specialize_list = [] + self.proto_block = proto_block + self.name = name + self.file = file + + def __hash__(self): + return hash((self.proto, self.impl)) + + def __eq__(self, other): + if self is other: + return True + self_type, other_type = type(self), type(other) + if self_type is not other_type and not (isinstance(other, self_type) or isinstance(self, other_type)): + return False + + self_proto = getattr(self, 'proto', None) + other_proto = getattr(other, 'proto', None) + return (self_proto, self.impl) == (other_proto, other.impl) + + def none_or_sub(self, s, context): + """ + Format a string in this utility code with context. If None, do nothing. + """ + if s is None: + return None + return s % context + + def specialize(self, pyrex_type=None, **data): + # Dicts aren't hashable... + if pyrex_type is not None: + data['type'] = pyrex_type.empty_declaration_code() + data['type_name'] = pyrex_type.specialization_name() + key = tuple(sorted(data.items())) + try: + return self._cache[key] + except KeyError: + if self.requires is None: + requires = None + else: + requires = [r.specialize(data) for r in self.requires] + + s = self._cache[key] = UtilityCode( + self.none_or_sub(self.proto, data), + self.none_or_sub(self.impl, data), + self.none_or_sub(self.init, data), + self.none_or_sub(self.cleanup, data), + requires, + self.proto_block) + + self.specialize_list.append(s) + return s + + def inject_string_constants(self, impl, output): + """Replace 'PYIDENT("xyz")' by a constant Python identifier cname. + """ + if 'PYIDENT(' not in impl and 'PYUNICODE(' not in impl: + return False, impl + + replacements = {} + def externalise(matchobj): + key = matchobj.groups() + try: + cname = replacements[key] + except KeyError: + str_type, name = key + cname = replacements[key] = output.get_py_string_const( + StringEncoding.EncodedString(name), identifier=str_type == 'IDENT').cname + return cname + + impl = re.sub(r'PY(IDENT|UNICODE)\("([^"]+)"\)', externalise, impl) + assert 'PYIDENT(' not in impl and 'PYUNICODE(' not in impl + return bool(replacements), impl + + def inject_unbound_methods(self, impl, output): + """Replace 'UNBOUND_METHOD(type, "name")' by a constant Python identifier cname. 
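+
+        For instance (editor's illustration), an occurrence of
+        CALL_UNBOUND_METHOD(PyDict_Type, "get", obj, key) in utility code is
+        rewritten into a call through a cached __Pyx_CachedCFunction slot.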
+ """ + if 'CALL_UNBOUND_METHOD(' not in impl: + return False, impl + + utility_code = set() + def externalise(matchobj): + type_cname, method_name, obj_cname, args = matchobj.groups() + args = [arg.strip() for arg in args[1:].split(',')] if args else [] + assert len(args) < 3, "CALL_UNBOUND_METHOD() does not support %d call arguments" % len(args) + return output.cached_unbound_method_call_code(obj_cname, type_cname, method_name, args) + + impl = re.sub( + r'CALL_UNBOUND_METHOD\(' + r'([a-zA-Z_]+),' # type cname + r'\s*"([^"]+)",' # method name + r'\s*([^),]+)' # object cname + r'((?:,\s*[^),]+)*)' # args* + r'\)', externalise, impl) + assert 'CALL_UNBOUND_METHOD(' not in impl + + for helper in sorted(utility_code): + output.use_utility_code(UtilityCode.load_cached(helper, "ObjectHandling.c")) + return bool(utility_code), impl + + def wrap_c_strings(self, impl): + """Replace CSTRING('''xyz''') by a C compatible string + """ + if 'CSTRING(' not in impl: + return impl + + def split_string(matchobj): + content = matchobj.group(1).replace('"', '\042') + return ''.join( + '"%s\\n"\n' % line if not line.endswith('\\') or line.endswith('\\\\') else '"%s"\n' % line[:-1] + for line in content.splitlines()) + + impl = re.sub(r'CSTRING\(\s*"""([^"]*(?:"[^"]+)*)"""\s*\)', split_string, impl) + assert 'CSTRING(' not in impl + return impl + + def put_code(self, output): + if self.requires: + for dependency in self.requires: + output.use_utility_code(dependency) + if self.proto: + writer = output[self.proto_block] + writer.putln("/* %s.proto */" % self.name) + writer.put_or_include( + self.format_code(self.proto), '%s_proto' % self.name) + if self.impl: + impl = self.format_code(self.wrap_c_strings(self.impl)) + is_specialised1, impl = self.inject_string_constants(impl, output) + is_specialised2, impl = self.inject_unbound_methods(impl, output) + writer = output['utility_code_def'] + writer.putln("/* %s */" % self.name) + if not (is_specialised1 or is_specialised2): + # no module specific adaptations => can be reused + writer.put_or_include(impl, '%s_impl' % self.name) + else: + writer.put(impl) + if self.init: + writer = output['init_globals'] + writer.putln("/* %s.init */" % self.name) + if isinstance(self.init, basestring): + writer.put(self.format_code(self.init)) + else: + self.init(writer, output.module_pos) + writer.putln(writer.error_goto_if_PyErr(output.module_pos)) + writer.putln() + if self.cleanup and Options.generate_cleanup_code: + writer = output['cleanup_globals'] + writer.putln("/* %s.cleanup */" % self.name) + if isinstance(self.cleanup, basestring): + writer.put_or_include( + self.format_code(self.cleanup), + '%s_cleanup' % self.name) + else: + self.cleanup(writer, output.module_pos) + + +def sub_tempita(s, context, file=None, name=None): + "Run tempita on string s with given context." 
+    if not s:
+        return None
+
+    if file:
+        context['__name'] = "%s:%s" % (file, name)
+    elif name:
+        context['__name'] = name
+
+    from ..Tempita import sub
+    return sub(s, **context)
+
+
+class TempitaUtilityCode(UtilityCode):
+    def __init__(self, name=None, proto=None, impl=None, init=None, file=None, context=None, **kwargs):
+        if context is None:
+            context = {}
+        proto = sub_tempita(proto, context, file, name)
+        impl = sub_tempita(impl, context, file, name)
+        init = sub_tempita(init, context, file, name)
+        super(TempitaUtilityCode, self).__init__(
+            proto, impl, init=init, name=name, file=file, **kwargs)
+
+    @classmethod
+    def load_cached(cls, utility_code_name, from_file=None, context=None, __cache={}):
+        context_key = tuple(sorted(context.items())) if context else None
+        assert hash(context_key) is not None  # raise TypeError if not hashable
+        key = (cls, from_file, utility_code_name, context_key)
+        try:
+            return __cache[key]
+        except KeyError:
+            pass
+        code = __cache[key] = cls.load(utility_code_name, from_file, context=context)
+        return code
+
+    def none_or_sub(self, s, context):
+        """
+        Format a string in this utility code with context. If None, do nothing.
+        """
+        if s is None:
+            return None
+        return sub_tempita(s, context, self.file, self.name)
+
+
+class LazyUtilityCode(UtilityCodeBase):
+    """
+    Utility code that calls a callback with the root code writer when
+    available. Useful when you only have 'env' but not 'code'.
+    """
+    __name__ = '<lazy>'
+    requires = None
+
+    def __init__(self, callback):
+        self.callback = callback
+
+    def put_code(self, globalstate):
+        utility = self.callback(globalstate.rootwriter)
+        globalstate.use_utility_code(utility)
+
+
+class FunctionState(object):
+    # return_label     string          function return point label
+    # error_label      string          error catch point label
+    # continue_label   string          loop continue point label
+    # break_label      string          loop break point label
+    # return_from_error_cleanup_label string
+    # label_counter    integer         counter for naming labels
+    # in_try_finally   boolean         inside try of try...finally
+    # exc_vars         (string * 3)    exception variables for reraise, or None
+    # can_trace        boolean         line tracing is supported in the current context
+    # scope            Scope           the scope object of the current function
+
+    # Not used for now, perhaps later
+    def __init__(self, owner, names_taken=set(), scope=None):
+        self.names_taken = names_taken
+        self.owner = owner
+        self.scope = scope
+
+        self.error_label = None
+        self.label_counter = 0
+        self.labels_used = set()
+        self.return_label = self.new_label()
+        self.new_error_label()
+        self.continue_label = None
+        self.break_label = None
+        self.yield_labels = []
+
+        self.in_try_finally = 0
+        self.exc_vars = None
+        self.current_except = None
+        self.can_trace = False
+        self.gil_owned = True
+
+        self.temps_allocated = []  # of (name, type, manage_ref, static)
+        self.temps_free = {}  # (type, manage_ref) -> list of free vars with same type/managed status
+        self.temps_used_type = {}  # name -> (type, manage_ref)
+        self.temp_counter = 0
+        self.closure_temps = None
+
+        # This is used to collect temporaries, useful to find out which temps
+        # need to be privatized in parallel sections
+        self.collect_temps_stack = []
+
+        # This is used for the error indicator, which needs to be local to the
+        # function. It used to be global, which relies on the GIL being held.
+        # However, exceptions may need to be propagated through 'nogil'
+        # sections, in which case we introduce a race condition.
+        self.should_declare_error_indicator = False
+        self.uses_error_indicator = False
+
+    # labels
+
+    def new_label(self, name=None):
+        n = self.label_counter
+        self.label_counter = n + 1
+        label = "%s%d" % (Naming.label_prefix, n)
+        if name is not None:
+            label += '_' + name
+        return label
+
+    def new_yield_label(self, expr_type='yield'):
+        label = self.new_label('resume_from_%s' % expr_type)
+        num_and_label = (len(self.yield_labels) + 1, label)
+        self.yield_labels.append(num_and_label)
+        return num_and_label
+
+    def new_error_label(self):
+        old_err_lbl = self.error_label
+        self.error_label = self.new_label('error')
+        return old_err_lbl
+
+    def get_loop_labels(self):
+        return (
+            self.continue_label,
+            self.break_label)
+
+    def set_loop_labels(self, labels):
+        (self.continue_label,
+         self.break_label) = labels
+
+    def new_loop_labels(self):
+        old_labels = self.get_loop_labels()
+        self.set_loop_labels(
+            (self.new_label("continue"),
+             self.new_label("break")))
+        return old_labels
+
+    def get_all_labels(self):
+        return (
+            self.continue_label,
+            self.break_label,
+            self.return_label,
+            self.error_label)
+
+    def set_all_labels(self, labels):
+        (self.continue_label,
+         self.break_label,
+         self.return_label,
+         self.error_label) = labels
+
+    def all_new_labels(self):
+        old_labels = self.get_all_labels()
+        new_labels = []
+        for old_label, name in zip(old_labels, ['continue', 'break', 'return', 'error']):
+            if old_label:
+                new_labels.append(self.new_label(name))
+            else:
+                new_labels.append(old_label)
+        self.set_all_labels(new_labels)
+        return old_labels
+
+    def use_label(self, lbl):
+        self.labels_used.add(lbl)
+
+    def label_used(self, lbl):
+        return lbl in self.labels_used
+
+    # temp handling
+
+    def allocate_temp(self, type, manage_ref, static=False):
+        """
+        Allocates a temporary (which may create a new one or get a previously
+        allocated and released one of the same type). Type is simply registered
+        and handed back, but will usually be a PyrexType.
+
+        If type.is_pyobject, manage_ref comes into play. If manage_ref is set to
+        True, the temp will be decref-ed on return statements and in exception
+        handling clauses. Otherwise the caller has to deal with any reference
+        counting of the variable.
+
+        If not type.is_pyobject, then manage_ref will be ignored, but it
+        still has to be passed. It is recommended to pass False by convention
+        if it is known that type will never be a Python object.
+
+        static=True marks the temporary declaration with "static".
+        This is only used when allocating backing store for module-level
+        C array literals.
+
+        A C string referring to the variable is returned.
+        """
+        if type.is_const and not type.is_reference:
+            type = type.const_base_type
+        elif type.is_reference and not type.is_fake_reference:
+            type = type.ref_base_type
+        elif type.is_cfunction:
+            from . import PyrexTypes
+            type = PyrexTypes.c_ptr_type(type)  # A function itself isn't an l-value
+        if not type.is_pyobject and not type.is_memoryviewslice:
+            # Make manage_ref canonical, so that manage_ref will always mean
+            # a decref is needed.
+ manage_ref = False + + freelist = self.temps_free.get((type, manage_ref)) + if freelist is not None and freelist[0]: + result = freelist[0].pop() + freelist[1].remove(result) + else: + while True: + self.temp_counter += 1 + result = "%s%d" % (Naming.codewriter_temp_prefix, self.temp_counter) + if result not in self.names_taken: break + self.temps_allocated.append((result, type, manage_ref, static)) + self.temps_used_type[result] = (type, manage_ref) + if DebugFlags.debug_temp_code_comments: + self.owner.putln("/* %s allocated (%s) */" % (result, type)) + + if self.collect_temps_stack: + self.collect_temps_stack[-1].add((result, type)) + + return result + + def release_temp(self, name): + """ + Releases a temporary so that it can be reused by other code needing + a temp of the same type. + """ + type, manage_ref = self.temps_used_type[name] + freelist = self.temps_free.get((type, manage_ref)) + if freelist is None: + freelist = ([], set()) # keep order in list and make lookups in set fast + self.temps_free[(type, manage_ref)] = freelist + if name in freelist[1]: + raise RuntimeError("Temp %s freed twice!" % name) + freelist[0].append(name) + freelist[1].add(name) + if DebugFlags.debug_temp_code_comments: + self.owner.putln("/* %s released */" % name) + + def temps_in_use(self): + """Return a list of (cname,type,manage_ref) tuples of temp names and their type + that are currently in use. + """ + used = [] + for name, type, manage_ref, static in self.temps_allocated: + freelist = self.temps_free.get((type, manage_ref)) + if freelist is None or name not in freelist[1]: + used.append((name, type, manage_ref and type.is_pyobject)) + return used + + def temps_holding_reference(self): + """Return a list of (cname,type) tuples of temp names and their type + that are currently in use. This includes only temps of a + Python object type which owns its reference. + """ + return [(name, type) + for name, type, manage_ref in self.temps_in_use() + if manage_ref and type.is_pyobject] + + def all_managed_temps(self): + """Return a list of (cname, type) tuples of refcount-managed Python objects. + """ + return [(cname, type) + for cname, type, manage_ref, static in self.temps_allocated + if manage_ref] + + def all_free_managed_temps(self): + """Return a list of (cname, type) tuples of refcount-managed Python + objects that are not currently in use. This is used by + try-except and try-finally blocks to clean up temps in the + error case. + """ + return sorted([ # Enforce deterministic order. + (cname, type) + for (type, manage_ref), freelist in self.temps_free.items() if manage_ref + for cname in freelist[0] + ]) + + def start_collecting_temps(self): + """ + Useful to find out which temps were used in a code block + """ + self.collect_temps_stack.append(set()) + + def stop_collecting_temps(self): + return self.collect_temps_stack.pop() + + def init_closure_temps(self, scope): + self.closure_temps = ClosureTempAllocator(scope) + + +class NumConst(object): + """Global info about a Python number constant held by GlobalState. + + cname string + value string + py_type string int, long, float + value_code string evaluation code if different from value + """ + + def __init__(self, cname, value, py_type, value_code=None): + self.cname = cname + self.value = value + self.py_type = py_type + self.value_code = value_code or value + + +class PyObjectConst(object): + """Global info about a generic constant held by GlobalState. 
+ """ + # cname string + # type PyrexType + + def __init__(self, cname, type): + self.cname = cname + self.type = type + + +cython.declare(possible_unicode_identifier=object, possible_bytes_identifier=object, + replace_identifier=object, find_alphanums=object) +possible_unicode_identifier = re.compile(br"(?![0-9])\w+$".decode('ascii'), re.U).match +possible_bytes_identifier = re.compile(r"(?![0-9])\w+$".encode('ASCII')).match +replace_identifier = re.compile(r'[^a-zA-Z0-9_]+').sub +find_alphanums = re.compile('([a-zA-Z0-9]+)').findall + +class StringConst(object): + """Global info about a C string constant held by GlobalState. + """ + # cname string + # text EncodedString or BytesLiteral + # py_strings {(identifier, encoding) : PyStringConst} + + def __init__(self, cname, text, byte_string): + self.cname = cname + self.text = text + self.escaped_value = StringEncoding.escape_byte_string(byte_string) + self.py_strings = None + self.py_versions = [] + + def add_py_version(self, version): + if not version: + self.py_versions = [2, 3] + elif version not in self.py_versions: + self.py_versions.append(version) + + def get_py_string_const(self, encoding, identifier=None, + is_str=False, py3str_cstring=None): + py_strings = self.py_strings + text = self.text + + is_str = bool(identifier or is_str) + is_unicode = encoding is None and not is_str + + if encoding is None: + # unicode string + encoding_key = None + else: + # bytes or str + encoding = encoding.lower() + if encoding in ('utf8', 'utf-8', 'ascii', 'usascii', 'us-ascii'): + encoding = None + encoding_key = None + else: + encoding_key = ''.join(find_alphanums(encoding)) + + key = (is_str, is_unicode, encoding_key, py3str_cstring) + if py_strings is not None: + try: + return py_strings[key] + except KeyError: + pass + else: + self.py_strings = {} + + if identifier: + intern = True + elif identifier is None: + if isinstance(text, bytes): + intern = bool(possible_bytes_identifier(text)) + else: + intern = bool(possible_unicode_identifier(text)) + else: + intern = False + if intern: + prefix = Naming.interned_prefixes['str'] + else: + prefix = Naming.py_const_prefix + + if encoding_key: + encoding_prefix = '_%s' % encoding_key + else: + encoding_prefix = '' + + pystring_cname = "%s%s%s_%s" % ( + prefix, + (is_str and 's') or (is_unicode and 'u') or 'b', + encoding_prefix, + self.cname[len(Naming.const_prefix):]) + + py_string = PyStringConst( + pystring_cname, encoding, is_unicode, is_str, py3str_cstring, intern) + self.py_strings[key] = py_string + return py_string + +class PyStringConst(object): + """Global info about a Python string constant held by GlobalState. + """ + # cname string + # py3str_cstring string + # encoding string + # intern boolean + # is_unicode boolean + # is_str boolean + + def __init__(self, cname, encoding, is_unicode, is_str=False, + py3str_cstring=None, intern=False): + self.cname = cname + self.py3str_cstring = py3str_cstring + self.encoding = encoding + self.is_str = is_str + self.is_unicode = is_unicode + self.intern = intern + + def __lt__(self, other): + return self.cname < other.cname + + +class GlobalState(object): + # filename_table {string : int} for finding filename table indexes + # filename_list [string] filenames in filename table order + # input_file_contents dict contents (=list of lines) of any file that was used as input + # to create this output C code. This is + # used to annotate the comments. 
+ # + # utility_codes set IDs of used utility code (to avoid reinsertion) + # + # declared_cnames {string:Entry} used in a transition phase to merge pxd-declared + # constants etc. into the pyx-declared ones (i.e, + # check if constants are already added). + # In time, hopefully the literals etc. will be + # supplied directly instead. + # + # const_cnames_used dict global counter for unique constant identifiers + # + + # parts {string:CCodeWriter} + + + # interned_strings + # consts + # interned_nums + + # directives set Temporary variable used to track + # the current set of directives in the code generation + # process. + + directives = {} + + code_layout = [ + 'h_code', + 'filename_table', + 'utility_code_proto_before_types', + 'numeric_typedefs', # Let these detailed individual parts stay!, + 'complex_type_declarations', # as the proper solution is to make a full DAG... + 'type_declarations', # More coarse-grained blocks would simply hide + 'utility_code_proto', # the ugliness, not fix it + 'module_declarations', + 'typeinfo', + 'before_global_var', + 'global_var', + 'string_decls', + 'decls', + 'late_includes', + 'all_the_rest', + 'pystring_table', + 'cached_builtins', + 'cached_constants', + 'init_globals', + 'init_module', + 'cleanup_globals', + 'cleanup_module', + 'main_method', + 'utility_code_def', + 'end' + ] + + + def __init__(self, writer, module_node, code_config, common_utility_include_dir=None): + self.filename_table = {} + self.filename_list = [] + self.input_file_contents = {} + self.utility_codes = set() + self.declared_cnames = {} + self.in_utility_code_generation = False + self.code_config = code_config + self.common_utility_include_dir = common_utility_include_dir + self.parts = {} + self.module_node = module_node # because some utility code generation needs it + # (generating backwards-compatible Get/ReleaseBuffer + + self.const_cnames_used = {} + self.string_const_index = {} + self.dedup_const_index = {} + self.pyunicode_ptr_const_index = {} + self.num_const_index = {} + self.py_constants = [] + self.cached_cmethods = {} + self.initialised_constants = set() + + writer.set_global_state(self) + self.rootwriter = writer + + def initialize_main_c_code(self): + rootwriter = self.rootwriter + for part in self.code_layout: + self.parts[part] = rootwriter.insertion_point() + + if not Options.cache_builtins: + del self.parts['cached_builtins'] + else: + w = self.parts['cached_builtins'] + w.enter_cfunc_scope() + w.putln("static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {") + + w = self.parts['cached_constants'] + w.enter_cfunc_scope() + w.putln("") + w.putln("static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {") + w.put_declare_refcount_context() + w.put_setup_refcount_context("__Pyx_InitCachedConstants") + + w = self.parts['init_globals'] + w.enter_cfunc_scope() + w.putln("") + w.putln("static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {") + + if not Options.generate_cleanup_code: + del self.parts['cleanup_globals'] + else: + w = self.parts['cleanup_globals'] + w.enter_cfunc_scope() + w.putln("") + w.putln("static CYTHON_SMALL_CODE void __Pyx_CleanupGlobals(void) {") + + code = self.parts['utility_code_proto'] + code.putln("") + code.putln("/* --- Runtime support code (head) --- */") + + code = self.parts['utility_code_def'] + if self.code_config.emit_linenums: + code.write('\n#line 1 "cython_utility"\n') + code.putln("") + code.putln("/* --- Runtime support code --- */") + + def finalize_main_c_code(self): + self.close_global_decls() + + # + # 
utility_code_def
+        #
+        code = self.parts['utility_code_def']
+        util = TempitaUtilityCode.load_cached("TypeConversions", "TypeConversion.c")
+        code.put(util.format_code(util.impl))
+        code.putln("")
+
+    def __getitem__(self, key):
+        return self.parts[key]
+
+    #
+    # Global constants, interned objects, etc.
+    #
+    def close_global_decls(self):
+        # This is called when it is known that no more global declarations
+        # will be declared.
+        self.generate_const_declarations()
+        if Options.cache_builtins:
+            w = self.parts['cached_builtins']
+            w.putln("return 0;")
+            if w.label_used(w.error_label):
+                w.put_label(w.error_label)
+                w.putln("return -1;")
+            w.putln("}")
+            w.exit_cfunc_scope()
+
+        w = self.parts['cached_constants']
+        w.put_finish_refcount_context()
+        w.putln("return 0;")
+        if w.label_used(w.error_label):
+            w.put_label(w.error_label)
+            w.put_finish_refcount_context()
+            w.putln("return -1;")
+        w.putln("}")
+        w.exit_cfunc_scope()
+
+        w = self.parts['init_globals']
+        w.putln("return 0;")
+        if w.label_used(w.error_label):
+            w.put_label(w.error_label)
+            w.putln("return -1;")
+        w.putln("}")
+        w.exit_cfunc_scope()
+
+        if Options.generate_cleanup_code:
+            w = self.parts['cleanup_globals']
+            w.putln("}")
+            w.exit_cfunc_scope()
+
+        if Options.generate_cleanup_code:
+            w = self.parts['cleanup_module']
+            w.putln("}")
+            w.exit_cfunc_scope()
+
+    def put_pyobject_decl(self, entry):
+        self['global_var'].putln("static PyObject *%s;" % entry.cname)
+
+    # constant handling at code generation time
+
+    def get_cached_constants_writer(self, target=None):
+        if target is not None:
+            if target in self.initialised_constants:
+                # Return None on second/later calls to prevent duplicate creation code.
+                return None
+            self.initialised_constants.add(target)
+        return self.parts['cached_constants']
+
+    def get_int_const(self, str_value, longness=False):
+        py_type = longness and 'long' or 'int'
+        try:
+            c = self.num_const_index[(str_value, py_type)]
+        except KeyError:
+            c = self.new_num_const(str_value, py_type)
+        return c
+
+    def get_float_const(self, str_value, value_code):
+        try:
+            c = self.num_const_index[(str_value, 'float')]
+        except KeyError:
+            c = self.new_num_const(str_value, 'float', value_code)
+        return c
+
+    def get_py_const(self, type, prefix='', cleanup_level=None, dedup_key=None):
+        if dedup_key is not None:
+            const = self.dedup_const_index.get(dedup_key)
+            if const is not None:
+                return const
+        # create a new Python object constant
+        const = self.new_py_const(type, prefix)
+        if cleanup_level is not None \
+                and cleanup_level <= Options.generate_cleanup_code:
+            cleanup_writer = self.parts['cleanup_globals']
+            cleanup_writer.putln('Py_CLEAR(%s);' % const.cname)
+        if dedup_key is not None:
+            self.dedup_const_index[dedup_key] = const
+        return const
+
+    def get_string_const(self, text, py_version=None):
+        # return a C string constant, creating a new one if necessary
+        if text.is_unicode:
+            byte_string = text.utf8encode()
+        else:
+            byte_string = text.byteencode()
+        try:
+            c = self.string_const_index[byte_string]
+        except KeyError:
+            c = self.new_string_const(text, byte_string)
+        c.add_py_version(py_version)
+        return c
+
+    def get_pyunicode_ptr_const(self, text):
+        # return a Py_UNICODE[] constant, creating a new one if necessary
+        assert text.is_unicode
+        try:
+            c = self.pyunicode_ptr_const_index[text]
+        except KeyError:
+            c = self.pyunicode_ptr_const_index[text] = self.new_const_cname()
+        return c
+
+    def get_py_string_const(self, text, identifier=None,
+                            is_str=False, unicode_value=None):
+        # return a Python string
constant, creating a new one if necessary + py3str_cstring = None + if is_str and unicode_value is not None \ + and unicode_value.utf8encode() != text.byteencode(): + py3str_cstring = self.get_string_const(unicode_value, py_version=3) + c_string = self.get_string_const(text, py_version=2) + else: + c_string = self.get_string_const(text) + py_string = c_string.get_py_string_const( + text.encoding, identifier, is_str, py3str_cstring) + return py_string + + def get_interned_identifier(self, text): + return self.get_py_string_const(text, identifier=True) + + def new_string_const(self, text, byte_string): + cname = self.new_string_const_cname(byte_string) + c = StringConst(cname, text, byte_string) + self.string_const_index[byte_string] = c + return c + + def new_num_const(self, value, py_type, value_code=None): + cname = self.new_num_const_cname(value, py_type) + c = NumConst(cname, value, py_type, value_code) + self.num_const_index[(value, py_type)] = c + return c + + def new_py_const(self, type, prefix=''): + cname = self.new_const_cname(prefix) + c = PyObjectConst(cname, type) + self.py_constants.append(c) + return c + + def new_string_const_cname(self, bytes_value): + # Create a new globally-unique nice name for a C string constant. + value = bytes_value.decode('ASCII', 'ignore') + return self.new_const_cname(value=value) + + def new_num_const_cname(self, value, py_type): + if py_type == 'long': + value += 'L' + py_type = 'int' + prefix = Naming.interned_prefixes[py_type] + cname = "%s%s" % (prefix, value) + cname = cname.replace('+', '_').replace('-', 'neg_').replace('.', '_') + return cname + + def new_const_cname(self, prefix='', value=''): + value = replace_identifier('_', value)[:32].strip('_') + used = self.const_cnames_used + name_suffix = value + while name_suffix in used: + counter = used[value] = used[value] + 1 + name_suffix = '%s_%d' % (value, counter) + used[name_suffix] = 1 + if prefix: + prefix = Naming.interned_prefixes[prefix] + else: + prefix = Naming.const_prefix + return "%s%s" % (prefix, name_suffix) + + def get_cached_unbound_method(self, type_cname, method_name): + key = (type_cname, method_name) + try: + cname = self.cached_cmethods[key] + except KeyError: + cname = self.cached_cmethods[key] = self.new_const_cname( + 'umethod', '%s_%s' % (type_cname, method_name)) + return cname + + def cached_unbound_method_call_code(self, obj_cname, type_cname, method_name, arg_cnames): + # admittedly, not the best place to put this method, but it is reused by UtilityCode and ExprNodes ... 
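+        # Editor's illustration (cnames hypothetical): with one argument the
+        # returned C fragment has the shape
+        #     __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_get, obj, key)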
+ utility_code_name = "CallUnboundCMethod%d" % len(arg_cnames) + self.use_utility_code(UtilityCode.load_cached(utility_code_name, "ObjectHandling.c")) + cache_cname = self.get_cached_unbound_method(type_cname, method_name) + args = [obj_cname] + arg_cnames + return "__Pyx_%s(&%s, %s)" % ( + utility_code_name, + cache_cname, + ', '.join(args), + ) + + def add_cached_builtin_decl(self, entry): + if entry.is_builtin and entry.is_const: + if self.should_declare(entry.cname, entry): + self.put_pyobject_decl(entry) + w = self.parts['cached_builtins'] + condition = None + if entry.name in non_portable_builtins_map: + condition, replacement = non_portable_builtins_map[entry.name] + w.putln('#if %s' % condition) + self.put_cached_builtin_init( + entry.pos, StringEncoding.EncodedString(replacement), + entry.cname) + w.putln('#else') + self.put_cached_builtin_init( + entry.pos, StringEncoding.EncodedString(entry.name), + entry.cname) + if condition: + w.putln('#endif') + + def put_cached_builtin_init(self, pos, name, cname): + w = self.parts['cached_builtins'] + interned_cname = self.get_interned_identifier(name).cname + self.use_utility_code( + UtilityCode.load_cached("GetBuiltinName", "ObjectHandling.c")) + w.putln('%s = __Pyx_GetBuiltinName(%s); if (!%s) %s' % ( + cname, + interned_cname, + cname, + w.error_goto(pos))) + + def generate_const_declarations(self): + self.generate_cached_methods_decls() + self.generate_string_constants() + self.generate_num_constants() + self.generate_object_constant_decls() + + def generate_object_constant_decls(self): + consts = [(len(c.cname), c.cname, c) + for c in self.py_constants] + consts.sort() + decls_writer = self.parts['decls'] + for _, cname, c in consts: + decls_writer.putln( + "static %s;" % c.type.declaration_code(cname)) + + def generate_cached_methods_decls(self): + if not self.cached_cmethods: + return + + decl = self.parts['decls'] + init = self.parts['init_globals'] + cnames = [] + for (type_cname, method_name), cname in sorted(self.cached_cmethods.items()): + cnames.append(cname) + method_name_cname = self.get_interned_identifier(StringEncoding.EncodedString(method_name)).cname + decl.putln('static __Pyx_CachedCFunction %s = {0, &%s, 0, 0, 0};' % ( + cname, method_name_cname)) + # split type reference storage as it might not be static + init.putln('%s.type = (PyObject*)&%s;' % ( + cname, type_cname)) + + if Options.generate_cleanup_code: + cleanup = self.parts['cleanup_globals'] + for cname in cnames: + cleanup.putln("Py_CLEAR(%s.method);" % cname) + + def generate_string_constants(self): + c_consts = [(len(c.cname), c.cname, c) for c in self.string_const_index.values()] + c_consts.sort() + py_strings = [] + + decls_writer = self.parts['string_decls'] + for _, cname, c in c_consts: + conditional = False + if c.py_versions and (2 not in c.py_versions or 3 not in c.py_versions): + conditional = True + decls_writer.putln("#if PY_MAJOR_VERSION %s 3" % ( + (2 in c.py_versions) and '<' or '>=')) + decls_writer.putln('static const char %s[] = "%s";' % ( + cname, StringEncoding.split_string_literal(c.escaped_value))) + if conditional: + decls_writer.putln("#endif") + if c.py_strings is not None: + for py_string in c.py_strings.values(): + py_strings.append((c.cname, len(py_string.cname), py_string)) + + for c, cname in sorted(self.pyunicode_ptr_const_index.items()): + utf16_array, utf32_array = StringEncoding.encode_pyunicode_string(c) + if utf16_array: + # Narrow and wide representations differ + decls_writer.putln("#ifdef Py_UNICODE_WIDE") + 
decls_writer.putln("static Py_UNICODE %s[] = { %s };" % (cname, utf32_array)) + if utf16_array: + decls_writer.putln("#else") + decls_writer.putln("static Py_UNICODE %s[] = { %s };" % (cname, utf16_array)) + decls_writer.putln("#endif") + + if py_strings: + self.use_utility_code(UtilityCode.load_cached("InitStrings", "StringTools.c")) + py_strings.sort() + w = self.parts['pystring_table'] + w.putln("") + w.putln("static __Pyx_StringTabEntry %s[] = {" % Naming.stringtab_cname) + for c_cname, _, py_string in py_strings: + if not py_string.is_str or not py_string.encoding or \ + py_string.encoding in ('ASCII', 'USASCII', 'US-ASCII', + 'UTF8', 'UTF-8'): + encoding = '0' + else: + encoding = '"%s"' % py_string.encoding.lower() + + decls_writer.putln( + "static PyObject *%s;" % py_string.cname) + if py_string.py3str_cstring: + w.putln("#if PY_MAJOR_VERSION >= 3") + w.putln("{&%s, %s, sizeof(%s), %s, %d, %d, %d}," % ( + py_string.cname, + py_string.py3str_cstring.cname, + py_string.py3str_cstring.cname, + '0', 1, 0, + py_string.intern + )) + w.putln("#else") + w.putln("{&%s, %s, sizeof(%s), %s, %d, %d, %d}," % ( + py_string.cname, + c_cname, + c_cname, + encoding, + py_string.is_unicode, + py_string.is_str, + py_string.intern + )) + if py_string.py3str_cstring: + w.putln("#endif") + w.putln("{0, 0, 0, 0, 0, 0, 0}") + w.putln("};") + + init_globals = self.parts['init_globals'] + init_globals.putln( + "if (__Pyx_InitStrings(%s) < 0) %s;" % ( + Naming.stringtab_cname, + init_globals.error_goto(self.module_pos))) + + def generate_num_constants(self): + consts = [(c.py_type, c.value[0] == '-', len(c.value), c.value, c.value_code, c) + for c in self.num_const_index.values()] + consts.sort() + decls_writer = self.parts['decls'] + init_globals = self.parts['init_globals'] + for py_type, _, _, value, value_code, c in consts: + cname = c.cname + decls_writer.putln("static PyObject *%s;" % cname) + if py_type == 'float': + function = 'PyFloat_FromDouble(%s)' + elif py_type == 'long': + function = 'PyLong_FromString((char *)"%s", 0, 0)' + elif Utils.long_literal(value): + function = 'PyInt_FromString((char *)"%s", 0, 0)' + elif len(value.lstrip('-')) > 4: + function = "PyInt_FromLong(%sL)" + else: + function = "PyInt_FromLong(%s)" + init_globals.putln('%s = %s; %s' % ( + cname, function % value_code, + init_globals.error_goto_if_null(cname, self.module_pos))) + + # The functions below are there in a transition phase only + # and will be deprecated. They are called from Nodes.BlockNode. + # The copy&paste duplication is intentional in order to be able + # to see quickly how BlockNode worked, until this is replaced. 
+
+    def should_declare(self, cname, entry):
+        if cname in self.declared_cnames:
+            other = self.declared_cnames[cname]
+            assert str(entry.type) == str(other.type)
+            assert entry.init == other.init
+            return False
+        else:
+            self.declared_cnames[cname] = entry
+            return True
+
+    #
+    # File name state
+    #
+
+    def lookup_filename(self, source_desc):
+        entry = source_desc.get_filenametable_entry()
+        try:
+            index = self.filename_table[entry]
+        except KeyError:
+            index = len(self.filename_list)
+            self.filename_list.append(source_desc)
+            self.filename_table[entry] = index
+        return index
+
+    def commented_file_contents(self, source_desc):
+        try:
+            return self.input_file_contents[source_desc]
+        except KeyError:
+            pass
+        source_file = source_desc.get_lines(encoding='ASCII',
+                                            error_handling='ignore')
+        try:
+            F = [u' * ' + line.rstrip().replace(
+                    u'*/', u'*[inserted by cython to avoid comment closer]/'
+                ).replace(
+                    u'/*', u'/[inserted by cython to avoid comment start]*'
+                )
+                for line in source_file]
+        finally:
+            if hasattr(source_file, 'close'):
+                source_file.close()
+        if not F: F.append(u'')
+        self.input_file_contents[source_desc] = F
+        return F
+
+    #
+    # Utility code state
+    #
+
+    def use_utility_code(self, utility_code):
+        """
+        Adds code to the C file. utility_code should
+        a) implement __eq__/__hash__ for the purpose of knowing whether the same
+           code has already been included
+        b) implement put_code, which takes a globalstate instance
+
+        See UtilityCode.
+        """
+        if utility_code and utility_code not in self.utility_codes:
+            self.utility_codes.add(utility_code)
+            utility_code.put_code(self)
+
+    def use_entry_utility_code(self, entry):
+        if entry is None:
+            return
+        if entry.utility_code:
+            self.use_utility_code(entry.utility_code)
+        if entry.utility_code_definition:
+            self.use_utility_code(entry.utility_code_definition)
+
+
+def funccontext_property(func):
+    name = func.__name__
+    attribute_of = operator.attrgetter(name)
+    def get(self):
+        return attribute_of(self.funcstate)
+    def set(self, value):
+        setattr(self.funcstate, name, value)
+    return property(get, set)
+
+
+class CCodeConfig(object):
+    # emit_linenums        boolean   write #line pragmas?
+    # emit_code_comments   boolean   copy the original code into C comments?
+    # c_line_in_traceback  boolean   append the c file and line number to the traceback for exceptions?
+
+    def __init__(self, emit_linenums=True, emit_code_comments=True, c_line_in_traceback=True):
+        self.emit_code_comments = emit_code_comments
+        self.emit_linenums = emit_linenums
+        self.c_line_in_traceback = c_line_in_traceback
+
+
+class CCodeWriter(object):
+    """
+    Utility class to output C code.
+
+    When creating an insertion point one must care about the state that is
+    kept:
+    - formatting state (level, bol) is cloned and used in insertion points
+      as well
+    - labels, temps, exc_vars: One must construct a scope in which these can
+      exist by calling enter_cfunc_scope/exit_cfunc_scope (these are for
+      sanity checking and forward compatibility). Created insertion points
+      lose this scope and cannot access it.
+    - marker: Not copied to insertion point
+    - filename_table, filename_list, input_file_contents: All codewriters
+      coming from the same root share the same instances simultaneously.
+    """
+
+    # f                   file            output file
+    # buffer              StringIOTree
+
+    # level               int             indentation level
+    # bol                 bool            beginning of line?
+    # marker              string          comment to emit before next line
+    # funcstate           FunctionState   contains state local to a C function used for code
+    #                                     generation (labels and temps state etc.)
+ # globalstate GlobalState contains state global for a C file (input file info, + # utility code, declared constants etc.) + # pyclass_stack list used during recursive code generation to pass information + # about the current class one is in + # code_config CCodeConfig configuration options for the C code writer + + @cython.locals(create_from='CCodeWriter') + def __init__(self, create_from=None, buffer=None, copy_formatting=False): + if buffer is None: buffer = StringIOTree() + self.buffer = buffer + self.last_pos = None + self.last_marked_pos = None + self.pyclass_stack = [] + + self.funcstate = None + self.globalstate = None + self.code_config = None + self.level = 0 + self.call_level = 0 + self.bol = 1 + + if create_from is not None: + # Use same global state + self.set_global_state(create_from.globalstate) + self.funcstate = create_from.funcstate + # Clone formatting state + if copy_formatting: + self.level = create_from.level + self.bol = create_from.bol + self.call_level = create_from.call_level + self.last_pos = create_from.last_pos + self.last_marked_pos = create_from.last_marked_pos + + def create_new(self, create_from, buffer, copy_formatting): + # polymorphic constructor -- very slightly more versatile + # than using __class__ + result = CCodeWriter(create_from, buffer, copy_formatting) + return result + + def set_global_state(self, global_state): + assert self.globalstate is None # prevent overwriting once it's set + self.globalstate = global_state + self.code_config = global_state.code_config + + def copyto(self, f): + self.buffer.copyto(f) + + def getvalue(self): + return self.buffer.getvalue() + + def write(self, s): + # also put invalid markers (lineno 0), to indicate that those lines + # have no Cython source code correspondence + cython_lineno = self.last_marked_pos[1] if self.last_marked_pos else 0 + self.buffer.markers.extend([cython_lineno] * s.count('\n')) + self.buffer.write(s) + + def insertion_point(self): + other = self.create_new(create_from=self, buffer=self.buffer.insertion_point(), copy_formatting=True) + return other + + def new_writer(self): + """ + Creates a new CCodeWriter connected to the same global state, which + can later be inserted using insert. + """ + return CCodeWriter(create_from=self) + + def insert(self, writer): + """ + Inserts the contents of another code writer (created with + the same global state) in the current location. + + It is ok to write to the inserted writer also after insertion. 
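+
+        Sketch of the intended pattern (editor's example):
+
+            w = code.new_writer()        # shares code's global state
+            w.putln("/* generated later */")
+            code.insert(w)               # splice at the current position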
+ """ + assert writer.globalstate is self.globalstate + self.buffer.insert(writer.buffer) + + # Properties delegated to function scope + @funccontext_property + def label_counter(self): pass + @funccontext_property + def return_label(self): pass + @funccontext_property + def error_label(self): pass + @funccontext_property + def labels_used(self): pass + @funccontext_property + def continue_label(self): pass + @funccontext_property + def break_label(self): pass + @funccontext_property + def return_from_error_cleanup_label(self): pass + @funccontext_property + def yield_labels(self): pass + + # Functions delegated to function scope + def new_label(self, name=None): return self.funcstate.new_label(name) + def new_error_label(self): return self.funcstate.new_error_label() + def new_yield_label(self, *args): return self.funcstate.new_yield_label(*args) + def get_loop_labels(self): return self.funcstate.get_loop_labels() + def set_loop_labels(self, labels): return self.funcstate.set_loop_labels(labels) + def new_loop_labels(self): return self.funcstate.new_loop_labels() + def get_all_labels(self): return self.funcstate.get_all_labels() + def set_all_labels(self, labels): return self.funcstate.set_all_labels(labels) + def all_new_labels(self): return self.funcstate.all_new_labels() + def use_label(self, lbl): return self.funcstate.use_label(lbl) + def label_used(self, lbl): return self.funcstate.label_used(lbl) + + + def enter_cfunc_scope(self, scope=None): + self.funcstate = FunctionState(self, scope=scope) + + def exit_cfunc_scope(self): + self.funcstate = None + + # constant handling + + def get_py_int(self, str_value, longness): + return self.globalstate.get_int_const(str_value, longness).cname + + def get_py_float(self, str_value, value_code): + return self.globalstate.get_float_const(str_value, value_code).cname + + def get_py_const(self, type, prefix='', cleanup_level=None, dedup_key=None): + return self.globalstate.get_py_const(type, prefix, cleanup_level, dedup_key).cname + + def get_string_const(self, text): + return self.globalstate.get_string_const(text).cname + + def get_pyunicode_ptr_const(self, text): + return self.globalstate.get_pyunicode_ptr_const(text) + + def get_py_string_const(self, text, identifier=None, + is_str=False, unicode_value=None): + return self.globalstate.get_py_string_const( + text, identifier, is_str, unicode_value).cname + + def get_argument_default_const(self, type): + return self.globalstate.get_py_const(type).cname + + def intern(self, text): + return self.get_py_string_const(text) + + def intern_identifier(self, text): + return self.get_py_string_const(text, identifier=True) + + def get_cached_constants_writer(self, target=None): + return self.globalstate.get_cached_constants_writer(target) + + # code generation + + def putln(self, code="", safe=False): + if self.last_pos and self.bol: + self.emit_marker() + if self.code_config.emit_linenums and self.last_marked_pos: + source_desc, line, _ = self.last_marked_pos + self.write('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description())) + if code: + if safe: + self.put_safe(code) + else: + self.put(code) + self.write("\n") + self.bol = 1 + + def mark_pos(self, pos, trace=True): + if pos is None: + return + if self.last_marked_pos and self.last_marked_pos[:2] == pos[:2]: + return + self.last_pos = (pos, trace) + + def emit_marker(self): + pos, trace = self.last_pos + self.last_marked_pos = pos + self.last_pos = None + self.write("\n") + if self.code_config.emit_code_comments: + self.indent() + 
self.write("/* %s */\n" % self._build_marker(pos)) + if trace and self.funcstate and self.funcstate.can_trace and self.globalstate.directives['linetrace']: + self.indent() + self.write('__Pyx_TraceLine(%d,%d,%s)\n' % ( + pos[1], not self.funcstate.gil_owned, self.error_goto(pos))) + + def _build_marker(self, pos): + source_desc, line, col = pos + assert isinstance(source_desc, SourceDescriptor) + contents = self.globalstate.commented_file_contents(source_desc) + lines = contents[max(0, line-3):line] # line numbers start at 1 + lines[-1] += u' # <<<<<<<<<<<<<<' + lines += contents[line:line+2] + return u'"%s":%d\n%s\n' % (source_desc.get_escaped_description(), line, u'\n'.join(lines)) + + def put_safe(self, code): + # put code, but ignore {} + self.write(code) + self.bol = 0 + + def put_or_include(self, code, name): + include_dir = self.globalstate.common_utility_include_dir + if include_dir and len(code) > 1024: + include_file = "%s_%s.h" % ( + name, hashlib.md5(code.encode('utf8')).hexdigest()) + path = os.path.join(include_dir, include_file) + if not os.path.exists(path): + tmp_path = '%s.tmp%s' % (path, os.getpid()) + with closing(Utils.open_new_file(tmp_path)) as f: + f.write(code) + shutil.move(tmp_path, path) + code = '#include "%s"\n' % path + self.put(code) + + def put(self, code): + fix_indent = False + if "{" in code: + dl = code.count("{") + else: + dl = 0 + if "}" in code: + dl -= code.count("}") + if dl < 0: + self.level += dl + elif dl == 0 and code[0] == "}": + # special cases like "} else {" need a temporary dedent + fix_indent = True + self.level -= 1 + if self.bol: + self.indent() + self.write(code) + self.bol = 0 + if dl > 0: + self.level += dl + elif fix_indent: + self.level += 1 + + def putln_tempita(self, code, **context): + from ..Tempita import sub + self.putln(sub(code, **context)) + + def put_tempita(self, code, **context): + from ..Tempita import sub + self.put(sub(code, **context)) + + def increase_indent(self): + self.level += 1 + + def decrease_indent(self): + self.level -= 1 + + def begin_block(self): + self.putln("{") + self.increase_indent() + + def end_block(self): + self.decrease_indent() + self.putln("}") + + def indent(self): + self.write(" " * self.level) + + def get_py_version_hex(self, pyversion): + return "0x%02X%02X%02X%02X" % (tuple(pyversion) + (0,0,0,0))[:4] + + def put_label(self, lbl): + if lbl in self.funcstate.labels_used: + self.putln("%s:;" % lbl) + + def put_goto(self, lbl): + self.funcstate.use_label(lbl) + self.putln("goto %s;" % lbl) + + def put_var_declaration(self, entry, storage_class="", + dll_linkage=None, definition=True): + #print "Code.put_var_declaration:", entry.name, "definition =", definition ### + if entry.visibility == 'private' and not (definition or entry.defined_in_pxd): + #print "...private and not definition, skipping", entry.cname ### + return + if entry.visibility == "private" and not entry.used: + #print "...private and not used, skipping", entry.cname ### + return + if storage_class: + self.put("%s " % storage_class) + if not entry.cf_used: + self.put('CYTHON_UNUSED ') + self.put(entry.type.declaration_code( + entry.cname, dll_linkage=dll_linkage)) + if entry.init is not None: + self.put_safe(" = %s" % entry.type.literal_code(entry.init)) + elif entry.type.is_pyobject: + self.put(" = NULL") + self.putln(";") + + def put_temp_declarations(self, func_context): + for name, type, manage_ref, static in func_context.temps_allocated: + decl = type.declaration_code(name) + if type.is_pyobject: + self.putln("%s = NULL;" % 
decl) + elif type.is_memoryviewslice: + from . import MemoryView + self.putln("%s = %s;" % (decl, MemoryView.memslice_entry_init)) + else: + self.putln("%s%s;" % (static and "static " or "", decl)) + + if func_context.should_declare_error_indicator: + if self.funcstate.uses_error_indicator: + unused = '' + else: + unused = 'CYTHON_UNUSED ' + # Initialize these variables to silence compiler warnings + self.putln("%sint %s = 0;" % (unused, Naming.lineno_cname)) + self.putln("%sconst char *%s = NULL;" % (unused, Naming.filename_cname)) + self.putln("%sint %s = 0;" % (unused, Naming.clineno_cname)) + + def put_generated_by(self): + self.putln("/* Generated by Cython %s */" % Version.watermark) + self.putln("") + + def put_h_guard(self, guard): + self.putln("#ifndef %s" % guard) + self.putln("#define %s" % guard) + + def unlikely(self, cond): + if Options.gcc_branch_hints: + return 'unlikely(%s)' % cond + else: + return cond + + def build_function_modifiers(self, modifiers, mapper=modifier_output_mapper): + if not modifiers: + return '' + return '%s ' % ' '.join([mapper(m,m) for m in modifiers]) + + # Python objects and reference counting + + def entry_as_pyobject(self, entry): + type = entry.type + if (not entry.is_self_arg and not entry.type.is_complete() + or entry.type.is_extension_type): + return "(PyObject *)" + entry.cname + else: + return entry.cname + + def as_pyobject(self, cname, type): + from .PyrexTypes import py_object_type, typecast + return typecast(py_object_type, type, cname) + + def put_gotref(self, cname): + self.putln("__Pyx_GOTREF(%s);" % cname) + + def put_giveref(self, cname): + self.putln("__Pyx_GIVEREF(%s);" % cname) + + def put_xgiveref(self, cname): + self.putln("__Pyx_XGIVEREF(%s);" % cname) + + def put_xgotref(self, cname): + self.putln("__Pyx_XGOTREF(%s);" % cname) + + def put_incref(self, cname, type, nanny=True): + if nanny: + self.putln("__Pyx_INCREF(%s);" % self.as_pyobject(cname, type)) + else: + self.putln("Py_INCREF(%s);" % self.as_pyobject(cname, type)) + + def put_decref(self, cname, type, nanny=True): + self._put_decref(cname, type, nanny, null_check=False, clear=False) + + def put_var_gotref(self, entry): + if entry.type.is_pyobject: + self.putln("__Pyx_GOTREF(%s);" % self.entry_as_pyobject(entry)) + + def put_var_giveref(self, entry): + if entry.type.is_pyobject: + self.putln("__Pyx_GIVEREF(%s);" % self.entry_as_pyobject(entry)) + + def put_var_xgotref(self, entry): + if entry.type.is_pyobject: + self.putln("__Pyx_XGOTREF(%s);" % self.entry_as_pyobject(entry)) + + def put_var_xgiveref(self, entry): + if entry.type.is_pyobject: + self.putln("__Pyx_XGIVEREF(%s);" % self.entry_as_pyobject(entry)) + + def put_var_incref(self, entry, nanny=True): + if entry.type.is_pyobject: + if nanny: + self.putln("__Pyx_INCREF(%s);" % self.entry_as_pyobject(entry)) + else: + self.putln("Py_INCREF(%s);" % self.entry_as_pyobject(entry)) + + def put_var_xincref(self, entry): + if entry.type.is_pyobject: + self.putln("__Pyx_XINCREF(%s);" % self.entry_as_pyobject(entry)) + + def put_decref_clear(self, cname, type, nanny=True, clear_before_decref=False): + self._put_decref(cname, type, nanny, null_check=False, + clear=True, clear_before_decref=clear_before_decref) + + def put_xdecref(self, cname, type, nanny=True, have_gil=True): + self._put_decref(cname, type, nanny, null_check=True, + have_gil=have_gil, clear=False) + + def put_xdecref_clear(self, cname, type, nanny=True, clear_before_decref=False): + self._put_decref(cname, type, nanny, null_check=True, + clear=True, 
clear_before_decref=clear_before_decref) + + def _put_decref(self, cname, type, nanny=True, null_check=False, + have_gil=True, clear=False, clear_before_decref=False): + if type.is_memoryviewslice: + self.put_xdecref_memoryviewslice(cname, have_gil=have_gil) + return + + prefix = '__Pyx' if nanny else 'Py' + X = 'X' if null_check else '' + + if clear: + if clear_before_decref: + if not nanny: + X = '' # CPython doesn't have a Py_XCLEAR() + self.putln("%s_%sCLEAR(%s);" % (prefix, X, cname)) + else: + self.putln("%s_%sDECREF(%s); %s = 0;" % ( + prefix, X, self.as_pyobject(cname, type), cname)) + else: + self.putln("%s_%sDECREF(%s);" % ( + prefix, X, self.as_pyobject(cname, type))) + + def put_decref_set(self, cname, rhs_cname): + self.putln("__Pyx_DECREF_SET(%s, %s);" % (cname, rhs_cname)) + + def put_xdecref_set(self, cname, rhs_cname): + self.putln("__Pyx_XDECREF_SET(%s, %s);" % (cname, rhs_cname)) + + def put_var_decref(self, entry): + if entry.type.is_pyobject: + self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry)) + + def put_var_xdecref(self, entry, nanny=True): + if entry.type.is_pyobject: + if nanny: + self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry)) + else: + self.putln("Py_XDECREF(%s);" % self.entry_as_pyobject(entry)) + + def put_var_decref_clear(self, entry): + self._put_var_decref_clear(entry, null_check=False) + + def put_var_xdecref_clear(self, entry): + self._put_var_decref_clear(entry, null_check=True) + + def _put_var_decref_clear(self, entry, null_check): + if entry.type.is_pyobject: + if entry.in_closure: + # reset before DECREF to make sure closure state is + # consistent during call to DECREF() + self.putln("__Pyx_%sCLEAR(%s);" % ( + null_check and 'X' or '', + entry.cname)) + else: + self.putln("__Pyx_%sDECREF(%s); %s = 0;" % ( + null_check and 'X' or '', + self.entry_as_pyobject(entry), + entry.cname)) + + def put_var_decrefs(self, entries, used_only = 0): + for entry in entries: + if not used_only or entry.used: + if entry.xdecref_cleanup: + self.put_var_xdecref(entry) + else: + self.put_var_decref(entry) + + def put_var_xdecrefs(self, entries): + for entry in entries: + self.put_var_xdecref(entry) + + def put_var_xdecrefs_clear(self, entries): + for entry in entries: + self.put_var_xdecref_clear(entry) + + def put_incref_memoryviewslice(self, slice_cname, have_gil=False): + from . import MemoryView + self.globalstate.use_utility_code(MemoryView.memviewslice_init_code) + self.putln("__PYX_INC_MEMVIEW(&%s, %d);" % (slice_cname, int(have_gil))) + + def put_xdecref_memoryviewslice(self, slice_cname, have_gil=False): + from . 
import MemoryView + self.globalstate.use_utility_code(MemoryView.memviewslice_init_code) + self.putln("__PYX_XDEC_MEMVIEW(&%s, %d);" % (slice_cname, int(have_gil))) + + def put_xgiveref_memoryviewslice(self, slice_cname): + self.put_xgiveref("%s.memview" % slice_cname) + + def put_init_to_py_none(self, cname, type, nanny=True): + from .PyrexTypes import py_object_type, typecast + py_none = typecast(type, py_object_type, "Py_None") + if nanny: + self.putln("%s = %s; __Pyx_INCREF(Py_None);" % (cname, py_none)) + else: + self.putln("%s = %s; Py_INCREF(Py_None);" % (cname, py_none)) + + def put_init_var_to_py_none(self, entry, template = "%s", nanny=True): + code = template % entry.cname + #if entry.type.is_extension_type: + # code = "((PyObject*)%s)" % code + self.put_init_to_py_none(code, entry.type, nanny) + if entry.in_closure: + self.put_giveref('Py_None') + + def put_pymethoddef(self, entry, term, allow_skip=True, wrapper_code_writer=None): + if entry.is_special or entry.name == '__getattribute__': + if entry.name not in special_py_methods: + if entry.name == '__getattr__' and not self.globalstate.directives['fast_getattr']: + pass + # Python's typeobject.c will automatically fill in our slot + # in add_operators() (called by PyType_Ready) with a value + # that's better than ours. + elif allow_skip: + return + + method_flags = entry.signature.method_flags() + if not method_flags: + return + if entry.is_special: + from . import TypeSlots + method_flags += [TypeSlots.method_coexist] + func_ptr = wrapper_code_writer.put_pymethoddef_wrapper(entry) if wrapper_code_writer else entry.func_cname + # Add required casts, but try not to shadow real warnings. + cast = '__Pyx_PyCFunctionFast' if 'METH_FASTCALL' in method_flags else 'PyCFunction' + if 'METH_KEYWORDS' in method_flags: + cast += 'WithKeywords' + if cast != 'PyCFunction': + func_ptr = '(void*)(%s)%s' % (cast, func_ptr) + self.putln( + '{"%s", (PyCFunction)%s, %s, %s}%s' % ( + entry.name, + func_ptr, + "|".join(method_flags), + entry.doc_cname if entry.doc else '0', + term)) + + def put_pymethoddef_wrapper(self, entry): + func_cname = entry.func_cname + if entry.is_special: + method_flags = entry.signature.method_flags() + if method_flags and 'METH_NOARGS' in method_flags: + # Special NOARGS methods really take no arguments besides 'self', but PyCFunction expects one. + func_cname = Naming.method_wrapper_prefix + func_cname + self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {return %s(self);}" % ( + func_cname, entry.func_cname)) + return func_cname + + # GIL methods + + def put_ensure_gil(self, declare_gilstate=True, variable=None): + """ + Acquire the GIL. The generated code is safe even when no PyThreadState + has been allocated for this thread (for threads not initialized by + using the Python API). Additionally, the code generated by this method + may be called recursively. 
+ """ + self.globalstate.use_utility_code( + UtilityCode.load_cached("ForceInitThreads", "ModuleSetupCode.c")) + if self.globalstate.directives['fast_gil']: + self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c")) + else: + self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c")) + self.putln("#ifdef WITH_THREAD") + if not variable: + variable = '__pyx_gilstate_save' + if declare_gilstate: + self.put("PyGILState_STATE ") + self.putln("%s = __Pyx_PyGILState_Ensure();" % variable) + self.putln("#endif") + + def put_release_ensured_gil(self, variable=None): + """ + Releases the GIL, corresponds to `put_ensure_gil`. + """ + if self.globalstate.directives['fast_gil']: + self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c")) + else: + self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c")) + if not variable: + variable = '__pyx_gilstate_save' + self.putln("#ifdef WITH_THREAD") + self.putln("__Pyx_PyGILState_Release(%s);" % variable) + self.putln("#endif") + + def put_acquire_gil(self, variable=None): + """ + Acquire the GIL. The thread's thread state must have been initialized + by a previous `put_release_gil` + """ + if self.globalstate.directives['fast_gil']: + self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c")) + else: + self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c")) + self.putln("#ifdef WITH_THREAD") + self.putln("__Pyx_FastGIL_Forget();") + if variable: + self.putln('_save = %s;' % variable) + self.putln("Py_BLOCK_THREADS") + self.putln("#endif") + + def put_release_gil(self, variable=None): + "Release the GIL, corresponds to `put_acquire_gil`." + if self.globalstate.directives['fast_gil']: + self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c")) + else: + self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c")) + self.putln("#ifdef WITH_THREAD") + self.putln("PyThreadState *_save;") + self.putln("Py_UNBLOCK_THREADS") + if variable: + self.putln('%s = _save;' % variable) + self.putln("__Pyx_FastGIL_Remember();") + self.putln("#endif") + + def declare_gilstate(self): + self.putln("#ifdef WITH_THREAD") + self.putln("PyGILState_STATE __pyx_gilstate_save;") + self.putln("#endif") + + # error handling + + def put_error_if_neg(self, pos, value): + # TODO this path is almost _never_ taken, yet this macro makes is slower! + # return self.putln("if (unlikely(%s < 0)) %s" % (value, self.error_goto(pos))) + return self.putln("if (%s < 0) %s" % (value, self.error_goto(pos))) + + def put_error_if_unbound(self, pos, entry, in_nogil_context=False): + from . 
import ExprNodes + if entry.from_closure: + func = '__Pyx_RaiseClosureNameError' + self.globalstate.use_utility_code( + ExprNodes.raise_closure_name_error_utility_code) + elif entry.type.is_memoryviewslice and in_nogil_context: + func = '__Pyx_RaiseUnboundMemoryviewSliceNogil' + self.globalstate.use_utility_code( + ExprNodes.raise_unbound_memoryview_utility_code_nogil) + else: + func = '__Pyx_RaiseUnboundLocalError' + self.globalstate.use_utility_code( + ExprNodes.raise_unbound_local_error_utility_code) + + self.putln('if (unlikely(!%s)) { %s("%s"); %s }' % ( + entry.type.check_for_null_code(entry.cname), + func, + entry.name, + self.error_goto(pos))) + + def set_error_info(self, pos, used=False): + self.funcstate.should_declare_error_indicator = True + if used: + self.funcstate.uses_error_indicator = True + return "__PYX_MARK_ERR_POS(%s, %s)" % ( + self.lookup_filename(pos[0]), + pos[1]) + + def error_goto(self, pos, used=True): + lbl = self.funcstate.error_label + self.funcstate.use_label(lbl) + if pos is None: + return 'goto %s;' % lbl + self.funcstate.should_declare_error_indicator = True + if used: + self.funcstate.uses_error_indicator = True + return "__PYX_ERR(%s, %s, %s)" % ( + self.lookup_filename(pos[0]), + pos[1], + lbl) + + def error_goto_if(self, cond, pos): + return "if (%s) %s" % (self.unlikely(cond), self.error_goto(pos)) + + def error_goto_if_null(self, cname, pos): + return self.error_goto_if("!%s" % cname, pos) + + def error_goto_if_neg(self, cname, pos): + return self.error_goto_if("%s < 0" % cname, pos) + + def error_goto_if_PyErr(self, pos): + return self.error_goto_if("PyErr_Occurred()", pos) + + def lookup_filename(self, filename): + return self.globalstate.lookup_filename(filename) + + def put_declare_refcount_context(self): + self.putln('__Pyx_RefNannyDeclarations') + + def put_setup_refcount_context(self, name, acquire_gil=False): + if acquire_gil: + self.globalstate.use_utility_code( + UtilityCode.load_cached("ForceInitThreads", "ModuleSetupCode.c")) + self.putln('__Pyx_RefNannySetupContext("%s", %d);' % (name, acquire_gil and 1 or 0)) + + def put_finish_refcount_context(self): + self.putln("__Pyx_RefNannyFinishContext();") + + def put_add_traceback(self, qualified_name, include_cline=True): + """ + Build a Python traceback for propagating exceptions. + + qualified_name should be the qualified name of the function. + """ + format_tuple = ( + qualified_name, + Naming.clineno_cname if include_cline else 0, + Naming.lineno_cname, + Naming.filename_cname, + ) + self.funcstate.uses_error_indicator = True + self.putln('__Pyx_AddTraceback("%s", %s, %s, %s);' % format_tuple) + + def put_unraisable(self, qualified_name, nogil=False): + """ + Generate code to print a Python warning for an unraisable exception. + + qualified_name should be the qualified name of the function. 
+ """ + format_tuple = ( + qualified_name, + Naming.clineno_cname, + Naming.lineno_cname, + Naming.filename_cname, + self.globalstate.directives['unraisable_tracebacks'], + nogil, + ) + self.funcstate.uses_error_indicator = True + self.putln('__Pyx_WriteUnraisable("%s", %s, %s, %s, %d, %d);' % format_tuple) + self.globalstate.use_utility_code( + UtilityCode.load_cached("WriteUnraisableException", "Exceptions.c")) + + def put_trace_declarations(self): + self.putln('__Pyx_TraceDeclarations') + + def put_trace_frame_init(self, codeobj=None): + if codeobj: + self.putln('__Pyx_TraceFrameInit(%s)' % codeobj) + + def put_trace_call(self, name, pos, nogil=False): + self.putln('__Pyx_TraceCall("%s", %s[%s], %s, %d, %s);' % ( + name, Naming.filetable_cname, self.lookup_filename(pos[0]), pos[1], nogil, self.error_goto(pos))) + + def put_trace_exception(self): + self.putln("__Pyx_TraceException();") + + def put_trace_return(self, retvalue_cname, nogil=False): + self.putln("__Pyx_TraceReturn(%s, %d);" % (retvalue_cname, nogil)) + + def putln_openmp(self, string): + self.putln("#ifdef _OPENMP") + self.putln(string) + self.putln("#endif /* _OPENMP */") + + def undef_builtin_expect(self, cond): + """ + Redefine the macros likely() and unlikely to no-ops, depending on + condition 'cond' + """ + self.putln("#if %s" % cond) + self.putln(" #undef likely") + self.putln(" #undef unlikely") + self.putln(" #define likely(x) (x)") + self.putln(" #define unlikely(x) (x)") + self.putln("#endif") + + def redef_builtin_expect(self, cond): + self.putln("#if %s" % cond) + self.putln(" #undef likely") + self.putln(" #undef unlikely") + self.putln(" #define likely(x) __builtin_expect(!!(x), 1)") + self.putln(" #define unlikely(x) __builtin_expect(!!(x), 0)") + self.putln("#endif") + + +class PyrexCodeWriter(object): + # f file output file + # level int indentation level + + def __init__(self, outfile_name): + self.f = Utils.open_new_file(outfile_name) + self.level = 0 + + def putln(self, code): + self.f.write("%s%s\n" % (" " * self.level, code)) + + def indent(self): + self.level += 1 + + def dedent(self): + self.level -= 1 + +class PyxCodeWriter(object): + """ + Can be used for writing out some Cython code. 
To use the indenter + functionality, the Cython.Compiler.Importer module will have to be used + to load the code to support python 2.4 + """ + + def __init__(self, buffer=None, indent_level=0, context=None, encoding='ascii'): + self.buffer = buffer or StringIOTree() + self.level = indent_level + self.context = context + self.encoding = encoding + + def indent(self, levels=1): + self.level += levels + return True + + def dedent(self, levels=1): + self.level -= levels + + def indenter(self, line): + """ + Instead of + + with pyx_code.indenter("for i in range(10):"): + pyx_code.putln("print i") + + write + + if pyx_code.indenter("for i in range(10);"): + pyx_code.putln("print i") + pyx_code.dedent() + """ + self.putln(line) + self.indent() + return True + + def getvalue(self): + result = self.buffer.getvalue() + if isinstance(result, bytes): + result = result.decode(self.encoding) + return result + + def putln(self, line, context=None): + context = context or self.context + if context: + line = sub_tempita(line, context) + self._putln(line) + + def _putln(self, line): + self.buffer.write("%s%s\n" % (self.level * " ", line)) + + def put_chunk(self, chunk, context=None): + context = context or self.context + if context: + chunk = sub_tempita(chunk, context) + + chunk = textwrap.dedent(chunk) + for line in chunk.splitlines(): + self._putln(line) + + def insertion_point(self): + return PyxCodeWriter(self.buffer.insertion_point(), self.level, + self.context) + + def named_insertion_point(self, name): + setattr(self, name, self.insertion_point()) + + +class ClosureTempAllocator(object): + def __init__(self, klass): + self.klass = klass + self.temps_allocated = {} + self.temps_free = {} + self.temps_count = 0 + + def reset(self): + for type, cnames in self.temps_allocated.items(): + self.temps_free[type] = list(cnames) + + def allocate_temp(self, type): + if type not in self.temps_allocated: + self.temps_allocated[type] = [] + self.temps_free[type] = [] + elif self.temps_free[type]: + return self.temps_free[type].pop(0) + cname = '%s%d' % (Naming.codewriter_temp_prefix, self.temps_count) + self.klass.declare_var(pos=None, name=cname, cname=cname, type=type, is_cdef=True) + self.temps_allocated[type].append(cname) + self.temps_count += 1 + return cname diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/CodeGeneration.py b/venv/lib/python3.8/site-packages/Cython/Compiler/CodeGeneration.py new file mode 100644 index 0000000..e64049c --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/CodeGeneration.py @@ -0,0 +1,35 @@ +from __future__ import absolute_import + +from .Visitor import VisitorTransform +from .Nodes import StatListNode + + +class ExtractPxdCode(VisitorTransform): + """ + Finds nodes in a pxd file that should generate code, and + returns them in a StatListNode. + + The result is a tuple (StatListNode, ModuleScope), i.e. + everything that is needed from the pxd after it is processed. + + A purer approach would be to separately compile the pxd code, + but the result would have to be slightly more sophisticated + than pure strings (functions + wanted interned strings + + wanted utility code + wanted cached objects) so for now this + approach is taken. + """ + + def __call__(self, root): + self.funcs = [] + self.visitchildren(root) + return (StatListNode(root.pos, stats=self.funcs), root.scope) + + def visit_FuncDefNode(self, node): + self.funcs.append(node) + # Do not visit children, nested funcdefnodes will + # also be moved by this action... 
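That "do not visit children" comment is doing the real work in ExtractPxdCode: by returning without recursing, a matched function is moved as a whole, so nested function definitions travel with their parent instead of being extracted a second time. A minimal standalone sketch of the same prune-on-match idiom (ToyNode and collect_funcs are illustrative names, not part of the Cython API):

class ToyNode:
    def __init__(self, kind, children=()):
        self.kind = kind
        self.children = list(children)

def collect_funcs(node, out):
    if node.kind == "func":
        out.append(node)
        return  # prune: nested "func" nodes stay inside their parent
    for child in node.children:
        collect_funcs(child, out)

tree = ToyNode("module", [
    ToyNode("stat"),
    ToyNode("func", [ToyNode("func")]),  # nested definition
])
found = []
collect_funcs(tree, found)
assert len(found) == 1  # only the outer function was collected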
+ return node + + def visit_Node(self, node): + self.visitchildren(node) + return node diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/CythonScope.py b/venv/lib/python3.8/site-packages/Cython/Compiler/CythonScope.py new file mode 100644 index 0000000..1c25d1a --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/CythonScope.py @@ -0,0 +1,164 @@ +from __future__ import absolute_import + +from .Symtab import ModuleScope +from .PyrexTypes import * +from .UtilityCode import CythonUtilityCode +from .Errors import error +from .Scanning import StringSourceDescriptor +from . import MemoryView + + +class CythonScope(ModuleScope): + is_cython_builtin = 1 + _cythonscope_initialized = False + + def __init__(self, context): + ModuleScope.__init__(self, u'cython', None, None) + self.pxd_file_loaded = True + self.populate_cython_scope() + # The Main.Context object + self.context = context + + for fused_type in (cy_integral_type, cy_floating_type, cy_numeric_type): + entry = self.declare_typedef(fused_type.name, + fused_type, + None, + cname='') + entry.in_cinclude = True + + def is_cpp(self): + # Allow C++ utility code in C++ contexts. + return self.context.cpp + + def lookup_type(self, name): + # This function should go away when types are all first-level objects. + type = parse_basic_type(name) + if type: + return type + + return super(CythonScope, self).lookup_type(name) + + def lookup(self, name): + entry = super(CythonScope, self).lookup(name) + + if entry is None and not self._cythonscope_initialized: + self.load_cythonscope() + entry = super(CythonScope, self).lookup(name) + + return entry + + def find_module(self, module_name, pos): + error("cython.%s is not available" % module_name, pos) + + def find_submodule(self, module_name): + entry = self.entries.get(module_name, None) + if not entry: + self.load_cythonscope() + entry = self.entries.get(module_name, None) + + if entry and entry.as_module: + return entry.as_module + else: + # TODO: fix find_submodule control flow so that we're not + # expected to create a submodule here (to protect CythonScope's + # possible immutability). Hack ourselves out of the situation + # for now. + raise error((StringSourceDescriptor(u"cython", u""), 0, 0), + "cython.%s is not available" % module_name) + + def lookup_qualified_name(self, qname): + # ExprNode.as_cython_attribute generates qnames and we untangle it here... + name_path = qname.split(u'.') + scope = self + while len(name_path) > 1: + scope = scope.lookup_here(name_path[0]) + if scope: + scope = scope.as_module + del name_path[0] + if scope is None: + return None + else: + return scope.lookup_here(name_path[0]) + + def populate_cython_scope(self): + # These are used to optimize isinstance in FinalOptimizePhase + type_object = self.declare_typedef( + 'PyTypeObject', + base_type = c_void_type, + pos = None, + cname = 'PyTypeObject') + type_object.is_void = True + type_object_type = type_object.type + + self.declare_cfunction( + 'PyObject_TypeCheck', + CFuncType(c_bint_type, [CFuncTypeArg("o", py_object_type, None), + CFuncTypeArg("t", c_ptr_type(type_object_type), None)]), + pos = None, + defining = 1, + cname = 'PyObject_TypeCheck') + + def load_cythonscope(self): + """ + Creates some entries for testing purposes and entries for + cython.array() and for cython.view.*. 
+ """ + if self._cythonscope_initialized: + return + + self._cythonscope_initialized = True + cython_testscope_utility_code.declare_in_scope( + self, cython_scope=self) + cython_test_extclass_utility_code.declare_in_scope( + self, cython_scope=self) + + # + # The view sub-scope + # + self.viewscope = viewscope = ModuleScope(u'view', self, None) + self.declare_module('view', viewscope, None).as_module = viewscope + viewscope.is_cython_builtin = True + viewscope.pxd_file_loaded = True + + cythonview_testscope_utility_code.declare_in_scope( + viewscope, cython_scope=self) + + view_utility_scope = MemoryView.view_utility_code.declare_in_scope( + self.viewscope, cython_scope=self, + whitelist=MemoryView.view_utility_whitelist) + + # self.entries["array"] = view_utility_scope.entries.pop("array") + + +def create_cython_scope(context): + # One could in fact probably make it a singleton, + # but not sure yet whether any code mutates it (which would kill reusing + # it across different contexts) + return CythonScope(context) + +# Load test utilities for the cython scope + +def load_testscope_utility(cy_util_name, **kwargs): + return CythonUtilityCode.load(cy_util_name, "TestCythonScope.pyx", **kwargs) + + +undecorated_methods_protos = UtilityCode(proto=u""" + /* These methods are undecorated and have therefore no prototype */ + static PyObject *__pyx_TestClass_cdef_method( + struct __pyx_TestClass_obj *self, int value); + static PyObject *__pyx_TestClass_cpdef_method( + struct __pyx_TestClass_obj *self, int value, int skip_dispatch); + static PyObject *__pyx_TestClass_def_method( + PyObject *self, PyObject *value); +""") + +cython_testscope_utility_code = load_testscope_utility("TestScope") + +test_cython_utility_dep = load_testscope_utility("TestDep") + +cython_test_extclass_utility_code = \ + load_testscope_utility("TestClass", name="TestClass", + requires=[undecorated_methods_protos, + test_cython_utility_dep]) + +cythonview_testscope_utility_code = load_testscope_utility("View.TestScope") diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/DebugFlags.py b/venv/lib/python3.8/site-packages/Cython/Compiler/DebugFlags.py new file mode 100644 index 0000000..e830ab1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/DebugFlags.py @@ -0,0 +1,21 @@ +# Can be enabled at the command line with --debug-xxx. + +debug_disposal_code = 0 +debug_temp_alloc = 0 +debug_coercion = 0 + +# Write comments into the C code that show where temporary variables +# are allocated and released. +debug_temp_code_comments = 0 + +# Write a call trace of the code generation phase into the C code. +debug_trace_code_generation = 0 + +# Do not replace exceptions with user-friendly error messages. +debug_no_exception_intercept = 0 + +# Print a message each time a new stage in the pipeline is entered. +debug_verbose_pipeline = 0 + +# Raise an exception when an error is encountered. +debug_exception_on_error = 0 diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Errors.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Errors.py new file mode 100644 index 0000000..9761b52 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Errors.py @@ -0,0 +1,265 @@ +# +# Errors +# + +from __future__ import absolute_import + +try: + from __builtin__ import basestring as any_string_type +except ImportError: + any_string_type = (bytes, str) + +import sys +from contextlib import contextmanager + +from ..Utils import open_new_file +from . import DebugFlags +from . 
import Options + + +class PyrexError(Exception): + pass + + +class PyrexWarning(Exception): + pass + + +def context(position): + source = position[0] + assert not (isinstance(source, any_string_type)), ( + "Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source) + try: + F = source.get_lines() + except UnicodeDecodeError: + # file has an encoding problem + s = u"[unprintable code]\n" + else: + s = u''.join(F[max(0, position[1]-6):position[1]]) + s = u'...\n%s%s^\n' % (s, u' '*(position[2]-1)) + s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60) + return s + +def format_position(position): + if position: + return u"%s:%d:%d: " % (position[0].get_error_description(), + position[1], position[2]) + return u'' + +def format_error(message, position): + if position: + pos_str = format_position(position) + cont = context(position) + message = u'\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or u'') + return message + +class CompileError(PyrexError): + + def __init__(self, position = None, message = u""): + self.position = position + self.message_only = message + self.formatted_message = format_error(message, position) + self.reported = False + # Deprecated and withdrawn in 2.6: + # self.message = message + Exception.__init__(self, self.formatted_message) + # Python Exception subclass pickling is broken, + # see http://bugs.python.org/issue1692335 + self.args = (position, message) + + def __str__(self): + return self.formatted_message + +class CompileWarning(PyrexWarning): + + def __init__(self, position = None, message = ""): + self.position = position + # Deprecated and withdrawn in 2.6: + # self.message = message + Exception.__init__(self, format_position(position) + message) + +class InternalError(Exception): + # If this is ever raised, there is a bug in the compiler. + + def __init__(self, message): + self.message_only = message + Exception.__init__(self, u"Internal compiler error: %s" + % message) + +class AbortError(Exception): + # Throw this to stop the compilation immediately. + + def __init__(self, message): + self.message_only = message + Exception.__init__(self, u"Abort error: %s" % message) + +class CompilerCrash(CompileError): + # raised when an unexpected exception occurs in a transform + def __init__(self, pos, context, message, cause, stacktrace=None): + if message: + message = u'\n' + message + else: + message = u'\n' + self.message_only = message + if context: + message = u"Compiler crash in %s%s" % (context, message) + if stacktrace: + import traceback + message += ( + u'\n\nCompiler crash traceback from this point on:\n' + + u''.join(traceback.format_tb(stacktrace))) + if cause: + if not stacktrace: + message += u'\n' + message += u'%s: %s' % (cause.__class__.__name__, cause) + CompileError.__init__(self, pos, message) + # Python Exception subclass pickling is broken, + # see http://bugs.python.org/issue1692335 + self.args = (pos, context, message, cause, stacktrace) + +class NoElementTreeInstalledException(PyrexError): + """raised when the user enabled options.gdb_debug but no ElementTree + implementation was found + """ + +listing_file = None +num_errors = 0 +echo_file = None + +def open_listing_file(path, echo_to_stderr = 1): + # Begin a new error listing. If path is None, no file + # is opened, the error counter is just reset. 
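Everything in this module reports through the same two sinks: an optional listing file and an optional echo stream, with num_errors counting as it goes. The shape of that flow, reduced to a self-contained toy (a closure and an io buffer stand in for the real module-level globals):

import io
import sys

def make_reporter(listing=None, echo=sys.stderr):
    # Schematic of the listing_file/echo_file split in Errors.py:
    # each line goes to both sinks when they are set, and a counter
    # records how many errors were seen.
    state = {"num_errors": 0}
    def report(line):
        state["num_errors"] += 1
        if listing:
            listing.write(line + "\n")
        if echo:
            echo.write(line + "\n")
    return report, state

buf = io.StringIO()
report, state = make_reporter(listing=buf, echo=None)
report("test.pyx:3:0: undeclared name")
assert state["num_errors"] == 1 and "undeclared" in buf.getvalue()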
+ global listing_file, num_errors, echo_file + if path is not None: + listing_file = open_new_file(path) + else: + listing_file = None + if echo_to_stderr: + echo_file = sys.stderr + else: + echo_file = None + num_errors = 0 + +def close_listing_file(): + global listing_file + if listing_file: + listing_file.close() + listing_file = None + +def report_error(err, use_stack=True): + if error_stack and use_stack: + error_stack[-1].append(err) + else: + global num_errors + # See Main.py for why dual reporting occurs. Quick fix for now. + if err.reported: return + err.reported = True + try: line = u"%s\n" % err + except UnicodeEncodeError: + # Python <= 2.5 does this for non-ASCII Unicode exceptions + line = format_error(getattr(err, 'message_only', "[unprintable exception message]"), + getattr(err, 'position', None)) + u'\n' + if listing_file: + try: listing_file.write(line) + except UnicodeEncodeError: + listing_file.write(line.encode('ASCII', 'replace')) + if echo_file: + try: echo_file.write(line) + except UnicodeEncodeError: + echo_file.write(line.encode('ASCII', 'replace')) + num_errors += 1 + if Options.fast_fail: + raise AbortError("fatal errors") + + +def error(position, message): + #print("Errors.error:", repr(position), repr(message)) ### + if position is None: + raise InternalError(message) + err = CompileError(position, message) + if DebugFlags.debug_exception_on_error: raise Exception(err) # debug + report_error(err) + return err + + +LEVEL = 1 # warn about all errors level 1 or higher + + +def message(position, message, level=1): + if level < LEVEL: + return + warn = CompileWarning(position, message) + line = "note: %s\n" % warn + if listing_file: + listing_file.write(line) + if echo_file: + echo_file.write(line) + return warn + + +def warning(position, message, level=0): + if level < LEVEL: + return + if Options.warning_errors and position: + return error(position, message) + warn = CompileWarning(position, message) + line = "warning: %s\n" % warn + if listing_file: + listing_file.write(line) + if echo_file: + echo_file.write(line) + return warn + + +_warn_once_seen = {} +def warn_once(position, message, level=0): + if level < LEVEL or message in _warn_once_seen: + return + warn = CompileWarning(position, message) + line = "warning: %s\n" % warn + if listing_file: + listing_file.write(line) + if echo_file: + echo_file.write(line) + _warn_once_seen[message] = True + return warn + + +# These functions can be used to momentarily suppress errors. 
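Concretely, the suppression below works by pushing a fresh list onto a stack that report_error consults first: while the stack is non-empty, errors are held rather than shown, and the caller later decides whether to report or drop them. A self-contained sketch of that pattern (toy stand-ins mirroring error_stack and local_errors, not the importable module):

from contextlib import contextmanager

error_stack = []   # each entry is a list of held errors
reported = []      # stands in for the listing/echo sinks

def report_error(err):
    if error_stack:
        error_stack[-1].append(err)  # held, not shown yet
    else:
        reported.append(err)         # actually reported

@contextmanager
def local_errors(ignore=False):
    errors = []
    error_stack.append(errors)
    try:
        yield errors
    finally:
        error_stack.pop()
        if not ignore:
            for err in errors:
                report_error(err)

# Speculative analysis: errors vanish if another code path is taken.
with local_errors(ignore=True) as errs:
    report_error("not a tuple type")
assert errs and not reported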
+ +error_stack = [] + + +def hold_errors(): + error_stack.append([]) + + +def release_errors(ignore=False): + held_errors = error_stack.pop() + if not ignore: + for err in held_errors: + report_error(err) + + +def held_errors(): + return error_stack[-1] + + +# same as context manager: + +@contextmanager +def local_errors(ignore=False): + errors = [] + error_stack.append(errors) + try: + yield errors + finally: + release_errors(ignore=ignore) + + +# this module needs a redesign to support parallel cythonisation, but +# for now, the following works at least in sequential compiler runs + +def reset(): + _warn_once_seen.clear() + del error_stack[:] diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/ExprNodes.py b/venv/lib/python3.8/site-packages/Cython/Compiler/ExprNodes.py new file mode 100644 index 0000000..7d184e6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/ExprNodes.py @@ -0,0 +1,13674 @@ +# +# Parse tree nodes for expressions +# + +from __future__ import absolute_import + +import cython +cython.declare(error=object, warning=object, warn_once=object, InternalError=object, + CompileError=object, UtilityCode=object, TempitaUtilityCode=object, + StringEncoding=object, operator=object, local_errors=object, report_error=object, + Naming=object, Nodes=object, PyrexTypes=object, py_object_type=object, + list_type=object, tuple_type=object, set_type=object, dict_type=object, + unicode_type=object, str_type=object, bytes_type=object, type_type=object, + Builtin=object, Symtab=object, Utils=object, find_coercion_error=object, + debug_disposal_code=object, debug_temp_alloc=object, debug_coercion=object, + bytearray_type=object, slice_type=object, _py_int_types=object, + IS_PYTHON3=cython.bint) + +import re +import sys +import copy +import os.path +import operator + +from .Errors import ( + error, warning, InternalError, CompileError, report_error, local_errors) +from .Code import UtilityCode, TempitaUtilityCode +from . import StringEncoding +from . import Naming +from . import Nodes +from .Nodes import Node, utility_code_for_imports, analyse_type_annotation +from . import PyrexTypes +from .PyrexTypes import py_object_type, c_long_type, typecast, error_type, \ + unspecified_type +from . import TypeSlots +from .Builtin import list_type, tuple_type, set_type, dict_type, type_type, \ + unicode_type, str_type, bytes_type, bytearray_type, basestring_type, slice_type +from . import Builtin +from . import Symtab +from .. import Utils +from .Annotate import AnnotationItem +from . 
import Future +from ..Debugging import print_call_chain +from .DebugFlags import debug_disposal_code, debug_temp_alloc, \ + debug_coercion +from .Pythran import (to_pythran, is_pythran_supported_type, is_pythran_supported_operation_type, + is_pythran_expr, pythran_func_type, pythran_binop_type, pythran_unaryop_type, has_np_pythran, + pythran_indexing_code, pythran_indexing_type, is_pythran_supported_node_or_none, pythran_type, + pythran_is_numpy_func_supported, pythran_get_func_include_file, pythran_functor) +from .PyrexTypes import PythranExpr + +try: + from __builtin__ import basestring +except ImportError: + # Python 3 + basestring = str + any_string_type = (bytes, str) +else: + # Python 2 + any_string_type = (bytes, unicode) + + +if sys.version_info[0] >= 3: + IS_PYTHON3 = True + _py_int_types = int +else: + IS_PYTHON3 = False + _py_int_types = (int, long) + + +class NotConstant(object): + _obj = None + + def __new__(cls): + if NotConstant._obj is None: + NotConstant._obj = super(NotConstant, cls).__new__(cls) + + return NotConstant._obj + + def __repr__(self): + return "" + +not_a_constant = NotConstant() +constant_value_not_set = object() + +# error messages when coercing from key[0] to key[1] +coercion_error_dict = { + # string related errors + (unicode_type, str_type): ("Cannot convert Unicode string to 'str' implicitly." + " This is not portable and requires explicit encoding."), + (unicode_type, bytes_type): "Cannot convert Unicode string to 'bytes' implicitly, encoding required.", + (unicode_type, PyrexTypes.c_char_ptr_type): "Unicode objects only support coercion to Py_UNICODE*.", + (unicode_type, PyrexTypes.c_const_char_ptr_type): "Unicode objects only support coercion to Py_UNICODE*.", + (unicode_type, PyrexTypes.c_uchar_ptr_type): "Unicode objects only support coercion to Py_UNICODE*.", + (unicode_type, PyrexTypes.c_const_uchar_ptr_type): "Unicode objects only support coercion to Py_UNICODE*.", + (bytes_type, unicode_type): "Cannot convert 'bytes' object to unicode implicitly, decoding required", + (bytes_type, str_type): "Cannot convert 'bytes' object to str implicitly. This is not portable to Py3.", + (bytes_type, basestring_type): ("Cannot convert 'bytes' object to basestring implicitly." + " This is not portable to Py3."), + (bytes_type, PyrexTypes.c_py_unicode_ptr_type): "Cannot convert 'bytes' object to Py_UNICODE*, use 'unicode'.", + (bytes_type, PyrexTypes.c_const_py_unicode_ptr_type): ( + "Cannot convert 'bytes' object to Py_UNICODE*, use 'unicode'."), + (basestring_type, bytes_type): "Cannot convert 'basestring' object to bytes implicitly. This is not portable.", + (str_type, unicode_type): ("str objects do not support coercion to unicode," + " use a unicode string literal instead (u'')"), + (str_type, bytes_type): "Cannot convert 'str' to 'bytes' implicitly. 
This is not portable.", + (str_type, PyrexTypes.c_char_ptr_type): "'str' objects do not support coercion to C types (use 'bytes'?).", + (str_type, PyrexTypes.c_const_char_ptr_type): "'str' objects do not support coercion to C types (use 'bytes'?).", + (str_type, PyrexTypes.c_uchar_ptr_type): "'str' objects do not support coercion to C types (use 'bytes'?).", + (str_type, PyrexTypes.c_const_uchar_ptr_type): "'str' objects do not support coercion to C types (use 'bytes'?).", + (str_type, PyrexTypes.c_py_unicode_ptr_type): "'str' objects do not support coercion to C types (use 'unicode'?).", + (str_type, PyrexTypes.c_const_py_unicode_ptr_type): ( + "'str' objects do not support coercion to C types (use 'unicode'?)."), + (PyrexTypes.c_char_ptr_type, unicode_type): "Cannot convert 'char*' to unicode implicitly, decoding required", + (PyrexTypes.c_const_char_ptr_type, unicode_type): ( + "Cannot convert 'char*' to unicode implicitly, decoding required"), + (PyrexTypes.c_uchar_ptr_type, unicode_type): "Cannot convert 'char*' to unicode implicitly, decoding required", + (PyrexTypes.c_const_uchar_ptr_type, unicode_type): ( + "Cannot convert 'char*' to unicode implicitly, decoding required"), +} + +def find_coercion_error(type_tuple, default, env): + err = coercion_error_dict.get(type_tuple) + if err is None: + return default + elif (env.directives['c_string_encoding'] and + any(t in type_tuple for t in (PyrexTypes.c_char_ptr_type, PyrexTypes.c_uchar_ptr_type, + PyrexTypes.c_const_char_ptr_type, PyrexTypes.c_const_uchar_ptr_type))): + if type_tuple[1].is_pyobject: + return default + elif env.directives['c_string_encoding'] in ('ascii', 'default'): + return default + else: + return "'%s' objects do not support coercion to C types with non-ascii or non-default c_string_encoding" % type_tuple[0].name + else: + return err + + +def default_str_type(env): + return { + 'bytes': bytes_type, + 'bytearray': bytearray_type, + 'str': str_type, + 'unicode': unicode_type + }.get(env.directives['c_string_type']) + + +def check_negative_indices(*nodes): + """ + Raise a warning on nodes that are known to have negative numeric values. + Used to find (potential) bugs inside of "wraparound=False" sections. 
+ """ + for node in nodes: + if node is None or ( + not isinstance(node.constant_result, _py_int_types) and + not isinstance(node.constant_result, float)): + continue + if node.constant_result < 0: + warning(node.pos, + "the result of using negative indices inside of " + "code sections marked as 'wraparound=False' is " + "undefined", level=1) + + +def infer_sequence_item_type(env, seq_node, index_node=None, seq_type=None): + if not seq_node.is_sequence_constructor: + if seq_type is None: + seq_type = seq_node.infer_type(env) + if seq_type is tuple_type: + # tuples are immutable => we can safely follow assignments + if seq_node.cf_state and len(seq_node.cf_state) == 1: + try: + seq_node = seq_node.cf_state[0].rhs + except AttributeError: + pass + if seq_node is not None and seq_node.is_sequence_constructor: + if index_node is not None and index_node.has_constant_result(): + try: + item = seq_node.args[index_node.constant_result] + except (ValueError, TypeError, IndexError): + pass + else: + return item.infer_type(env) + # if we're lucky, all items have the same type + item_types = set([item.infer_type(env) for item in seq_node.args]) + if len(item_types) == 1: + return item_types.pop() + return None + + +def make_dedup_key(outer_type, item_nodes): + """ + Recursively generate a deduplication key from a sequence of values. + Includes Cython node types to work around the fact that (1, 2.0) == (1.0, 2), for example. + + @param outer_type: The type of the outer container. + @param item_nodes: A sequence of constant nodes that will be traversed recursively. + @return: A tuple that can be used as a dict key for deduplication. + """ + item_keys = [ + (py_object_type, None, type(None)) if node is None + # For sequences and their "mult_factor", see TupleNode. + else make_dedup_key(node.type, [node.mult_factor if node.is_literal else None] + node.args) if node.is_sequence_constructor + else make_dedup_key(node.type, (node.start, node.stop, node.step)) if node.is_slice + # For constants, look at the Python value type if we don't know the concrete Cython type. + else (node.type, node.constant_result, + type(node.constant_result) if node.type is py_object_type else None) if node.has_constant_result() + else None # something we cannot handle => short-circuit below + for node in item_nodes + ] + if None in item_keys: + return None + return outer_type, tuple(item_keys) + + +# Returns a block of code to translate the exception, +# plus a boolean indicating whether to check for Python exceptions. +def get_exception_handler(exception_value): + if exception_value is None: + return "__Pyx_CppExn2PyErr();", False + elif (exception_value.type == PyrexTypes.c_char_type + and exception_value.value == '*'): + return "__Pyx_CppExn2PyErr();", True + elif exception_value.type.is_pyobject: + return ( + 'try { throw; } catch(const std::exception& exn) {' + 'PyErr_SetString(%s, exn.what());' + '} catch(...) 
{ PyErr_SetNone(%s); }' % ( + exception_value.entry.cname, + exception_value.entry.cname), + False) + else: + return ( + '%s(); if (!PyErr_Occurred())' + 'PyErr_SetString(PyExc_RuntimeError, ' + '"Error converting c++ exception.");' % ( + exception_value.entry.cname), + False) + +def maybe_check_py_error(code, check_py_exception, pos, nogil): + if check_py_exception: + if nogil: + code.putln(code.error_goto_if("__Pyx_ErrOccurredWithGIL()", pos)) + else: + code.putln(code.error_goto_if("PyErr_Occurred()", pos)) + +def translate_cpp_exception(code, pos, inside, py_result, exception_value, nogil): + raise_py_exception, check_py_exception = get_exception_handler(exception_value) + code.putln("try {") + code.putln("%s" % inside) + if py_result: + code.putln(code.error_goto_if_null(py_result, pos)) + maybe_check_py_error(code, check_py_exception, pos, nogil) + code.putln("} catch(...) {") + if nogil: + code.put_ensure_gil(declare_gilstate=True) + code.putln(raise_py_exception) + if nogil: + code.put_release_ensured_gil() + code.putln(code.error_goto(pos)) + code.putln("}") + +# Used to handle the case where an lvalue expression and an overloaded assignment +# both have an exception declaration. +def translate_double_cpp_exception(code, pos, lhs_type, lhs_code, rhs_code, + lhs_exc_val, assign_exc_val, nogil): + handle_lhs_exc, lhc_check_py_exc = get_exception_handler(lhs_exc_val) + handle_assignment_exc, assignment_check_py_exc = get_exception_handler(assign_exc_val) + code.putln("try {") + code.putln(lhs_type.declaration_code("__pyx_local_lvalue = %s;" % lhs_code)) + maybe_check_py_error(code, lhc_check_py_exc, pos, nogil) + code.putln("try {") + code.putln("__pyx_local_lvalue = %s;" % rhs_code) + maybe_check_py_error(code, assignment_check_py_exc, pos, nogil) + # Catch any exception from the overloaded assignment. + code.putln("} catch(...) {") + if nogil: + code.put_ensure_gil(declare_gilstate=True) + code.putln(handle_assignment_exc) + if nogil: + code.put_release_ensured_gil() + code.putln(code.error_goto(pos)) + code.putln("}") + # Catch any exception from evaluating lhs. + code.putln("} catch(...) {") + if nogil: + code.put_ensure_gil(declare_gilstate=True) + code.putln(handle_lhs_exc) + if nogil: + code.put_release_ensured_gil() + code.putln(code.error_goto(pos)) + code.putln('}') + + +class ExprNode(Node): + # subexprs [string] Class var holding names of subexpr node attrs + # type PyrexType Type of the result + # result_code string Code fragment + # result_ctype string C type of result_code if different from type + # is_temp boolean Result is in a temporary variable + # is_sequence_constructor + # boolean Is a list or tuple constructor expression + # is_starred boolean Is a starred expression (e.g. '*a') + # saved_subexpr_nodes + # [ExprNode or [ExprNode or None] or None] + # Cached result of subexpr_nodes() + # use_managed_ref boolean use ref-counted temps/assignments/etc. + # result_is_used boolean indicates that the result will be dropped and the + # is_numpy_attribute boolean Is a Numpy module attribute + # result_code/temp_result can safely be set to None + # annotation ExprNode or None PEP526 annotation for names or expressions + + result_ctype = None + type = None + annotation = None + temp_code = None + old_temp = None # error checker for multiple frees etc. 
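The old_temp field noted above exists only so that releasing a temp twice fails loudly instead of silently corrupting the temp pool. Schematically, the guard implemented by allocate_temp_result/release_temp_result further down amounts to this (toy class, simplified messages):

class TempGuard:
    temp_code = None
    old_temp = None

    def allocate(self, name):
        if self.temp_code:
            raise RuntimeError("temp allocated multiple times")
        self.temp_code = name

    def release(self):
        if not self.temp_code:
            if self.old_temp:
                raise RuntimeError(
                    "temp %s released multiple times" % self.old_temp)
            raise RuntimeError("no temp, but release requested")
        self.old_temp = self.temp_code  # kept purely for diagnostics
        self.temp_code = None

g = TempGuard()
g.allocate("__pyx_t_1")
g.release()
try:
    g.release()
except RuntimeError as exc:
    assert "released multiple times" in str(exc)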
+ use_managed_ref = True # can be set by optimisation transforms + result_is_used = True + is_numpy_attribute = False + + # The Analyse Expressions phase for expressions is split + # into two sub-phases: + # + # Analyse Types + # Determines the result type of the expression based + # on the types of its sub-expressions, and inserts + # coercion nodes into the expression tree where needed. + # Marks nodes which will need to have temporary variables + # allocated. + # + # Allocate Temps + # Allocates temporary variables where needed, and fills + # in the result_code field of each node. + # + # ExprNode provides some convenience routines which + # perform both of the above phases. These should only + # be called from statement nodes, and only when no + # coercion nodes need to be added around the expression + # being analysed. In that case, the above two phases + # should be invoked separately. + # + # Framework code in ExprNode provides much of the common + # processing for the various phases. It makes use of the + # 'subexprs' class attribute of ExprNodes, which should + # contain a list of the names of attributes which can + # hold sub-nodes or sequences of sub-nodes. + # + # The framework makes use of a number of abstract methods. + # Their responsibilities are as follows. + # + # Declaration Analysis phase + # + # analyse_target_declaration + # Called during the Analyse Declarations phase to analyse + # the LHS of an assignment or argument of a del statement. + # Nodes which cannot be the LHS of an assignment need not + # implement it. + # + # Expression Analysis phase + # + # analyse_types + # - Call analyse_types on all sub-expressions. + # - Check operand types, and wrap coercion nodes around + # sub-expressions where needed. + # - Set the type of this node. + # - If a temporary variable will be required for the + # result, set the is_temp flag of this node. + # + # analyse_target_types + # Called during the Analyse Types phase to analyse + # the LHS of an assignment or argument of a del + # statement. Similar responsibilities to analyse_types. + # + # target_code + # Called by the default implementation of allocate_target_temps. + # Should return a C lvalue for assigning to the node. The default + # implementation calls calculate_result_code. + # + # check_const + # - Check that this node and its subnodes form a + # legal constant expression. If so, do nothing, + # otherwise call not_const. + # + # The default implementation of check_const + # assumes that the expression is not constant. + # + # check_const_addr + # - Same as check_const, except check that the + # expression is a C lvalue whose address is + # constant. Otherwise, call addr_not_const. + # + # The default implementation of calc_const_addr + # assumes that the expression is not a constant + # lvalue. + # + # Code Generation phase + # + # generate_evaluation_code + # - Call generate_evaluation_code for sub-expressions. + # - Perform the functions of generate_result_code + # (see below). + # - If result is temporary, call generate_disposal_code + # on all sub-expressions. + # + # A default implementation of generate_evaluation_code + # is provided which uses the following abstract methods: + # + # generate_result_code + # - Generate any C statements necessary to calculate + # the result of this node from the results of its + # sub-expressions. + # + # calculate_result_code + # - Should return a C code fragment evaluating to the + # result. This is only called when the result is not + # a temporary. 
+ # + # generate_assignment_code + # Called on the LHS of an assignment. + # - Call generate_evaluation_code for sub-expressions. + # - Generate code to perform the assignment. + # - If the assignment absorbed a reference, call + # generate_post_assignment_code on the RHS, + # otherwise call generate_disposal_code on it. + # + # generate_deletion_code + # Called on an argument of a del statement. + # - Call generate_evaluation_code for sub-expressions. + # - Generate code to perform the deletion. + # - Call generate_disposal_code on all sub-expressions. + # + # + + is_sequence_constructor = False + is_dict_literal = False + is_set_literal = False + is_string_literal = False + is_attribute = False + is_subscript = False + is_slice = False + + is_buffer_access = False + is_memview_index = False + is_memview_slice = False + is_memview_broadcast = False + is_memview_copy_assignment = False + + saved_subexpr_nodes = None + is_temp = False + is_target = False + is_starred = False + + constant_result = constant_value_not_set + + child_attrs = property(fget=operator.attrgetter('subexprs')) + + def not_implemented(self, method_name): + print_call_chain(method_name, "not implemented") ### + raise InternalError( + "%s.%s not implemented" % + (self.__class__.__name__, method_name)) + + def is_lvalue(self): + return 0 + + def is_addressable(self): + return self.is_lvalue() and not self.type.is_memoryviewslice + + def is_ephemeral(self): + # An ephemeral node is one whose result is in + # a Python temporary and we suspect there are no + # other references to it. Certain operations are + # disallowed on such values, since they are + # likely to result in a dangling pointer. + return self.type.is_pyobject and self.is_temp + + def subexpr_nodes(self): + # Extract a list of subexpression nodes based + # on the contents of the subexprs class attribute. + nodes = [] + for name in self.subexprs: + item = getattr(self, name) + if item is not None: + if type(item) is list: + nodes.extend(item) + else: + nodes.append(item) + return nodes + + def result(self): + if self.is_temp: + #if not self.temp_code: + # pos = (os.path.basename(self.pos[0].get_description()),) + self.pos[1:] if self.pos else '(?)' + # raise RuntimeError("temp result name not set in %s at %r" % ( + # self.__class__.__name__, pos)) + return self.temp_code + else: + return self.calculate_result_code() + + def pythran_result(self, type_=None): + if is_pythran_supported_node_or_none(self): + return to_pythran(self) + + assert(type_ is not None) + return to_pythran(self, type_) + + def is_c_result_required(self): + """ + Subtypes may return False here if result temp allocation can be skipped. + """ + return True + + def result_as(self, type = None): + # Return the result code cast to the specified C type. + if (self.is_temp and self.type.is_pyobject and + type != py_object_type): + # Allocated temporaries are always PyObject *, which may not + # reflect the actual type (e.g. an extension type) + return typecast(type, py_object_type, self.result()) + return typecast(type, self.ctype(), self.result()) + + def py_result(self): + # Return the result code cast to PyObject *. + return self.result_as(py_object_type) + + def ctype(self): + # Return the native C type of the result (i.e. the + # C type of the result_code expression). + return self.result_ctype or self.type + + def get_constant_c_result_code(self): + # Return the constant value of this node as a result code + # string, or None if the node is not constant. 
This method + # can be called when the constant result code is required + # before the code generation phase. + # + # The return value is a string that can represent a simple C + # value, a constant C name or a constant C expression. If the + # node type depends on Python code, this must return None. + return None + + def calculate_constant_result(self): + # Calculate the constant compile time result value of this + # expression and store it in ``self.constant_result``. Does + # nothing by default, thus leaving ``self.constant_result`` + # unknown. If valid, the result can be an arbitrary Python + # value. + # + # This must only be called when it is assured that all + # sub-expressions have a valid constant_result value. The + # ConstantFolding transform will do this. + pass + + def has_constant_result(self): + return self.constant_result is not constant_value_not_set and \ + self.constant_result is not not_a_constant + + def compile_time_value(self, denv): + # Return value of compile-time expression, or report error. + error(self.pos, "Invalid compile-time expression") + + def compile_time_value_error(self, e): + error(self.pos, "Error in compile-time expression: %s: %s" % ( + e.__class__.__name__, e)) + + # ------------- Declaration Analysis ---------------- + + def analyse_target_declaration(self, env): + error(self.pos, "Cannot assign to or delete this") + + # ------------- Expression Analysis ---------------- + + def analyse_const_expression(self, env): + # Called during the analyse_declarations phase of a + # constant expression. Analyses the expression's type, + # checks whether it is a legal const expression, + # and determines its value. + node = self.analyse_types(env) + node.check_const() + return node + + def analyse_expressions(self, env): + # Convenience routine performing both the Type + # Analysis and Temp Allocation phases for a whole + # expression. + return self.analyse_types(env) + + def analyse_target_expression(self, env, rhs): + # Convenience routine performing both the Type + # Analysis and Temp Allocation phases for the LHS of + # an assignment. + return self.analyse_target_types(env) + + def analyse_boolean_expression(self, env): + # Analyse expression and coerce to a boolean. + node = self.analyse_types(env) + bool = node.coerce_to_boolean(env) + return bool + + def analyse_temp_boolean_expression(self, env): + # Analyse boolean expression and coerce result into + # a temporary. This is used when a branch is to be + # performed on the result and we won't have an + # opportunity to ensure disposal code is executed + # afterwards. By forcing the result into a temporary, + # we ensure that all disposal has been done by the + # time we get the result. + node = self.analyse_types(env) + return node.coerce_to_boolean(env).coerce_to_simple(env) + + # --------------- Type Inference ----------------- + + def type_dependencies(self, env): + # Returns the list of entries whose types must be determined + # before the type of self can be inferred. + if hasattr(self, 'type') and self.type is not None: + return () + return sum([node.type_dependencies(env) for node in self.subexpr_nodes()], ()) + + def infer_type(self, env): + # Attempt to deduce the type of self. + # Differs from analyse_types as it avoids unnecessary + # analysis of subexpressions, but can assume everything + # in self.type_dependencies() has been resolved. 
+ if hasattr(self, 'type') and self.type is not None: + return self.type + elif hasattr(self, 'entry') and self.entry is not None: + return self.entry.type + else: + self.not_implemented("infer_type") + + def nonlocally_immutable(self): + # Returns whether this variable is a safe reference, i.e. + # can't be modified as part of globals or closures. + return self.is_literal or self.is_temp or self.type.is_array or self.type.is_cfunction + + def inferable_item_node(self, index=0): + """ + Return a node that represents the (type) result of an indexing operation, + e.g. for tuple unpacking or iteration. + """ + return IndexNode(self.pos, base=self, index=IntNode( + self.pos, value=str(index), constant_result=index, type=PyrexTypes.c_py_ssize_t_type)) + + # --------------- Type Analysis ------------------ + + def analyse_as_module(self, env): + # If this node can be interpreted as a reference to a + # cimported module, return its scope, else None. + return None + + def analyse_as_type(self, env): + # If this node can be interpreted as a reference to a + # type, return that type, else None. + return None + + def analyse_as_extension_type(self, env): + # If this node can be interpreted as a reference to an + # extension type or builtin type, return its type, else None. + return None + + def analyse_types(self, env): + self.not_implemented("analyse_types") + + def analyse_target_types(self, env): + return self.analyse_types(env) + + def nogil_check(self, env): + # By default, any expression based on Python objects is + # prevented in nogil environments. Subtypes must override + # this if they can work without the GIL. + if self.type and self.type.is_pyobject: + self.gil_error() + + def gil_assignment_check(self, env): + if env.nogil and self.type.is_pyobject: + error(self.pos, "Assignment of Python object not allowed without gil") + + def check_const(self): + self.not_const() + return False + + def not_const(self): + error(self.pos, "Not allowed in a constant expression") + + def check_const_addr(self): + self.addr_not_const() + return False + + def addr_not_const(self): + error(self.pos, "Address is not constant") + + # ----------------- Result Allocation ----------------- + + def result_in_temp(self): + # Return true if result is in a temporary owned by + # this node or one of its subexpressions. Overridden + # by certain nodes which can share the result of + # a subnode. + return self.is_temp + + def target_code(self): + # Return code fragment for use as LHS of a C assignment. + return self.calculate_result_code() + + def calculate_result_code(self): + self.not_implemented("calculate_result_code") + +# def release_target_temp(self, env): +# # Release temporaries used by LHS of an assignment. 
+# self.release_subexpr_temps(env) + + def allocate_temp_result(self, code): + if self.temp_code: + raise RuntimeError("Temp allocated multiple times in %r: %r" % (self.__class__.__name__, self.pos)) + type = self.type + if not type.is_void: + if type.is_pyobject: + type = PyrexTypes.py_object_type + elif not (self.result_is_used or type.is_memoryviewslice or self.is_c_result_required()): + self.temp_code = None + return + self.temp_code = code.funcstate.allocate_temp( + type, manage_ref=self.use_managed_ref) + else: + self.temp_code = None + + def release_temp_result(self, code): + if not self.temp_code: + if not self.result_is_used: + # not used anyway, so ignore if not set up + return + pos = (os.path.basename(self.pos[0].get_description()),) + self.pos[1:] if self.pos else '(?)' + if self.old_temp: + raise RuntimeError("temp %s released multiple times in %s at %r" % ( + self.old_temp, self.__class__.__name__, pos)) + else: + raise RuntimeError("no temp, but release requested in %s at %r" % ( + self.__class__.__name__, pos)) + code.funcstate.release_temp(self.temp_code) + self.old_temp = self.temp_code + self.temp_code = None + + # ---------------- Code Generation ----------------- + + def make_owned_reference(self, code): + """ + If result is a pyobject, make sure we own a reference to it. + If the result is in a temp, it is already a new reference. + """ + if self.type.is_pyobject and not self.result_in_temp(): + code.put_incref(self.result(), self.ctype()) + + def make_owned_memoryviewslice(self, code): + """ + Make sure we own the reference to this memoryview slice. + """ + if not self.result_in_temp(): + code.put_incref_memoryviewslice(self.result(), + have_gil=self.in_nogil_context) + + def generate_evaluation_code(self, code): + # Generate code to evaluate this node and + # its sub-expressions, and dispose of any + # temporary results of its sub-expressions. + self.generate_subexpr_evaluation_code(code) + + code.mark_pos(self.pos) + if self.is_temp: + self.allocate_temp_result(code) + + self.generate_result_code(code) + if self.is_temp and not (self.type.is_string or self.type.is_pyunicode_ptr): + # If we are temp we do not need to wait until this node is disposed + # before disposing children. + self.generate_subexpr_disposal_code(code) + self.free_subexpr_temps(code) + + def generate_subexpr_evaluation_code(self, code): + for node in self.subexpr_nodes(): + node.generate_evaluation_code(code) + + def generate_result_code(self, code): + self.not_implemented("generate_result_code") + + def generate_disposal_code(self, code): + if self.is_temp: + if self.type.is_string or self.type.is_pyunicode_ptr: + # postponed from self.generate_evaluation_code() + self.generate_subexpr_disposal_code(code) + self.free_subexpr_temps(code) + if self.result(): + if self.type.is_pyobject: + code.put_decref_clear(self.result(), self.ctype()) + elif self.type.is_memoryviewslice: + code.put_xdecref_memoryviewslice( + self.result(), have_gil=not self.in_nogil_context) + code.putln("%s.memview = NULL;" % self.result()) + code.putln("%s.data = NULL;" % self.result()) + else: + # Already done if self.is_temp + self.generate_subexpr_disposal_code(code) + + def generate_subexpr_disposal_code(self, code): + # Generate code to dispose of temporary results + # of all sub-expressions. 
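The ordering that generate_evaluation_code and these disposal hooks maintain is: evaluate the children, compute this node's own result into its temp, then dispose of the children's temps (the real code postpones that last step for C string results, as the "postponed from self.generate_evaluation_code()" comments above note). A toy trace of the ordering, with illustrative names only:

def evaluate(node, emit):
    for child in node.children:
        evaluate(child, emit)
    emit("compute %s" % node.name)
    # once our own result is safe in a temp, child temps can go
    for child in node.children:
        emit("dispose %s" % child.name)

class N:
    def __init__(self, name, children=()):
        self.name, self.children = name, children

lines = []
evaluate(N("add", [N("a"), N("b")]), lines.append)
assert lines == ["compute a", "compute b", "compute add",
                 "dispose a", "dispose b"]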
+ for node in self.subexpr_nodes(): + node.generate_disposal_code(code) + + def generate_post_assignment_code(self, code): + if self.is_temp: + if self.type.is_string or self.type.is_pyunicode_ptr: + # postponed from self.generate_evaluation_code() + self.generate_subexpr_disposal_code(code) + self.free_subexpr_temps(code) + elif self.type.is_pyobject: + code.putln("%s = 0;" % self.result()) + elif self.type.is_memoryviewslice: + code.putln("%s.memview = NULL;" % self.result()) + code.putln("%s.data = NULL;" % self.result()) + else: + self.generate_subexpr_disposal_code(code) + + def generate_assignment_code(self, rhs, code, overloaded_assignment=False, + exception_check=None, exception_value=None): + # Stub method for nodes which are not legal as + # the LHS of an assignment. An error will have + # been reported earlier. + pass + + def generate_deletion_code(self, code, ignore_nonexisting=False): + # Stub method for nodes that are not legal as + # the argument of a del statement. An error + # will have been reported earlier. + pass + + def free_temps(self, code): + if self.is_temp: + if not self.type.is_void: + self.release_temp_result(code) + else: + self.free_subexpr_temps(code) + + def free_subexpr_temps(self, code): + for sub in self.subexpr_nodes(): + sub.free_temps(code) + + def generate_function_definitions(self, env, code): + pass + + # ---------------- Annotation --------------------- + + def annotate(self, code): + for node in self.subexpr_nodes(): + node.annotate(code) + + # ----------------- Coercion ---------------------- + + def coerce_to(self, dst_type, env): + # Coerce the result so that it can be assigned to + # something of type dst_type. If processing is necessary, + # wraps this node in a coercion node and returns that. + # Otherwise, returns this node unchanged. + # + # This method is called during the analyse_expressions + # phase of the src_node's processing. + # + # Note that subclasses that override this (especially + # ConstNodes) must not (re-)set their own .type attribute + # here. Since expression nodes may turn up in different + # places in the tree (e.g. inside of CloneNodes in cascaded + # assignments), this method must return a new node instance + # if it changes the type. + # + src = self + src_type = self.type + + if self.check_for_coercion_error(dst_type, env): + return self + + used_as_reference = dst_type.is_reference + if used_as_reference and not src_type.is_reference: + dst_type = dst_type.ref_base_type + + if src_type.is_const: + src_type = src_type.const_base_type + + if src_type.is_fused or dst_type.is_fused: + # See if we are coercing a fused function to a pointer to a + # specialized function + if (src_type.is_cfunction and not dst_type.is_fused and + dst_type.is_ptr and dst_type.base_type.is_cfunction): + + dst_type = dst_type.base_type + + for signature in src_type.get_all_specialized_function_types(): + if signature.same_as(dst_type): + src.type = signature + src.entry = src.type.entry + src.entry.used = True + return self + + if src_type.is_fused: + error(self.pos, "Type is not specialized") + elif src_type.is_null_ptr and dst_type.is_ptr: + # NULL can be implicitly cast to any pointer type + return self + else: + error(self.pos, "Cannot coerce to a type that is not specialized") + + self.type = error_type + return self + + if self.coercion_type is not None: + # This is purely for error checking purposes! 
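+            # (the throwaway NameNode below is coerced only so that an
+            #  incompatible coercion_type is reported at this position;
+            #  its result is discarded and `src` is processed as usual)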
+            node = NameNode(self.pos, name='', type=self.coercion_type)
+            node.coerce_to(dst_type, env)
+
+        if dst_type.is_memoryviewslice:
+            from . import MemoryView
+            if not src.type.is_memoryviewslice:
+                if src.type.is_pyobject:
+                    src = CoerceToMemViewSliceNode(src, dst_type, env)
+                elif src.type.is_array:
+                    src = CythonArrayNode.from_carray(src, env).coerce_to(dst_type, env)
+                elif not src_type.is_error:
+                    error(self.pos,
+                          "Cannot convert '%s' to memoryviewslice" % (src_type,))
+            else:
+                if src.type.writable_needed:
+                    dst_type.writable_needed = True
+                if not src.type.conforms_to(dst_type, broadcast=self.is_memview_broadcast,
+                                            copying=self.is_memview_copy_assignment):
+                    if src.type.dtype.same_as(dst_type.dtype):
+                        msg = "Memoryview '%s' not conformable to memoryview '%s'."
+                        tup = src.type, dst_type
+                    else:
+                        msg = "Different base types for memoryviews (%s, %s)"
+                        tup = src.type.dtype, dst_type.dtype
+
+                    error(self.pos, msg % tup)
+
+        elif dst_type.is_pyobject:
+            if not src.type.is_pyobject:
+                if dst_type is bytes_type and src.type.is_int:
+                    src = CoerceIntToBytesNode(src, env)
+                else:
+                    src = CoerceToPyTypeNode(src, env, type=dst_type)
+            if not src.type.subtype_of(dst_type):
+                if src.constant_result is not None:
+                    src = PyTypeTestNode(src, dst_type, env)
+        elif is_pythran_expr(dst_type) and is_pythran_supported_type(src.type):
+            # We let the compiler decide whether this is valid
+            return src
+        elif is_pythran_expr(src.type):
+            if is_pythran_supported_type(dst_type):
+                # Match the case where a pythran expr is assigned to a value, or vice versa.
+                # We let the C++ compiler decide whether this is valid or not!
+                return src
+            # Else, we need to convert the Pythran expression to a Python object
+            src = CoerceToPyTypeNode(src, env, type=dst_type)
+        elif src.type.is_pyobject:
+            if used_as_reference and dst_type.is_cpp_class:
+                warning(
+                    self.pos,
+                    "Cannot pass Python object as C++ data structure reference (%s &), will pass by copy." % dst_type)
+            src = CoerceFromPyTypeNode(dst_type, src, env)
+        elif (dst_type.is_complex
+              and src_type != dst_type
+              and dst_type.assignable_from(src_type)):
+            src = CoerceToComplexNode(src, dst_type, env)
+        else:  # neither src nor dst are py types
+            # Added the string comparison, since for c types that
+            # is enough, but Cython gets confused when the types are
+            # in different pxi files.
+            # TODO: Remove this hack and require shared declarations.
+            if not (src.type == dst_type or str(src.type) == str(dst_type) or dst_type.assignable_from(src_type)):
+                self.fail_assignment(dst_type)
+        return src
+
+    def fail_assignment(self, dst_type):
+        error(self.pos, "Cannot assign type '%s' to '%s'" % (self.type, dst_type))
+
+    def check_for_coercion_error(self, dst_type, env, fail=False, default=None):
+        if fail and not default:
+            default = "Cannot assign type '%(FROM)s' to '%(TO)s'"
+        message = find_coercion_error((self.type, dst_type), default, env)
+        if message is not None:
+            error(self.pos, message % {'FROM': self.type, 'TO': dst_type})
+            return True
+        if fail:
+            self.fail_assignment(dst_type)
+            return True
+        return False
+
+    def coerce_to_pyobject(self, env):
+        return self.coerce_to(PyrexTypes.py_object_type, env)
+
+    def coerce_to_boolean(self, env):
+        # Coerce result to something acceptable as
+        # a boolean value.
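+        # e.g. (illustrative) `if 0: ...` never emits a runtime truth
+        # test: the constant branch below folds the condition straight
+        # to BoolNode(value=False).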
+ + # if it's constant, calculate the result now + if self.has_constant_result(): + bool_value = bool(self.constant_result) + return BoolNode(self.pos, value=bool_value, + constant_result=bool_value) + + type = self.type + if type.is_enum or type.is_error: + return self + elif type.is_pyobject or type.is_int or type.is_ptr or type.is_float: + return CoerceToBooleanNode(self, env) + elif type.is_cpp_class: + return SimpleCallNode( + self.pos, + function=AttributeNode( + self.pos, obj=self, attribute='operator bool'), + args=[]).analyse_types(env) + elif type.is_ctuple: + bool_value = len(type.components) == 0 + return BoolNode(self.pos, value=bool_value, + constant_result=bool_value) + else: + error(self.pos, "Type '%s' not acceptable as a boolean" % type) + return self + + def coerce_to_integer(self, env): + # If not already some C integer type, coerce to longint. + if self.type.is_int: + return self + else: + return self.coerce_to(PyrexTypes.c_long_type, env) + + def coerce_to_temp(self, env): + # Ensure that the result is in a temporary. + if self.result_in_temp(): + return self + else: + return CoerceToTempNode(self, env) + + def coerce_to_simple(self, env): + # Ensure that the result is simple (see is_simple). + if self.is_simple(): + return self + else: + return self.coerce_to_temp(env) + + def is_simple(self): + # A node is simple if its result is something that can + # be referred to without performing any operations, e.g. + # a constant, local var, C global var, struct member + # reference, or temporary. + return self.result_in_temp() + + def may_be_none(self): + if self.type and not (self.type.is_pyobject or + self.type.is_memoryviewslice): + return False + if self.has_constant_result(): + return self.constant_result is not None + return True + + def as_cython_attribute(self): + return None + + def as_none_safe_node(self, message, error="PyExc_TypeError", format_args=()): + # Wraps the node in a NoneCheckNode if it is not known to be + # not-None (e.g. because it is a Python literal). + if self.may_be_none(): + return NoneCheckNode(self, error, message, format_args) + else: + return self + + @classmethod + def from_node(cls, node, **kwargs): + """Instantiate this node class from another node, properly + copying over all attributes that one would forget otherwise. + """ + attributes = "cf_state cf_maybe_null cf_is_null constant_result".split() + for attr_name in attributes: + if attr_name in kwargs: + continue + try: + value = getattr(node, attr_name) + except AttributeError: + pass + else: + kwargs[attr_name] = value + return cls(node.pos, **kwargs) + + +class AtomicExprNode(ExprNode): + # Abstract base class for expression nodes which have + # no sub-expressions. + + subexprs = [] + + # Override to optimize -- we know we have no children + def generate_subexpr_evaluation_code(self, code): + pass + def generate_subexpr_disposal_code(self, code): + pass + +class PyConstNode(AtomicExprNode): + # Abstract base class for constant Python values. 
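+    #
+    # Subclasses such as NoneNode and EllipsisNode below only supply a
+    # `value` C name (e.g. "Py_None"): calculate_result_code() returns it
+    # verbatim and generate_result_code() emits nothing.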
+ + is_literal = 1 + type = py_object_type + + def is_simple(self): + return 1 + + def may_be_none(self): + return False + + def analyse_types(self, env): + return self + + def calculate_result_code(self): + return self.value + + def generate_result_code(self, code): + pass + + +class NoneNode(PyConstNode): + # The constant value None + + is_none = 1 + value = "Py_None" + + constant_result = None + + nogil_check = None + + def compile_time_value(self, denv): + return None + + def may_be_none(self): + return True + + def coerce_to(self, dst_type, env): + if not (dst_type.is_pyobject or dst_type.is_memoryviewslice or dst_type.is_error): + # Catch this error early and loudly. + error(self.pos, "Cannot assign None to %s" % dst_type) + return super(NoneNode, self).coerce_to(dst_type, env) + + +class EllipsisNode(PyConstNode): + # '...' in a subscript list. + + value = "Py_Ellipsis" + + constant_result = Ellipsis + + def compile_time_value(self, denv): + return Ellipsis + + +class ConstNode(AtomicExprNode): + # Abstract base type for literal constant nodes. + # + # value string C code fragment + + is_literal = 1 + nogil_check = None + + def is_simple(self): + return 1 + + def nonlocally_immutable(self): + return 1 + + def may_be_none(self): + return False + + def analyse_types(self, env): + return self # Types are held in class variables + + def check_const(self): + return True + + def get_constant_c_result_code(self): + return self.calculate_result_code() + + def calculate_result_code(self): + return str(self.value) + + def generate_result_code(self, code): + pass + + +class BoolNode(ConstNode): + type = PyrexTypes.c_bint_type + # The constant value True or False + + def calculate_constant_result(self): + self.constant_result = self.value + + def compile_time_value(self, denv): + return self.value + + def calculate_result_code(self): + if self.type.is_pyobject: + return self.value and 'Py_True' or 'Py_False' + else: + return str(int(self.value)) + + def coerce_to(self, dst_type, env): + if dst_type == self.type: + return self + if dst_type is py_object_type and self.type is Builtin.bool_type: + return self + if dst_type.is_pyobject and self.type.is_int: + return BoolNode( + self.pos, value=self.value, + constant_result=self.constant_result, + type=Builtin.bool_type) + if dst_type.is_int and self.type.is_pyobject: + return BoolNode( + self.pos, value=self.value, + constant_result=self.constant_result, + type=PyrexTypes.c_bint_type) + return ConstNode.coerce_to(self, dst_type, env) + + +class NullNode(ConstNode): + type = PyrexTypes.c_null_ptr_type + value = "NULL" + constant_result = 0 + + def get_constant_c_result_code(self): + return self.value + + +class CharNode(ConstNode): + type = PyrexTypes.c_char_type + + def calculate_constant_result(self): + self.constant_result = ord(self.value) + + def compile_time_value(self, denv): + return ord(self.value) + + def calculate_result_code(self): + return "'%s'" % StringEncoding.escape_char(self.value) + + +class IntNode(ConstNode): + + # unsigned "" or "U" + # longness "" or "L" or "LL" + # is_c_literal True/False/None creator considers this a C integer literal + + unsigned = "" + longness = "" + is_c_literal = None # unknown + + def __init__(self, pos, **kwds): + ExprNode.__init__(self, pos, **kwds) + if 'type' not in kwds: + self.type = self.find_suitable_type_for_value() + + def find_suitable_type_for_value(self): + if self.constant_result is constant_value_not_set: + try: + self.calculate_constant_result() + except ValueError: + pass + # we ignore 
'is_c_literal = True' and instead map signed 32bit + # integers as C long values + if self.is_c_literal or \ + not self.has_constant_result() or \ + self.unsigned or self.longness == 'LL': + # clearly a C literal + rank = (self.longness == 'LL') and 2 or 1 + suitable_type = PyrexTypes.modifiers_and_name_to_type[not self.unsigned, rank, "int"] + if self.type: + suitable_type = PyrexTypes.widest_numeric_type(suitable_type, self.type) + else: + # C literal or Python literal - split at 32bit boundary + if -2**31 <= self.constant_result < 2**31: + if self.type and self.type.is_int: + suitable_type = self.type + else: + suitable_type = PyrexTypes.c_long_type + else: + suitable_type = PyrexTypes.py_object_type + return suitable_type + + def coerce_to(self, dst_type, env): + if self.type is dst_type: + return self + elif dst_type.is_float: + if self.has_constant_result(): + return FloatNode(self.pos, value='%d.0' % int(self.constant_result), type=dst_type, + constant_result=float(self.constant_result)) + else: + return FloatNode(self.pos, value=self.value, type=dst_type, + constant_result=not_a_constant) + if dst_type.is_numeric and not dst_type.is_complex: + node = IntNode(self.pos, value=self.value, constant_result=self.constant_result, + type=dst_type, is_c_literal=True, + unsigned=self.unsigned, longness=self.longness) + return node + elif dst_type.is_pyobject: + node = IntNode(self.pos, value=self.value, constant_result=self.constant_result, + type=PyrexTypes.py_object_type, is_c_literal=False, + unsigned=self.unsigned, longness=self.longness) + else: + # FIXME: not setting the type here to keep it working with + # complex numbers. Should they be special cased? + node = IntNode(self.pos, value=self.value, constant_result=self.constant_result, + unsigned=self.unsigned, longness=self.longness) + # We still need to perform normal coerce_to processing on the + # result, because we might be coercing to an extension type, + # in which case a type test node will be needed. 
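+        # (e.g. assigning the literal 5 to a hypothetical extension type
+        #  MyExt relies on the runtime PyTypeTestNode that
+        #  ExprNode.coerce_to() wraps around the py_object result)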
+ return ConstNode.coerce_to(node, dst_type, env) + + def coerce_to_boolean(self, env): + return IntNode( + self.pos, value=self.value, + constant_result=self.constant_result, + type=PyrexTypes.c_bint_type, + unsigned=self.unsigned, longness=self.longness) + + def generate_evaluation_code(self, code): + if self.type.is_pyobject: + # pre-allocate a Python version of the number + plain_integer_string = str(Utils.str_to_number(self.value)) + self.result_code = code.get_py_int(plain_integer_string, self.longness) + else: + self.result_code = self.get_constant_c_result_code() + + def get_constant_c_result_code(self): + unsigned, longness = self.unsigned, self.longness + literal = self.value_as_c_integer_string() + if not (unsigned or longness) and self.type.is_int and literal[0] == '-' and literal[1] != '0': + # negative decimal literal => guess longness from type to prevent wrap-around + if self.type.rank >= PyrexTypes.c_longlong_type.rank: + longness = 'LL' + elif self.type.rank >= PyrexTypes.c_long_type.rank: + longness = 'L' + return literal + unsigned + longness + + def value_as_c_integer_string(self): + value = self.value + if len(value) <= 2: + # too short to go wrong (and simplifies code below) + return value + neg_sign = '' + if value[0] == '-': + neg_sign = '-' + value = value[1:] + if value[0] == '0': + literal_type = value[1] # 0'o' - 0'b' - 0'x' + # 0x123 hex literals and 0123 octal literals work nicely in C + # but C-incompatible Py3 oct/bin notations need conversion + if neg_sign and literal_type in 'oOxX0123456789' and value[2:].isdigit(): + # negative hex/octal literal => prevent C compiler from using + # unsigned integer types by converting to decimal (see C standard 6.4.4.1) + value = str(Utils.str_to_number(value)) + elif literal_type in 'oO': + value = '0' + value[2:] # '0o123' => '0123' + elif literal_type in 'bB': + value = str(int(value[2:], 2)) + elif value.isdigit() and not self.unsigned and not self.longness: + if not neg_sign: + # C compilers do not consider unsigned types for decimal literals, + # but they do for hex (see C standard 6.4.4.1) + value = '0x%X' % int(value) + return neg_sign + value + + def calculate_result_code(self): + return self.result_code + + def calculate_constant_result(self): + self.constant_result = Utils.str_to_number(self.value) + + def compile_time_value(self, denv): + return Utils.str_to_number(self.value) + +class FloatNode(ConstNode): + type = PyrexTypes.c_double_type + + def calculate_constant_result(self): + self.constant_result = float(self.value) + + def compile_time_value(self, denv): + return float(self.value) + + def coerce_to(self, dst_type, env): + if dst_type.is_pyobject and self.type.is_float: + return FloatNode( + self.pos, value=self.value, + constant_result=self.constant_result, + type=Builtin.float_type) + if dst_type.is_float and self.type.is_pyobject: + return FloatNode( + self.pos, value=self.value, + constant_result=self.constant_result, + type=dst_type) + return ConstNode.coerce_to(self, dst_type, env) + + def calculate_result_code(self): + return self.result_code + + def get_constant_c_result_code(self): + strval = self.value + assert isinstance(strval, basestring) + cmpval = repr(float(strval)) + if cmpval == 'nan': + return "(Py_HUGE_VAL * 0)" + elif cmpval == 'inf': + return "Py_HUGE_VAL" + elif cmpval == '-inf': + return "(-Py_HUGE_VAL)" + else: + return strval + + def generate_evaluation_code(self, code): + c_value = self.get_constant_c_result_code() + if self.type.is_pyobject: + self.result_code = 
code.get_py_float(self.value, c_value) + else: + self.result_code = c_value + + +def _analyse_name_as_type(name, pos, env): + type = PyrexTypes.parse_basic_type(name) + if type is not None: + return type + + global_entry = env.global_scope().lookup(name) + if global_entry and global_entry.type and ( + global_entry.type.is_extension_type + or global_entry.type.is_struct_or_union + or global_entry.type.is_builtin_type + or global_entry.type.is_cpp_class): + return global_entry.type + + from .TreeFragment import TreeFragment + with local_errors(ignore=True): + pos = (pos[0], pos[1], pos[2]-7) + try: + declaration = TreeFragment(u"sizeof(%s)" % name, name=pos[0].filename, initial_pos=pos) + except CompileError: + pass + else: + sizeof_node = declaration.root.stats[0].expr + if isinstance(sizeof_node, SizeofTypeNode): + sizeof_node = sizeof_node.analyse_types(env) + if isinstance(sizeof_node, SizeofTypeNode): + return sizeof_node.arg_type + return None + + +class BytesNode(ConstNode): + # A char* or bytes literal + # + # value BytesLiteral + + is_string_literal = True + # start off as Python 'bytes' to support len() in O(1) + type = bytes_type + + def calculate_constant_result(self): + self.constant_result = self.value + + def as_sliced_node(self, start, stop, step=None): + value = StringEncoding.bytes_literal(self.value[start:stop:step], self.value.encoding) + return BytesNode(self.pos, value=value, constant_result=value) + + def compile_time_value(self, denv): + return self.value.byteencode() + + def analyse_as_type(self, env): + return _analyse_name_as_type(self.value.decode('ISO8859-1'), self.pos, env) + + def can_coerce_to_char_literal(self): + return len(self.value) == 1 + + def coerce_to_boolean(self, env): + # This is special because testing a C char* for truth directly + # would yield the wrong result. + bool_value = bool(self.value) + return BoolNode(self.pos, value=bool_value, constant_result=bool_value) + + def coerce_to(self, dst_type, env): + if self.type == dst_type: + return self + if dst_type.is_int: + if not self.can_coerce_to_char_literal(): + error(self.pos, "Only single-character string literals can be coerced into ints.") + return self + if dst_type.is_unicode_char: + error(self.pos, "Bytes literals cannot coerce to Py_UNICODE/Py_UCS4, use a unicode literal instead.") + return self + return CharNode(self.pos, value=self.value, + constant_result=ord(self.value)) + + node = BytesNode(self.pos, value=self.value, constant_result=self.constant_result) + if dst_type.is_pyobject: + if dst_type in (py_object_type, Builtin.bytes_type): + node.type = Builtin.bytes_type + else: + self.check_for_coercion_error(dst_type, env, fail=True) + return node + elif dst_type in (PyrexTypes.c_char_ptr_type, PyrexTypes.c_const_char_ptr_type): + node.type = dst_type + return node + elif dst_type in (PyrexTypes.c_uchar_ptr_type, PyrexTypes.c_const_uchar_ptr_type, PyrexTypes.c_void_ptr_type): + node.type = (PyrexTypes.c_const_char_ptr_type if dst_type == PyrexTypes.c_const_uchar_ptr_type + else PyrexTypes.c_char_ptr_type) + return CastNode(node, dst_type) + elif dst_type.assignable_from(PyrexTypes.c_char_ptr_type): + # Exclude the case of passing a C string literal into a non-const C++ string. + if not dst_type.is_cpp_class or dst_type.is_const: + node.type = dst_type + return node + + # We still need to perform normal coerce_to processing on the + # result, because we might be coercing to an extension type, + # in which case a type test node will be needed. 
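+        # (the branches above already covered the C cases, e.g. b"a" for a
+        #  `char` target and b"abc" for `char *`; what remains is mainly
+        #  coercion to extension types)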
+ return ConstNode.coerce_to(node, dst_type, env) + + def generate_evaluation_code(self, code): + if self.type.is_pyobject: + result = code.get_py_string_const(self.value) + elif self.type.is_const: + result = code.get_string_const(self.value) + else: + # not const => use plain C string literal and cast to mutable type + literal = self.value.as_c_string_literal() + # C++ may require a cast + result = typecast(self.type, PyrexTypes.c_void_ptr_type, literal) + self.result_code = result + + def get_constant_c_result_code(self): + return None # FIXME + + def calculate_result_code(self): + return self.result_code + + +class UnicodeNode(ConstNode): + # A Py_UNICODE* or unicode literal + # + # value EncodedString + # bytes_value BytesLiteral the literal parsed as bytes string + # ('-3' unicode literals only) + + is_string_literal = True + bytes_value = None + type = unicode_type + + def calculate_constant_result(self): + self.constant_result = self.value + + def analyse_as_type(self, env): + return _analyse_name_as_type(self.value, self.pos, env) + + def as_sliced_node(self, start, stop, step=None): + if StringEncoding.string_contains_surrogates(self.value[:stop]): + # this is unsafe as it may give different results + # in different runtimes + return None + value = StringEncoding.EncodedString(self.value[start:stop:step]) + value.encoding = self.value.encoding + if self.bytes_value is not None: + bytes_value = StringEncoding.bytes_literal( + self.bytes_value[start:stop:step], self.bytes_value.encoding) + else: + bytes_value = None + return UnicodeNode( + self.pos, value=value, bytes_value=bytes_value, + constant_result=value) + + def coerce_to(self, dst_type, env): + if dst_type is self.type: + pass + elif dst_type.is_unicode_char: + if not self.can_coerce_to_char_literal(): + error(self.pos, + "Only single-character Unicode string literals or " + "surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.") + return self + int_value = ord(self.value) + return IntNode(self.pos, type=dst_type, value=str(int_value), + constant_result=int_value) + elif not dst_type.is_pyobject: + if dst_type.is_string and self.bytes_value is not None: + # special case: '-3' enforced unicode literal used in a + # C char* context + return BytesNode(self.pos, value=self.bytes_value + ).coerce_to(dst_type, env) + if dst_type.is_pyunicode_ptr: + node = UnicodeNode(self.pos, value=self.value) + node.type = dst_type + return node + error(self.pos, + "Unicode literals do not support coercion to C types other " + "than Py_UNICODE/Py_UCS4 (for characters) or Py_UNICODE* " + "(for strings).") + elif dst_type not in (py_object_type, Builtin.basestring_type): + self.check_for_coercion_error(dst_type, env, fail=True) + return self + + def can_coerce_to_char_literal(self): + return len(self.value) == 1 + ## or (len(self.value) == 2 + ## and (0xD800 <= self.value[0] <= 0xDBFF) + ## and (0xDC00 <= self.value[1] <= 0xDFFF)) + + def coerce_to_boolean(self, env): + bool_value = bool(self.value) + return BoolNode(self.pos, value=bool_value, constant_result=bool_value) + + def contains_surrogates(self): + return StringEncoding.string_contains_surrogates(self.value) + + def generate_evaluation_code(self, code): + if self.type.is_pyobject: + if self.contains_surrogates(): + # surrogates are not really portable and cannot be + # decoded by the UTF-8 codec in Py3.3 + self.result_code = code.get_py_const(py_object_type, 'ustring') + data_cname = code.get_pyunicode_ptr_const(self.value) + const_code = 
code.get_cached_constants_writer(self.result_code) + if const_code is None: + return # already initialised + const_code.mark_pos(self.pos) + const_code.putln( + "%s = PyUnicode_FromUnicode(%s, (sizeof(%s) / sizeof(Py_UNICODE))-1); %s" % ( + self.result_code, + data_cname, + data_cname, + const_code.error_goto_if_null(self.result_code, self.pos))) + const_code.put_error_if_neg( + self.pos, "__Pyx_PyUnicode_READY(%s)" % self.result_code) + else: + self.result_code = code.get_py_string_const(self.value) + else: + self.result_code = code.get_pyunicode_ptr_const(self.value) + + def calculate_result_code(self): + return self.result_code + + def compile_time_value(self, env): + return self.value + + +class StringNode(PyConstNode): + # A Python str object, i.e. a byte string in Python 2.x and a + # unicode string in Python 3.x + # + # value BytesLiteral (or EncodedString with ASCII content) + # unicode_value EncodedString or None + # is_identifier boolean + + type = str_type + is_string_literal = True + is_identifier = None + unicode_value = None + + def calculate_constant_result(self): + if self.unicode_value is not None: + # only the Unicode value is portable across Py2/3 + self.constant_result = self.unicode_value + + def analyse_as_type(self, env): + return _analyse_name_as_type(self.unicode_value or self.value.decode('ISO8859-1'), self.pos, env) + + def as_sliced_node(self, start, stop, step=None): + value = type(self.value)(self.value[start:stop:step]) + value.encoding = self.value.encoding + if self.unicode_value is not None: + if StringEncoding.string_contains_surrogates(self.unicode_value[:stop]): + # this is unsafe as it may give different results in different runtimes + return None + unicode_value = StringEncoding.EncodedString( + self.unicode_value[start:stop:step]) + else: + unicode_value = None + return StringNode( + self.pos, value=value, unicode_value=unicode_value, + constant_result=value, is_identifier=self.is_identifier) + + def coerce_to(self, dst_type, env): + if dst_type is not py_object_type and not str_type.subtype_of(dst_type): +# if dst_type is Builtin.bytes_type: +# # special case: bytes = 'str literal' +# return BytesNode(self.pos, value=self.value) + if not dst_type.is_pyobject: + return BytesNode(self.pos, value=self.value).coerce_to(dst_type, env) + if dst_type is not Builtin.basestring_type: + self.check_for_coercion_error(dst_type, env, fail=True) + return self + + def can_coerce_to_char_literal(self): + return not self.is_identifier and len(self.value) == 1 + + def generate_evaluation_code(self, code): + self.result_code = code.get_py_string_const( + self.value, identifier=self.is_identifier, is_str=True, + unicode_value=self.unicode_value) + + def get_constant_c_result_code(self): + return None + + def calculate_result_code(self): + return self.result_code + + def compile_time_value(self, env): + if self.value.is_unicode: + return self.value + if not IS_PYTHON3: + # use plain str/bytes object in Py2 + return self.value.byteencode() + # in Py3, always return a Unicode string + if self.unicode_value is not None: + return self.unicode_value + return self.value.decode('iso8859-1') + + +class IdentifierStringNode(StringNode): + # A special str value that represents an identifier (bytes in Py2, + # unicode in Py3). 
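+    # (StringNode.generate_evaluation_code() above then requests the
+    #  constant with identifier=True, so the value is set up as an
+    #  interned identifier rather than a plain string constant)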
+ is_identifier = True + + +class ImagNode(AtomicExprNode): + # Imaginary number literal + # + # value string imaginary part (float value) + + type = PyrexTypes.c_double_complex_type + + def calculate_constant_result(self): + self.constant_result = complex(0.0, float(self.value)) + + def compile_time_value(self, denv): + return complex(0.0, float(self.value)) + + def analyse_types(self, env): + self.type.create_declaration_utility_code(env) + return self + + def may_be_none(self): + return False + + def coerce_to(self, dst_type, env): + if self.type is dst_type: + return self + node = ImagNode(self.pos, value=self.value) + if dst_type.is_pyobject: + node.is_temp = 1 + node.type = Builtin.complex_type + # We still need to perform normal coerce_to processing on the + # result, because we might be coercing to an extension type, + # in which case a type test node will be needed. + return AtomicExprNode.coerce_to(node, dst_type, env) + + gil_message = "Constructing complex number" + + def calculate_result_code(self): + if self.type.is_pyobject: + return self.result() + else: + return "%s(0, %r)" % (self.type.from_parts, float(self.value)) + + def generate_result_code(self, code): + if self.type.is_pyobject: + code.putln( + "%s = PyComplex_FromDoubles(0.0, %r); %s" % ( + self.result(), + float(self.value), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +class NewExprNode(AtomicExprNode): + + # C++ new statement + # + # cppclass node c++ class to create + + type = None + + def infer_type(self, env): + type = self.cppclass.analyse_as_type(env) + if type is None or not type.is_cpp_class: + error(self.pos, "new operator can only be applied to a C++ class") + self.type = error_type + return + self.cpp_check(env) + constructor = type.get_constructor(self.pos) + self.class_type = type + self.entry = constructor + self.type = constructor.type + return self.type + + def analyse_types(self, env): + if self.type is None: + self.infer_type(env) + return self + + def may_be_none(self): + return False + + def generate_result_code(self, code): + pass + + def calculate_result_code(self): + return "new " + self.class_type.empty_declaration_code() + + +class NameNode(AtomicExprNode): + # Reference to a local or global variable name. 
+ # + # name string Python name of the variable + # entry Entry Symbol table entry + # type_entry Entry For extension type names, the original type entry + # cf_is_null boolean Is uninitialized before this node + # cf_maybe_null boolean Maybe uninitialized before this node + # allow_null boolean Don't raise UnboundLocalError + # nogil boolean Whether it is used in a nogil context + + is_name = True + is_cython_module = False + cython_attribute = None + lhs_of_first_assignment = False # TODO: remove me + is_used_as_rvalue = 0 + entry = None + type_entry = None + cf_maybe_null = True + cf_is_null = False + allow_null = False + nogil = False + inferred_type = None + + def as_cython_attribute(self): + return self.cython_attribute + + def type_dependencies(self, env): + if self.entry is None: + self.entry = env.lookup(self.name) + if self.entry is not None and self.entry.type.is_unspecified: + return (self,) + else: + return () + + def infer_type(self, env): + if self.entry is None: + self.entry = env.lookup(self.name) + if self.entry is None or self.entry.type is unspecified_type: + if self.inferred_type is not None: + return self.inferred_type + return py_object_type + elif (self.entry.type.is_extension_type or self.entry.type.is_builtin_type) and \ + self.name == self.entry.type.name: + # Unfortunately the type attribute of type objects + # is used for the pointer to the type they represent. + return type_type + elif self.entry.type.is_cfunction: + if self.entry.scope.is_builtin_scope: + # special case: optimised builtin functions must be treated as Python objects + return py_object_type + else: + # special case: referring to a C function must return its pointer + return PyrexTypes.CPtrType(self.entry.type) + else: + # If entry is inferred as pyobject it's safe to use local + # NameNode's inferred_type. + if self.entry.type.is_pyobject and self.inferred_type: + # Overflow may happen if integer + if not (self.inferred_type.is_int and self.entry.might_overflow): + return self.inferred_type + return self.entry.type + + def compile_time_value(self, denv): + try: + return denv.lookup(self.name) + except KeyError: + error(self.pos, "Compile-time name '%s' not defined" % self.name) + + def get_constant_c_result_code(self): + if not self.entry or self.entry.type.is_pyobject: + return None + return self.entry.cname + + def coerce_to(self, dst_type, env): + # If coercing to a generic pyobject and this is a builtin + # C function with a Python equivalent, manufacture a NameNode + # referring to the Python builtin. + #print "NameNode.coerce_to:", self.name, dst_type ### + if dst_type is py_object_type: + entry = self.entry + if entry and entry.is_cfunction: + var_entry = entry.as_variable + if var_entry: + if var_entry.is_builtin and var_entry.is_const: + var_entry = env.declare_builtin(var_entry.name, self.pos) + node = NameNode(self.pos, name = self.name) + node.entry = var_entry + node.analyse_rvalue_entry(env) + return node + + return super(NameNode, self).coerce_to(dst_type, env) + + def declare_from_annotation(self, env, as_target=False): + """Implements PEP 526 annotation typing in a fairly relaxed way. + + Annotations are ignored for global variables, Python class attributes and already declared variables. + String literals are allowed and ignored. + The ambiguous Python types 'int' and 'long' are ignored and the 'cython.int' form must be used instead. 
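+
+        Illustrative examples (assuming the annotation_typing directive is on)::
+
+            x: cython.int = 0    # declares a C int
+            y: int = 0           # ambiguous, annotation ignored
+            z: "anything" = 0    # string literal, annotation ignored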
+ """ + if not env.directives['annotation_typing']: + return + if env.is_module_scope or env.is_py_class_scope: + # annotations never create global cdef names and Python classes don't support them anyway + return + name = self.name + if self.entry or env.lookup_here(name) is not None: + # already declared => ignore annotation + return + + annotation = self.annotation + if annotation.is_string_literal: + # name: "description" => not a type, but still a declared variable or attribute + atype = None + else: + _, atype = analyse_type_annotation(annotation, env) + if atype is None: + atype = unspecified_type if as_target and env.directives['infer_types'] != False else py_object_type + self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target) + self.entry.annotation = annotation + + def analyse_as_module(self, env): + # Try to interpret this as a reference to a cimported module. + # Returns the module scope, or None. + entry = self.entry + if not entry: + entry = env.lookup(self.name) + if entry and entry.as_module: + return entry.as_module + return None + + def analyse_as_type(self, env): + if self.cython_attribute: + type = PyrexTypes.parse_basic_type(self.cython_attribute) + else: + type = PyrexTypes.parse_basic_type(self.name) + if type: + return type + entry = self.entry + if not entry: + entry = env.lookup(self.name) + if entry and entry.is_type: + return entry.type + else: + return None + + def analyse_as_extension_type(self, env): + # Try to interpret this as a reference to an extension type. + # Returns the extension type, or None. + entry = self.entry + if not entry: + entry = env.lookup(self.name) + if entry and entry.is_type: + if entry.type.is_extension_type or entry.type.is_builtin_type: + return entry.type + return None + + def analyse_target_declaration(self, env): + if not self.entry: + self.entry = env.lookup_here(self.name) + if not self.entry and self.annotation is not None: + # name : type = ... + self.declare_from_annotation(env, as_target=True) + if not self.entry: + if env.directives['warn.undeclared']: + warning(self.pos, "implicit declaration of '%s'" % self.name, 1) + if env.directives['infer_types'] != False: + type = unspecified_type + else: + type = py_object_type + self.entry = env.declare_var(self.name, type, self.pos) + if self.entry.is_declared_generic: + self.result_ctype = py_object_type + if self.entry.as_module: + # cimported modules namespace can shadow actual variables + self.entry.is_variable = 1 + + def analyse_types(self, env): + self.initialized_check = env.directives['initializedcheck'] + entry = self.entry + if entry is None: + entry = env.lookup(self.name) + if not entry: + entry = env.declare_builtin(self.name, self.pos) + if entry and entry.is_builtin and entry.is_const: + self.is_literal = True + if not entry: + self.type = PyrexTypes.error_type + return self + self.entry = entry + entry.used = 1 + if entry.type.is_buffer: + from . 
import Buffer + Buffer.used_buffer_aux_vars(entry) + self.analyse_rvalue_entry(env) + return self + + def analyse_target_types(self, env): + self.analyse_entry(env, is_target=True) + + entry = self.entry + if entry.is_cfunction and entry.as_variable: + # FIXME: unify "is_overridable" flags below + if (entry.is_overridable or entry.type.is_overridable) or not self.is_lvalue() and entry.fused_cfunction: + # We need this for assigning to cpdef names and for the fused 'def' TreeFragment + entry = self.entry = entry.as_variable + self.type = entry.type + + if self.type.is_const: + error(self.pos, "Assignment to const '%s'" % self.name) + if self.type.is_reference: + error(self.pos, "Assignment to reference '%s'" % self.name) + if not self.is_lvalue(): + error(self.pos, "Assignment to non-lvalue '%s'" % self.name) + self.type = PyrexTypes.error_type + entry.used = 1 + if entry.type.is_buffer: + from . import Buffer + Buffer.used_buffer_aux_vars(entry) + return self + + def analyse_rvalue_entry(self, env): + #print "NameNode.analyse_rvalue_entry:", self.name ### + #print "Entry:", self.entry.__dict__ ### + self.analyse_entry(env) + entry = self.entry + + if entry.is_declared_generic: + self.result_ctype = py_object_type + + if entry.is_pyglobal or entry.is_builtin: + if entry.is_builtin and entry.is_const: + self.is_temp = 0 + else: + self.is_temp = 1 + + self.is_used_as_rvalue = 1 + elif entry.type.is_memoryviewslice: + self.is_temp = False + self.is_used_as_rvalue = True + self.use_managed_ref = True + return self + + def nogil_check(self, env): + self.nogil = True + if self.is_used_as_rvalue: + entry = self.entry + if entry.is_builtin: + if not entry.is_const: # cached builtins are ok + self.gil_error() + elif entry.is_pyglobal: + self.gil_error() + + gil_message = "Accessing Python global or builtin" + + def analyse_entry(self, env, is_target=False): + #print "NameNode.analyse_entry:", self.name ### + self.check_identifier_kind() + entry = self.entry + type = entry.type + if (not is_target and type.is_pyobject and self.inferred_type and + self.inferred_type.is_builtin_type): + # assume that type inference is smarter than the static entry + type = self.inferred_type + self.type = type + + def check_identifier_kind(self): + # Check that this is an appropriate kind of name for use in an + # expression. Also finds the variable entry associated with + # an extension type. + entry = self.entry + if entry.is_type and entry.type.is_extension_type: + self.type_entry = entry + if entry.is_type and entry.type.is_enum: + py_entry = Symtab.Entry(self.name, None, py_object_type) + py_entry.is_pyglobal = True + py_entry.scope = self.entry.scope + self.entry = py_entry + elif not (entry.is_const or entry.is_variable or + entry.is_builtin or entry.is_cfunction or + entry.is_cpp_class): + if self.entry.as_variable: + self.entry = self.entry.as_variable + elif not self.is_cython_module: + error(self.pos, "'%s' is not a constant, variable or function identifier" % self.name) + + def is_cimported_module_without_shadow(self, env): + if self.is_cython_module or self.cython_attribute: + return False + entry = self.entry or env.lookup(self.name) + return entry.as_module and not entry.is_variable + + def is_simple(self): + # If it's not a C variable, it'll be in a temp. 
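+        # (either way the is_simple() contract from ExprNode holds: the
+        #  result can be re-read without re-evaluating anything)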
+        return 1
+
+    def may_be_none(self):
+        if self.cf_state and self.type and (self.type.is_pyobject or
+                                            self.type.is_memoryviewslice):
+            # guard against infinite recursion on self-dependencies
+            if getattr(self, '_none_checking', False):
+                # self-dependency - either this node receives a None
+                # value from *another* node, or it cannot reference
+                # None at this point => safe to assume "not None"
+                return False
+            self._none_checking = True
+            # evaluate control flow state to see if there were any
+            # potential None values assigned to the node so far
+            may_be_none = False
+            for assignment in self.cf_state:
+                if assignment.rhs.may_be_none():
+                    may_be_none = True
+                    break
+            del self._none_checking
+            return may_be_none
+        return super(NameNode, self).may_be_none()
+
+    def nonlocally_immutable(self):
+        if ExprNode.nonlocally_immutable(self):
+            return True
+        entry = self.entry
+        if not entry or entry.in_closure:
+            return False
+        return entry.is_local or entry.is_arg or entry.is_builtin or entry.is_readonly
+
+    def calculate_target_results(self, env):
+        pass
+
+    def check_const(self):
+        entry = self.entry
+        if entry is not None and not (
+                entry.is_const or
+                entry.is_cfunction or
+                entry.is_builtin or
+                entry.type.is_const):
+            self.not_const()
+            return False
+        return True
+
+    def check_const_addr(self):
+        entry = self.entry
+        if not (entry.is_cglobal or entry.is_cfunction or entry.is_builtin):
+            self.addr_not_const()
+            return False
+        return True
+
+    def is_lvalue(self):
+        return (
+            self.entry.is_variable and
+            not self.entry.is_readonly
+        ) or (
+            self.entry.is_cfunction and
+            self.entry.is_overridable
+        )
+
+    def is_addressable(self):
+        return self.entry.is_variable and not self.type.is_memoryviewslice
+
+    def is_ephemeral(self):
+        # Name nodes are never ephemeral, even if the
+        # result is in a temporary.
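+        # (the storage behind a name is owned by its scope rather than by
+        #  this expression, so borrowed pointers into it remain valid after
+        #  the expression's temps are released)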
+ return 0 + + def calculate_result_code(self): + entry = self.entry + if not entry: + return "" # There was an error earlier + return entry.cname + + def generate_result_code(self, code): + assert hasattr(self, 'entry') + entry = self.entry + if entry is None: + return # There was an error earlier + if entry.utility_code: + code.globalstate.use_utility_code(entry.utility_code) + if entry.is_builtin and entry.is_const: + return # Lookup already cached + elif entry.is_pyclass_attr: + assert entry.type.is_pyobject, "Python global or builtin not a Python object" + interned_cname = code.intern_identifier(self.entry.name) + if entry.is_builtin: + namespace = Naming.builtins_cname + else: # entry.is_pyglobal + namespace = entry.scope.namespace_cname + if not self.cf_is_null: + code.putln( + '%s = PyObject_GetItem(%s, %s);' % ( + self.result(), + namespace, + interned_cname)) + code.putln('if (unlikely(!%s)) {' % self.result()) + code.putln('PyErr_Clear();') + code.globalstate.use_utility_code( + UtilityCode.load_cached("GetModuleGlobalName", "ObjectHandling.c")) + code.putln( + '__Pyx_GetModuleGlobalName(%s, %s);' % ( + self.result(), + interned_cname)) + if not self.cf_is_null: + code.putln("}") + code.putln(code.error_goto_if_null(self.result(), self.pos)) + code.put_gotref(self.py_result()) + + elif entry.is_builtin and not entry.scope.is_module_scope: + # known builtin + assert entry.type.is_pyobject, "Python global or builtin not a Python object" + interned_cname = code.intern_identifier(self.entry.name) + code.globalstate.use_utility_code( + UtilityCode.load_cached("GetBuiltinName", "ObjectHandling.c")) + code.putln( + '%s = __Pyx_GetBuiltinName(%s); %s' % ( + self.result(), + interned_cname, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + elif entry.is_pyglobal or (entry.is_builtin and entry.scope.is_module_scope): + # name in class body, global name or unknown builtin + assert entry.type.is_pyobject, "Python global or builtin not a Python object" + interned_cname = code.intern_identifier(self.entry.name) + if entry.scope.is_module_scope: + code.globalstate.use_utility_code( + UtilityCode.load_cached("GetModuleGlobalName", "ObjectHandling.c")) + code.putln( + '__Pyx_GetModuleGlobalName(%s, %s); %s' % ( + self.result(), + interned_cname, + code.error_goto_if_null(self.result(), self.pos))) + else: + # FIXME: is_pyglobal is also used for class namespace + code.globalstate.use_utility_code( + UtilityCode.load_cached("GetNameInClass", "ObjectHandling.c")) + code.putln( + '__Pyx_GetNameInClass(%s, %s, %s); %s' % ( + self.result(), + entry.scope.namespace_cname, + interned_cname, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + elif entry.is_local or entry.in_closure or entry.from_closure or entry.type.is_memoryviewslice: + # Raise UnboundLocalError for objects and memoryviewslices + raise_unbound = ( + (self.cf_maybe_null or self.cf_is_null) and not self.allow_null) + null_code = entry.type.check_for_null_code(entry.cname) + + memslice_check = entry.type.is_memoryviewslice and self.initialized_check + + if null_code and raise_unbound and (entry.type.is_pyobject or memslice_check): + code.put_error_if_unbound(self.pos, entry, self.in_nogil_context) + + def generate_assignment_code(self, rhs, code, overloaded_assignment=False, + exception_check=None, exception_value=None): + #print "NameNode.generate_assignment_code:", self.name ### + entry = self.entry + if entry is None: + return # There was an error 
earlier + + if (self.entry.type.is_ptr and isinstance(rhs, ListNode) + and not self.lhs_of_first_assignment and not rhs.in_module_scope): + error(self.pos, "Literal list must be assigned to pointer at time of declaration") + + # is_pyglobal seems to be True for module level-globals only. + # We use this to access class->tp_dict if necessary. + if entry.is_pyglobal: + assert entry.type.is_pyobject, "Python global or builtin not a Python object" + interned_cname = code.intern_identifier(self.entry.name) + namespace = self.entry.scope.namespace_cname + if entry.is_member: + # if the entry is a member we have to cheat: SetAttr does not work + # on types, so we create a descriptor which is then added to tp_dict + setter = 'PyDict_SetItem' + namespace = '%s->tp_dict' % namespace + elif entry.scope.is_module_scope: + setter = 'PyDict_SetItem' + namespace = Naming.moddict_cname + elif entry.is_pyclass_attr: + code.globalstate.use_utility_code(UtilityCode.load_cached("SetNameInClass", "ObjectHandling.c")) + setter = '__Pyx_SetNameInClass' + else: + assert False, repr(entry) + code.put_error_if_neg( + self.pos, + '%s(%s, %s, %s)' % ( + setter, + namespace, + interned_cname, + rhs.py_result())) + if debug_disposal_code: + print("NameNode.generate_assignment_code:") + print("...generating disposal code for %s" % rhs) + rhs.generate_disposal_code(code) + rhs.free_temps(code) + if entry.is_member: + # in Py2.6+, we need to invalidate the method cache + code.putln("PyType_Modified(%s);" % + entry.scope.parent_type.typeptr_cname) + else: + if self.type.is_memoryviewslice: + self.generate_acquire_memoryviewslice(rhs, code) + + elif self.type.is_buffer: + # Generate code for doing the buffer release/acquisition. + # This might raise an exception in which case the assignment (done + # below) will not happen. + # + # The reason this is not in a typetest-like node is because the + # variables that the acquired buffer info is stored to is allocated + # per entry and coupled with it. 
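+            # (the release/acquire pair itself is emitted by
+            #  Buffer.put_assign_to_buffer(), called from
+            #  generate_acquire_buffer() below)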
+ self.generate_acquire_buffer(rhs, code) + assigned = False + if self.type.is_pyobject: + #print "NameNode.generate_assignment_code: to", self.name ### + #print "...from", rhs ### + #print "...LHS type", self.type, "ctype", self.ctype() ### + #print "...RHS type", rhs.type, "ctype", rhs.ctype() ### + if self.use_managed_ref: + rhs.make_owned_reference(code) + is_external_ref = entry.is_cglobal or self.entry.in_closure or self.entry.from_closure + if is_external_ref: + if not self.cf_is_null: + if self.cf_maybe_null: + code.put_xgotref(self.py_result()) + else: + code.put_gotref(self.py_result()) + assigned = True + if entry.is_cglobal: + code.put_decref_set( + self.result(), rhs.result_as(self.ctype())) + else: + if not self.cf_is_null: + if self.cf_maybe_null: + code.put_xdecref_set( + self.result(), rhs.result_as(self.ctype())) + else: + code.put_decref_set( + self.result(), rhs.result_as(self.ctype())) + else: + assigned = False + if is_external_ref: + code.put_giveref(rhs.py_result()) + if not self.type.is_memoryviewslice: + if not assigned: + if overloaded_assignment: + result = rhs.result() + if exception_check == '+': + translate_cpp_exception( + code, self.pos, + '%s = %s;' % (self.result(), result), + self.result() if self.type.is_pyobject else None, + exception_value, self.in_nogil_context) + else: + code.putln('%s = %s;' % (self.result(), result)) + else: + result = rhs.result_as(self.ctype()) + + if is_pythran_expr(self.type): + code.putln('new (&%s) decltype(%s){%s};' % (self.result(), self.result(), result)) + elif result != self.result(): + code.putln('%s = %s;' % (self.result(), result)) + if debug_disposal_code: + print("NameNode.generate_assignment_code:") + print("...generating post-assignment code for %s" % rhs) + rhs.generate_post_assignment_code(code) + elif rhs.result_in_temp(): + rhs.generate_post_assignment_code(code) + + rhs.free_temps(code) + + def generate_acquire_memoryviewslice(self, rhs, code): + """ + Slices, coercions from objects, return values etc are new references. + We have a borrowed reference in case of dst = src + """ + from . import MemoryView + + MemoryView.put_acquire_memoryviewslice( + lhs_cname=self.result(), + lhs_type=self.type, + lhs_pos=self.pos, + rhs=rhs, + code=code, + have_gil=not self.in_nogil_context, + first_assignment=self.cf_is_null) + + def generate_acquire_buffer(self, rhs, code): + # rhstmp is only used in case the rhs is a complicated expression leading to + # the object, to avoid repeating the same C expression for every reference + # to the rhs. It does NOT hold a reference. + pretty_rhs = isinstance(rhs, NameNode) or rhs.is_temp + if pretty_rhs: + rhstmp = rhs.result_as(self.ctype()) + else: + rhstmp = code.funcstate.allocate_temp(self.entry.type, manage_ref=False) + code.putln('%s = %s;' % (rhstmp, rhs.result_as(self.ctype()))) + + from . 
import Buffer + Buffer.put_assign_to_buffer(self.result(), rhstmp, self.entry, + is_initialized=not self.lhs_of_first_assignment, + pos=self.pos, code=code) + + if not pretty_rhs: + code.putln("%s = 0;" % rhstmp) + code.funcstate.release_temp(rhstmp) + + def generate_deletion_code(self, code, ignore_nonexisting=False): + if self.entry is None: + return # There was an error earlier + elif self.entry.is_pyclass_attr: + namespace = self.entry.scope.namespace_cname + interned_cname = code.intern_identifier(self.entry.name) + if ignore_nonexisting: + key_error_code = 'PyErr_Clear(); else' + else: + # minor hack: fake a NameError on KeyError + key_error_code = ( + '{ PyErr_Clear(); PyErr_Format(PyExc_NameError, "name \'%%s\' is not defined", "%s"); }' % + self.entry.name) + code.putln( + 'if (unlikely(PyObject_DelItem(%s, %s) < 0)) {' + ' if (likely(PyErr_ExceptionMatches(PyExc_KeyError))) %s' + ' %s ' + '}' % (namespace, interned_cname, + key_error_code, + code.error_goto(self.pos))) + elif self.entry.is_pyglobal: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectSetAttrStr", "ObjectHandling.c")) + interned_cname = code.intern_identifier(self.entry.name) + del_code = '__Pyx_PyObject_DelAttrStr(%s, %s)' % ( + Naming.module_cname, interned_cname) + if ignore_nonexisting: + code.putln( + 'if (unlikely(%s < 0)) {' + ' if (likely(PyErr_ExceptionMatches(PyExc_AttributeError))) PyErr_Clear(); else %s ' + '}' % (del_code, code.error_goto(self.pos))) + else: + code.put_error_if_neg(self.pos, del_code) + elif self.entry.type.is_pyobject or self.entry.type.is_memoryviewslice: + if not self.cf_is_null: + if self.cf_maybe_null and not ignore_nonexisting: + code.put_error_if_unbound(self.pos, self.entry) + + if self.entry.type.is_pyobject: + if self.entry.in_closure: + # generator + if ignore_nonexisting and self.cf_maybe_null: + code.put_xgotref(self.result()) + else: + code.put_gotref(self.result()) + if ignore_nonexisting and self.cf_maybe_null: + code.put_xdecref(self.result(), self.ctype()) + else: + code.put_decref(self.result(), self.ctype()) + code.putln('%s = NULL;' % self.result()) + else: + code.put_xdecref_memoryviewslice(self.entry.cname, + have_gil=not self.nogil) + else: + error(self.pos, "Deletion of C names not supported") + + def annotate(self, code): + if hasattr(self, 'is_called') and self.is_called: + pos = (self.pos[0], self.pos[1], self.pos[2] - len(self.name) - 1) + if self.type.is_pyobject: + style, text = 'py_call', 'python function (%s)' + else: + style, text = 'c_call', 'c function (%s)' + code.annotate(pos, AnnotationItem(style, text % self.type, size=len(self.name))) + +class BackquoteNode(ExprNode): + # `expr` + # + # arg ExprNode + + type = py_object_type + + subexprs = ['arg'] + + def analyse_types(self, env): + self.arg = self.arg.analyse_types(env) + self.arg = self.arg.coerce_to_pyobject(env) + self.is_temp = 1 + return self + + gil_message = "Backquote expression" + + def calculate_constant_result(self): + self.constant_result = repr(self.arg.constant_result) + + def generate_result_code(self, code): + code.putln( + "%s = PyObject_Repr(%s); %s" % ( + self.result(), + self.arg.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +class ImportNode(ExprNode): + # Used as part of import statement implementation. + # Implements result = + # __import__(module_name, globals(), None, name_list, level) + # + # module_name StringNode dotted name of module. 
Empty module + # name means importing the parent package according + # to level + # name_list ListNode or None list of names to be imported + # level int relative import level: + # -1: attempt both relative import and absolute import; + # 0: absolute import; + # >0: the number of parent directories to search + # relative to the current module. + # None: decide the level according to language level and + # directives + + type = py_object_type + + subexprs = ['module_name', 'name_list'] + + def analyse_types(self, env): + if self.level is None: + if (env.directives['py2_import'] or + Future.absolute_import not in env.global_scope().context.future_directives): + self.level = -1 + else: + self.level = 0 + module_name = self.module_name.analyse_types(env) + self.module_name = module_name.coerce_to_pyobject(env) + if self.name_list: + name_list = self.name_list.analyse_types(env) + self.name_list = name_list.coerce_to_pyobject(env) + self.is_temp = 1 + return self + + gil_message = "Python import" + + def generate_result_code(self, code): + if self.name_list: + name_list_code = self.name_list.py_result() + else: + name_list_code = "0" + + code.globalstate.use_utility_code(UtilityCode.load_cached("Import", "ImportExport.c")) + import_code = "__Pyx_Import(%s, %s, %d)" % ( + self.module_name.py_result(), + name_list_code, + self.level) + + if (self.level <= 0 and + self.module_name.is_string_literal and + self.module_name.value in utility_code_for_imports): + helper_func, code_name, code_file = utility_code_for_imports[self.module_name.value] + code.globalstate.use_utility_code(UtilityCode.load_cached(code_name, code_file)) + import_code = '%s(%s)' % (helper_func, import_code) + + code.putln("%s = %s; %s" % ( + self.result(), + import_code, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +class IteratorNode(ExprNode): + # Used as part of for statement implementation. 
+ # + # Implements result = iter(sequence) + # + # sequence ExprNode + + type = py_object_type + iter_func_ptr = None + counter_cname = None + cpp_iterator_cname = None + reversed = False # currently only used for list/tuple types (see Optimize.py) + is_async = False + + subexprs = ['sequence'] + + def analyse_types(self, env): + self.sequence = self.sequence.analyse_types(env) + if (self.sequence.type.is_array or self.sequence.type.is_ptr) and \ + not self.sequence.type.is_string: + # C array iteration will be transformed later on + self.type = self.sequence.type + elif self.sequence.type.is_cpp_class: + self.analyse_cpp_types(env) + else: + self.sequence = self.sequence.coerce_to_pyobject(env) + if self.sequence.type in (list_type, tuple_type): + self.sequence = self.sequence.as_none_safe_node("'NoneType' object is not iterable") + self.is_temp = 1 + return self + + gil_message = "Iterating over Python object" + + _func_iternext_type = PyrexTypes.CPtrType(PyrexTypes.CFuncType( + PyrexTypes.py_object_type, [ + PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None), + ])) + + def type_dependencies(self, env): + return self.sequence.type_dependencies(env) + + def infer_type(self, env): + sequence_type = self.sequence.infer_type(env) + if sequence_type.is_array or sequence_type.is_ptr: + return sequence_type + elif sequence_type.is_cpp_class: + begin = sequence_type.scope.lookup("begin") + if begin is not None: + return begin.type.return_type + elif sequence_type.is_pyobject: + return sequence_type + return py_object_type + + def analyse_cpp_types(self, env): + sequence_type = self.sequence.type + if sequence_type.is_ptr: + sequence_type = sequence_type.base_type + begin = sequence_type.scope.lookup("begin") + end = sequence_type.scope.lookup("end") + if (begin is None + or not begin.type.is_cfunction + or begin.type.args): + error(self.pos, "missing begin() on %s" % self.sequence.type) + self.type = error_type + return + if (end is None + or not end.type.is_cfunction + or end.type.args): + error(self.pos, "missing end() on %s" % self.sequence.type) + self.type = error_type + return + iter_type = begin.type.return_type + if iter_type.is_cpp_class: + if env.lookup_operator_for_types( + self.pos, + "!=", + [iter_type, end.type.return_type]) is None: + error(self.pos, "missing operator!= on result of begin() on %s" % self.sequence.type) + self.type = error_type + return + if env.lookup_operator_for_types(self.pos, '++', [iter_type]) is None: + error(self.pos, "missing operator++ on result of begin() on %s" % self.sequence.type) + self.type = error_type + return + if env.lookup_operator_for_types(self.pos, '*', [iter_type]) is None: + error(self.pos, "missing operator* on result of begin() on %s" % self.sequence.type) + self.type = error_type + return + self.type = iter_type + elif iter_type.is_ptr: + if not (iter_type == end.type.return_type): + error(self.pos, "incompatible types for begin() and end()") + self.type = iter_type + else: + error(self.pos, "result type of begin() on %s must be a C++ class or pointer" % self.sequence.type) + self.type = error_type + return + + def generate_result_code(self, code): + sequence_type = self.sequence.type + if sequence_type.is_cpp_class: + if self.sequence.is_name: + # safe: C++ won't allow you to reassign to class references + begin_func = "%s.begin" % self.sequence.result() + else: + sequence_type = PyrexTypes.c_ptr_type(sequence_type) + self.cpp_iterator_cname = code.funcstate.allocate_temp(sequence_type, manage_ref=False) + code.putln("%s = 
&%s;" % (self.cpp_iterator_cname, self.sequence.result())) + begin_func = "%s->begin" % self.cpp_iterator_cname + # TODO: Limit scope. + code.putln("%s = %s();" % (self.result(), begin_func)) + return + if sequence_type.is_array or sequence_type.is_ptr: + raise InternalError("for in carray slice not transformed") + + is_builtin_sequence = sequence_type in (list_type, tuple_type) + if not is_builtin_sequence: + # reversed() not currently optimised (see Optimize.py) + assert not self.reversed, "internal error: reversed() only implemented for list/tuple objects" + self.may_be_a_sequence = not sequence_type.is_builtin_type + if self.may_be_a_sequence: + code.putln( + "if (likely(PyList_CheckExact(%s)) || PyTuple_CheckExact(%s)) {" % ( + self.sequence.py_result(), + self.sequence.py_result())) + + if is_builtin_sequence or self.may_be_a_sequence: + self.counter_cname = code.funcstate.allocate_temp( + PyrexTypes.c_py_ssize_t_type, manage_ref=False) + if self.reversed: + if sequence_type is list_type: + init_value = 'PyList_GET_SIZE(%s) - 1' % self.result() + else: + init_value = 'PyTuple_GET_SIZE(%s) - 1' % self.result() + else: + init_value = '0' + code.putln("%s = %s; __Pyx_INCREF(%s); %s = %s;" % ( + self.result(), + self.sequence.py_result(), + self.result(), + self.counter_cname, + init_value)) + if not is_builtin_sequence: + self.iter_func_ptr = code.funcstate.allocate_temp(self._func_iternext_type, manage_ref=False) + if self.may_be_a_sequence: + code.putln("%s = NULL;" % self.iter_func_ptr) + code.putln("} else {") + code.put("%s = -1; " % self.counter_cname) + + code.putln("%s = PyObject_GetIter(%s); %s" % ( + self.result(), + self.sequence.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + # PyObject_GetIter() fails if "tp_iternext" is not set, but the check below + # makes it visible to the C compiler that the pointer really isn't NULL, so that + # it can distinguish between the special cases and the generic case + code.putln("%s = Py_TYPE(%s)->tp_iternext; %s" % ( + self.iter_func_ptr, self.py_result(), + code.error_goto_if_null(self.iter_func_ptr, self.pos))) + if self.may_be_a_sequence: + code.putln("}") + + def generate_next_sequence_item(self, test_name, result_name, code): + assert self.counter_cname, "internal error: counter_cname temp not prepared" + final_size = 'Py%s_GET_SIZE(%s)' % (test_name, self.py_result()) + if self.sequence.is_sequence_constructor: + item_count = len(self.sequence.args) + if self.sequence.mult_factor is None: + final_size = item_count + elif isinstance(self.sequence.mult_factor.constant_result, _py_int_types): + final_size = item_count * self.sequence.mult_factor.constant_result + code.putln("if (%s >= %s) break;" % (self.counter_cname, final_size)) + if self.reversed: + inc_dec = '--' + else: + inc_dec = '++' + code.putln("#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS") + code.putln( + "%s = Py%s_GET_ITEM(%s, %s); __Pyx_INCREF(%s); %s%s; %s" % ( + result_name, + test_name, + self.py_result(), + self.counter_cname, + result_name, + self.counter_cname, + inc_dec, + # use the error label to avoid C compiler warnings if we only use it below + code.error_goto_if_neg('0', self.pos) + )) + code.putln("#else") + code.putln( + "%s = PySequence_ITEM(%s, %s); %s%s; %s" % ( + result_name, + self.py_result(), + self.counter_cname, + self.counter_cname, + inc_dec, + code.error_goto_if_null(result_name, self.pos))) + code.put_gotref(result_name) + code.putln("#endif") + + def 
generate_iter_next_result_code(self, result_name, code): + sequence_type = self.sequence.type + if self.reversed: + code.putln("if (%s < 0) break;" % self.counter_cname) + if sequence_type.is_cpp_class: + if self.cpp_iterator_cname: + end_func = "%s->end" % self.cpp_iterator_cname + else: + end_func = "%s.end" % self.sequence.result() + # TODO: Cache end() call? + code.putln("if (!(%s != %s())) break;" % ( + self.result(), + end_func)) + code.putln("%s = *%s;" % ( + result_name, + self.result())) + code.putln("++%s;" % self.result()) + return + elif sequence_type is list_type: + self.generate_next_sequence_item('List', result_name, code) + return + elif sequence_type is tuple_type: + self.generate_next_sequence_item('Tuple', result_name, code) + return + + if self.may_be_a_sequence: + code.putln("if (likely(!%s)) {" % self.iter_func_ptr) + code.putln("if (likely(PyList_CheckExact(%s))) {" % self.py_result()) + self.generate_next_sequence_item('List', result_name, code) + code.putln("} else {") + self.generate_next_sequence_item('Tuple', result_name, code) + code.putln("}") + code.put("} else ") + + code.putln("{") + code.putln( + "%s = %s(%s);" % ( + result_name, + self.iter_func_ptr, + self.py_result())) + code.putln("if (unlikely(!%s)) {" % result_name) + code.putln("PyObject* exc_type = PyErr_Occurred();") + code.putln("if (exc_type) {") + code.putln("if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();") + code.putln("else %s" % code.error_goto(self.pos)) + code.putln("}") + code.putln("break;") + code.putln("}") + code.put_gotref(result_name) + code.putln("}") + + def free_temps(self, code): + if self.counter_cname: + code.funcstate.release_temp(self.counter_cname) + if self.iter_func_ptr: + code.funcstate.release_temp(self.iter_func_ptr) + self.iter_func_ptr = None + if self.cpp_iterator_cname: + code.funcstate.release_temp(self.cpp_iterator_cname) + ExprNode.free_temps(self, code) + + +class NextNode(AtomicExprNode): + # Used as part of for statement implementation. + # Implements result = next(iterator) + # Created during analyse_types phase. + # The iterator is not owned by this node. + # + # iterator IteratorNode + + def __init__(self, iterator): + AtomicExprNode.__init__(self, iterator.pos) + self.iterator = iterator + + def nogil_check(self, env): + # ignore - errors (if any) are already handled by IteratorNode + pass + + def type_dependencies(self, env): + return self.iterator.type_dependencies(env) + + def infer_type(self, env, iterator_type=None): + if iterator_type is None: + iterator_type = self.iterator.infer_type(env) + if iterator_type.is_ptr or iterator_type.is_array: + return iterator_type.base_type + elif iterator_type.is_cpp_class: + item_type = env.lookup_operator_for_types(self.pos, "*", [iterator_type]).type.return_type + if item_type.is_reference: + item_type = item_type.ref_base_type + if item_type.is_const: + item_type = item_type.const_base_type + return item_type + else: + # Avoid duplication of complicated logic. 
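+            # Illustrative note: this builds a fake "seq[i]" lookup with a C
+            # Py_ssize_t index and asks it for its type, so IndexNode's
+            # item-type inference (list/tuple literals, memoryviews, etc.)
+            # is reused here rather than duplicated.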
+ fake_index_node = IndexNode( + self.pos, + base=self.iterator.sequence, + index=IntNode(self.pos, value='PY_SSIZE_T_MAX', + type=PyrexTypes.c_py_ssize_t_type)) + return fake_index_node.infer_type(env) + + def analyse_types(self, env): + self.type = self.infer_type(env, self.iterator.type) + self.is_temp = 1 + return self + + def generate_result_code(self, code): + self.iterator.generate_iter_next_result_code(self.result(), code) + + +class AsyncIteratorNode(ExprNode): + # Used as part of 'async for' statement implementation. + # + # Implements result = sequence.__aiter__() + # + # sequence ExprNode + + subexprs = ['sequence'] + + is_async = True + type = py_object_type + is_temp = 1 + + def infer_type(self, env): + return py_object_type + + def analyse_types(self, env): + self.sequence = self.sequence.analyse_types(env) + if not self.sequence.type.is_pyobject: + error(self.pos, "async for loops not allowed on C/C++ types") + self.sequence = self.sequence.coerce_to_pyobject(env) + return self + + def generate_result_code(self, code): + code.globalstate.use_utility_code(UtilityCode.load_cached("AsyncIter", "Coroutine.c")) + code.putln("%s = __Pyx_Coroutine_GetAsyncIter(%s); %s" % ( + self.result(), + self.sequence.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.result()) + + +class AsyncNextNode(AtomicExprNode): + # Used as part of 'async for' statement implementation. + # Implements result = iterator.__anext__() + # Created during analyse_types phase. + # The iterator is not owned by this node. + # + # iterator IteratorNode + + type = py_object_type + is_temp = 1 + + def __init__(self, iterator): + AtomicExprNode.__init__(self, iterator.pos) + self.iterator = iterator + + def infer_type(self, env): + return py_object_type + + def analyse_types(self, env): + return self + + def generate_result_code(self, code): + code.globalstate.use_utility_code(UtilityCode.load_cached("AsyncIter", "Coroutine.c")) + code.putln("%s = __Pyx_Coroutine_AsyncIterNext(%s); %s" % ( + self.result(), + self.iterator.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.result()) + + +class WithExitCallNode(ExprNode): + # The __exit__() call of a 'with' statement. Used in both the + # except and finally clauses. 
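+    # (Illustrative) For "with cm as x: ...", this node generates the
+    # "cm.__exit__(exc_type, exc_value, traceback)" call; the truthiness of
+    # its result decides whether a pending exception is suppressed.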
+ + # with_stat WithStatNode the surrounding 'with' statement + # args TupleNode or ResultStatNode the exception info tuple + # await_expr AwaitExprNode the await expression of an 'async with' statement + + subexprs = ['args', 'await_expr'] + test_if_run = True + await_expr = None + + def analyse_types(self, env): + self.args = self.args.analyse_types(env) + if self.await_expr: + self.await_expr = self.await_expr.analyse_types(env) + self.type = PyrexTypes.c_bint_type + self.is_temp = True + return self + + def generate_evaluation_code(self, code): + if self.test_if_run: + # call only if it was not already called (and decref-cleared) + code.putln("if (%s) {" % self.with_stat.exit_var) + + self.args.generate_evaluation_code(code) + result_var = code.funcstate.allocate_temp(py_object_type, manage_ref=False) + + code.mark_pos(self.pos) + code.globalstate.use_utility_code(UtilityCode.load_cached( + "PyObjectCall", "ObjectHandling.c")) + code.putln("%s = __Pyx_PyObject_Call(%s, %s, NULL);" % ( + result_var, + self.with_stat.exit_var, + self.args.result())) + code.put_decref_clear(self.with_stat.exit_var, type=py_object_type) + self.args.generate_disposal_code(code) + self.args.free_temps(code) + + code.putln(code.error_goto_if_null(result_var, self.pos)) + code.put_gotref(result_var) + + if self.await_expr: + # FIXME: result_var temp currently leaks into the closure + self.await_expr.generate_evaluation_code(code, source_cname=result_var, decref_source=True) + code.putln("%s = %s;" % (result_var, self.await_expr.py_result())) + self.await_expr.generate_post_assignment_code(code) + self.await_expr.free_temps(code) + + if self.result_is_used: + self.allocate_temp_result(code) + code.putln("%s = __Pyx_PyObject_IsTrue(%s);" % (self.result(), result_var)) + code.put_decref_clear(result_var, type=py_object_type) + if self.result_is_used: + code.put_error_if_neg(self.pos, self.result()) + code.funcstate.release_temp(result_var) + if self.test_if_run: + code.putln("}") + + +class ExcValueNode(AtomicExprNode): + # Node created during analyse_types phase + # of an ExceptClauseNode to fetch the current + # exception value. + + type = py_object_type + + def __init__(self, pos): + ExprNode.__init__(self, pos) + + def set_var(self, var): + self.var = var + + def calculate_result_code(self): + return self.var + + def generate_result_code(self, code): + pass + + def analyse_types(self, env): + return self + + +class TempNode(ExprNode): + # Node created during analyse_types phase + # of some nodes to hold a temporary value. + # + # Note: One must call "allocate" and "release" on + # the node during code generation to get/release the temp. + # This is because the temp result is often used outside of + # the regular cycle. 
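+    # A typical usage pattern during code generation looks roughly like
+    # this (hedged sketch, not a verbatim call site):
+    #
+    #     temp = TempNode(pos, some_type)
+    #     temp.allocate(code)
+    #     code.putln("... %s ..." % temp.result())
+    #     temp.release(code)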
+
+    subexprs = []
+
+    def __init__(self, pos, type, env=None):
+        ExprNode.__init__(self, pos)
+        self.type = type
+        if type.is_pyobject:
+            self.result_ctype = py_object_type
+        self.is_temp = 1
+
+    def analyse_types(self, env):
+        return self
+
+    def analyse_target_declaration(self, env):
+        pass
+
+    def generate_result_code(self, code):
+        pass
+
+    def allocate(self, code):
+        self.temp_cname = code.funcstate.allocate_temp(self.type, manage_ref=True)
+
+    def release(self, code):
+        code.funcstate.release_temp(self.temp_cname)
+        self.temp_cname = None
+
+    def result(self):
+        try:
+            return self.temp_cname
+        except AttributeError:
+            assert False, "Remember to call allocate/release on TempNode"
+            raise
+
+    # Do not participate in normal temp alloc/dealloc:
+    def allocate_temp_result(self, code):
+        pass
+
+    def release_temp_result(self, code):
+        pass
+
+class PyTempNode(TempNode):
+    #  TempNode holding a Python value.
+
+    def __init__(self, pos, env):
+        TempNode.__init__(self, pos, PyrexTypes.py_object_type, env)
+
+class RawCNameExprNode(ExprNode):
+    subexprs = []
+
+    def __init__(self, pos, type=None, cname=None):
+        ExprNode.__init__(self, pos, type=type)
+        if cname is not None:
+            self.cname = cname
+
+    def analyse_types(self, env):
+        return self
+
+    def set_cname(self, cname):
+        self.cname = cname
+
+    def result(self):
+        return self.cname
+
+    def generate_result_code(self, code):
+        pass
+
+
+#-------------------------------------------------------------------
+#
+#  F-strings
+#
+#-------------------------------------------------------------------
+
+
+class JoinedStrNode(ExprNode):
+    # F-strings
+    #
+    # values   [UnicodeNode|FormattedValueNode]   Substrings of the f-string
+    #
+    type = unicode_type
+    is_temp = True
+
+    subexprs = ['values']
+
+    def analyse_types(self, env):
+        self.values = [v.analyse_types(env).coerce_to_pyobject(env) for v in self.values]
+        return self
+
+    def may_be_none(self):
+        # PyUnicode_Join() always returns a Unicode string or raises an exception
+        return False
+
+    def generate_evaluation_code(self, code):
+        code.mark_pos(self.pos)
+        num_items = len(self.values)
+        list_var = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
+        ulength_var = code.funcstate.allocate_temp(PyrexTypes.c_py_ssize_t_type, manage_ref=False)
+        max_char_var = code.funcstate.allocate_temp(PyrexTypes.c_py_ucs4_type, manage_ref=False)
+
+        code.putln('%s = PyTuple_New(%s); %s' % (
+            list_var,
+            num_items,
+            code.error_goto_if_null(list_var, self.pos)))
+        code.put_gotref(list_var)
+        code.putln("%s = 0;" % ulength_var)
+        code.putln("%s = 127;" % max_char_var)  # at least ASCII character range
+
+        for i, node in enumerate(self.values):
+            node.generate_evaluation_code(code)
+            node.make_owned_reference(code)
+
+            ulength = "__Pyx_PyUnicode_GET_LENGTH(%s)" % node.py_result()
+            max_char_value = "__Pyx_PyUnicode_MAX_CHAR_VALUE(%s)" % node.py_result()
+            is_ascii = False
+            if isinstance(node, UnicodeNode):
+                try:
+                    # most strings will be ASCII or at least Latin-1
+                    node.value.encode('iso8859-1')
+                    max_char_value = '255'
+                    node.value.encode('us-ascii')
+                    is_ascii = True
+                except UnicodeEncodeError:
+                    if max_char_value != '255':
+                        # not ISO8859-1 => check BMP limit
+                        max_char = max(map(ord, node.value))
+                        if max_char < 0xD800:
+                            # BMP-only, no surrogate pairs used
+                            max_char_value = '65535'
+                            ulength = str(len(node.value))
+                        elif max_char >= 65536:
+                            # clearly outside of BMP, and not on a 16-bit Unicode system
+                            max_char_value = '1114111'
+                            ulength = str(len(node.value))
+                        else:
+                            # not really worth implementing a check for
surrogate pairs here + # drawback: C code can differ when generating on Py2 with 2-byte Unicode + pass + else: + ulength = str(len(node.value)) + elif isinstance(node, FormattedValueNode) and node.value.type.is_numeric: + is_ascii = True # formatted C numbers are always ASCII + + if not is_ascii: + code.putln("%s = (%s > %s) ? %s : %s;" % ( + max_char_var, max_char_value, max_char_var, max_char_value, max_char_var)) + code.putln("%s += %s;" % (ulength_var, ulength)) + + code.put_giveref(node.py_result()) + code.putln('PyTuple_SET_ITEM(%s, %s, %s);' % (list_var, i, node.py_result())) + node.generate_post_assignment_code(code) + node.free_temps(code) + + code.mark_pos(self.pos) + self.allocate_temp_result(code) + code.globalstate.use_utility_code(UtilityCode.load_cached("JoinPyUnicode", "StringTools.c")) + code.putln('%s = __Pyx_PyUnicode_Join(%s, %d, %s, %s); %s' % ( + self.result(), + list_var, + num_items, + ulength_var, + max_char_var, + code.error_goto_if_null(self.py_result(), self.pos))) + code.put_gotref(self.py_result()) + + code.put_decref_clear(list_var, py_object_type) + code.funcstate.release_temp(list_var) + code.funcstate.release_temp(ulength_var) + code.funcstate.release_temp(max_char_var) + + +class FormattedValueNode(ExprNode): + # {}-delimited portions of an f-string + # + # value ExprNode The expression itself + # conversion_char str or None Type conversion (!s, !r, !a, or none, or 'd' for integer conversion) + # format_spec JoinedStrNode or None Format string passed to __format__ + # c_format_spec str or None If not None, formatting can be done at the C level + + subexprs = ['value', 'format_spec'] + + type = unicode_type + is_temp = True + c_format_spec = None + + find_conversion_func = { + 's': 'PyObject_Unicode', + 'r': 'PyObject_Repr', + 'a': 'PyObject_ASCII', # NOTE: mapped to PyObject_Repr() in Py2 + 'd': '__Pyx_PyNumber_IntOrLong', # NOTE: internal mapping for '%d' formatting + }.get + + def may_be_none(self): + # PyObject_Format() always returns a Unicode string or raises an exception + return False + + def analyse_types(self, env): + self.value = self.value.analyse_types(env) + if not self.format_spec or self.format_spec.is_string_literal: + c_format_spec = self.format_spec.value if self.format_spec else self.value.type.default_format_spec + if self.value.type.can_coerce_to_pystring(env, format_spec=c_format_spec): + self.c_format_spec = c_format_spec + + if self.format_spec: + self.format_spec = self.format_spec.analyse_types(env).coerce_to_pyobject(env) + if self.c_format_spec is None: + self.value = self.value.coerce_to_pyobject(env) + if not self.format_spec and (not self.conversion_char or self.conversion_char == 's'): + if self.value.type is unicode_type and not self.value.may_be_none(): + # value is definitely a unicode string and we don't format it any special + return self.value + return self + + def generate_result_code(self, code): + if self.c_format_spec is not None and not self.value.type.is_pyobject: + convert_func_call = self.value.type.convert_to_pystring( + self.value.result(), code, self.c_format_spec) + code.putln("%s = %s; %s" % ( + self.result(), + convert_func_call, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + return + + value_result = self.value.py_result() + value_is_unicode = self.value.type is unicode_type and not self.value.may_be_none() + if self.format_spec: + format_func = '__Pyx_PyObject_Format' + format_spec = self.format_spec.py_result() + else: + # common case: expect simple Unicode 
pass-through if no format spec + format_func = '__Pyx_PyObject_FormatSimple' + # passing a Unicode format string in Py2 forces PyObject_Format() to also return a Unicode string + format_spec = Naming.empty_unicode + + conversion_char = self.conversion_char + if conversion_char == 's' and value_is_unicode: + # no need to pipe unicode strings through str() + conversion_char = None + + if conversion_char: + fn = self.find_conversion_func(conversion_char) + assert fn is not None, "invalid conversion character found: '%s'" % conversion_char + value_result = '%s(%s)' % (fn, value_result) + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectFormatAndDecref", "StringTools.c")) + format_func += 'AndDecref' + elif self.format_spec: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectFormat", "StringTools.c")) + else: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectFormatSimple", "StringTools.c")) + + code.putln("%s = %s(%s, %s); %s" % ( + self.result(), + format_func, + value_result, + format_spec, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +#------------------------------------------------------------------- +# +# Parallel nodes (cython.parallel.thread(savailable|id)) +# +#------------------------------------------------------------------- + +class ParallelThreadsAvailableNode(AtomicExprNode): + """ + Note: this is disabled and not a valid directive at this moment + + Implements cython.parallel.threadsavailable(). If we are called from the + sequential part of the application, we need to call omp_get_max_threads(), + and in the parallel part we can just call omp_get_num_threads() + """ + + type = PyrexTypes.c_int_type + + def analyse_types(self, env): + self.is_temp = True + # env.add_include_file("omp.h") + return self + + def generate_result_code(self, code): + code.putln("#ifdef _OPENMP") + code.putln("if (omp_in_parallel()) %s = omp_get_max_threads();" % + self.temp_code) + code.putln("else %s = omp_get_num_threads();" % self.temp_code) + code.putln("#else") + code.putln("%s = 1;" % self.temp_code) + code.putln("#endif") + + def result(self): + return self.temp_code + + +class ParallelThreadIdNode(AtomicExprNode): #, Nodes.ParallelNode): + """ + Implements cython.parallel.threadid() + """ + + type = PyrexTypes.c_int_type + + def analyse_types(self, env): + self.is_temp = True + # env.add_include_file("omp.h") + return self + + def generate_result_code(self, code): + code.putln("#ifdef _OPENMP") + code.putln("%s = omp_get_thread_num();" % self.temp_code) + code.putln("#else") + code.putln("%s = 0;" % self.temp_code) + code.putln("#endif") + + def result(self): + return self.temp_code + + +#------------------------------------------------------------------- +# +# Trailer nodes +# +#------------------------------------------------------------------- + + +class _IndexingBaseNode(ExprNode): + # Base class for indexing nodes. + # + # base ExprNode the value being indexed + + def is_ephemeral(self): + # in most cases, indexing will return a safe reference to an object in a container, + # so we consider the result safe if the base object is + return self.base.is_ephemeral() or self.base.type in ( + basestring_type, str_type, bytes_type, bytearray_type, unicode_type) + + def check_const_addr(self): + return self.base.check_const_addr() and self.index.check_const() + + def is_lvalue(self): + # NOTE: references currently have both is_reference and is_ptr + # set. 
Since pointers and references have different lvalue + # rules, we must be careful to separate the two. + if self.type.is_reference: + if self.type.ref_base_type.is_array: + # fixed-sized arrays aren't l-values + return False + elif self.type.is_ptr: + # non-const pointers can always be reassigned + return True + # Just about everything else returned by the index operator + # can be an lvalue. + return True + + +class IndexNode(_IndexingBaseNode): + # Sequence indexing. + # + # base ExprNode + # index ExprNode + # type_indices [PyrexType] + # + # is_fused_index boolean Whether the index is used to specialize a + # c(p)def function + + subexprs = ['base', 'index'] + type_indices = None + + is_subscript = True + is_fused_index = False + + def calculate_constant_result(self): + self.constant_result = self.base.constant_result[self.index.constant_result] + + def compile_time_value(self, denv): + base = self.base.compile_time_value(denv) + index = self.index.compile_time_value(denv) + try: + return base[index] + except Exception as e: + self.compile_time_value_error(e) + + def is_simple(self): + base = self.base + return (base.is_simple() and self.index.is_simple() + and base.type and (base.type.is_ptr or base.type.is_array)) + + def may_be_none(self): + base_type = self.base.type + if base_type: + if base_type.is_string: + return False + if isinstance(self.index, SliceNode): + # slicing! + if base_type in (bytes_type, bytearray_type, str_type, unicode_type, + basestring_type, list_type, tuple_type): + return False + return ExprNode.may_be_none(self) + + def analyse_target_declaration(self, env): + pass + + def analyse_as_type(self, env): + base_type = self.base.analyse_as_type(env) + if base_type and not base_type.is_pyobject: + if base_type.is_cpp_class: + if isinstance(self.index, TupleNode): + template_values = self.index.args + else: + template_values = [self.index] + type_node = Nodes.TemplatedTypeNode( + pos=self.pos, + positional_args=template_values, + keyword_args=None) + return type_node.analyse(env, base_type=base_type) + elif self.index.is_slice or self.index.is_sequence_constructor: + # memory view + from . import MemoryView + env.use_utility_code(MemoryView.view_utility_code) + axes = [self.index] if self.index.is_slice else list(self.index.args) + return PyrexTypes.MemoryViewSliceType(base_type, MemoryView.get_axes_specs(env, axes)) + else: + # C array + index = self.index.compile_time_value(env) + if index is not None: + try: + index = int(index) + except (ValueError, TypeError): + pass + else: + return PyrexTypes.CArrayType(base_type, index) + error(self.pos, "Array size must be a compile time constant") + return None + + def type_dependencies(self, env): + return self.base.type_dependencies(env) + self.index.type_dependencies(env) + + def infer_type(self, env): + base_type = self.base.infer_type(env) + if self.index.is_slice: + # slicing! + if base_type.is_string: + # sliced C strings must coerce to Python + return bytes_type + elif base_type.is_pyunicode_ptr: + # sliced Py_UNICODE* strings must coerce to Python + return unicode_type + elif base_type in (unicode_type, bytes_type, str_type, + bytearray_type, list_type, tuple_type): + # slicing these returns the same type + return base_type + else: + # TODO: Handle buffers (hopefully without too much redundancy). + return py_object_type + + index_type = self.index.infer_type(env) + if index_type and index_type.is_int or isinstance(self.index, IntNode): + # indexing! 
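+            # Examples of what the branches below infer (illustrative):
+            #     unicode[c_int]    -> Py_UCS4
+            #     bytearray[c_int]  -> unsigned char (c_uchar)
+            #     (int*)[c_int]     -> int
+            #     ctuple[constant]  -> the component type at that index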
+ if base_type is unicode_type: + # Py_UCS4 will automatically coerce to a unicode string + # if required, so this is safe. We only infer Py_UCS4 + # when the index is a C integer type. Otherwise, we may + # need to use normal Python item access, in which case + # it's faster to return the one-char unicode string than + # to receive it, throw it away, and potentially rebuild it + # on a subsequent PyObject coercion. + return PyrexTypes.c_py_ucs4_type + elif base_type is str_type: + # always returns str - Py2: bytes, Py3: unicode + return base_type + elif base_type is bytearray_type: + return PyrexTypes.c_uchar_type + elif isinstance(self.base, BytesNode): + #if env.global_scope().context.language_level >= 3: + # # inferring 'char' can be made to work in Python 3 mode + # return PyrexTypes.c_char_type + # Py2/3 return different types on indexing bytes objects + return py_object_type + elif base_type in (tuple_type, list_type): + # if base is a literal, take a look at its values + item_type = infer_sequence_item_type( + env, self.base, self.index, seq_type=base_type) + if item_type is not None: + return item_type + elif base_type.is_ptr or base_type.is_array: + return base_type.base_type + elif base_type.is_ctuple and isinstance(self.index, IntNode): + if self.index.has_constant_result(): + index = self.index.constant_result + if index < 0: + index += base_type.size + if 0 <= index < base_type.size: + return base_type.components[index] + + if base_type.is_cpp_class: + class FakeOperand: + def __init__(self, **kwds): + self.__dict__.update(kwds) + operands = [ + FakeOperand(pos=self.pos, type=base_type), + FakeOperand(pos=self.pos, type=index_type), + ] + index_func = env.lookup_operator('[]', operands) + if index_func is not None: + return index_func.type.return_type + + if is_pythran_expr(base_type) and is_pythran_expr(index_type): + index_with_type = (self.index, index_type) + return PythranExpr(pythran_indexing_type(base_type, [index_with_type])) + + # may be slicing or indexing, we don't know + if base_type in (unicode_type, str_type): + # these types always returns their own type on Python indexing/slicing + return base_type + else: + # TODO: Handle buffers (hopefully without too much redundancy). + return py_object_type + + def analyse_types(self, env): + return self.analyse_base_and_index_types(env, getting=True) + + def analyse_target_types(self, env): + node = self.analyse_base_and_index_types(env, setting=True) + if node.type.is_const: + error(self.pos, "Assignment to const dereference") + if node is self and not node.is_lvalue(): + error(self.pos, "Assignment to non-lvalue of type '%s'" % node.type) + return node + + def analyse_base_and_index_types(self, env, getting=False, setting=False, + analyse_base=True): + # Note: This might be cleaned up by having IndexNode + # parsed in a saner way and only construct the tuple if + # needed. + if analyse_base: + self.base = self.base.analyse_types(env) + + if self.base.type.is_error: + # Do not visit child tree if base is undeclared to avoid confusing + # error messages + self.type = PyrexTypes.error_type + return self + + is_slice = self.index.is_slice + if not env.directives['wraparound']: + if is_slice: + check_negative_indices(self.index.start, self.index.stop) + else: + check_negative_indices(self.index) + + # Potentially overflowing index value. 
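+        # Hedged example: a literal index too large for Py_ssize_t (e.g.
+        # "x[2**100]") is coerced to a Python object here, so the lookup
+        # goes through Python-level indexing and raises the proper Python
+        # error instead of silently truncating the index.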
+ if not is_slice and isinstance(self.index, IntNode) and Utils.long_literal(self.index.value): + self.index = self.index.coerce_to_pyobject(env) + + is_memslice = self.base.type.is_memoryviewslice + # Handle the case where base is a literal char* (and we expect a string, not an int) + if not is_memslice and (isinstance(self.base, BytesNode) or is_slice): + if self.base.type.is_string or not (self.base.type.is_ptr or self.base.type.is_array): + self.base = self.base.coerce_to_pyobject(env) + + replacement_node = self.analyse_as_buffer_operation(env, getting) + if replacement_node is not None: + return replacement_node + + self.nogil = env.nogil + base_type = self.base.type + + if not base_type.is_cfunction: + self.index = self.index.analyse_types(env) + self.original_index_type = self.index.type + + if base_type.is_unicode_char: + # we infer Py_UNICODE/Py_UCS4 for unicode strings in some + # cases, but indexing must still work for them + if setting: + warning(self.pos, "cannot assign to Unicode string index", level=1) + elif self.index.constant_result in (0, -1): + # uchar[0] => uchar + return self.base + self.base = self.base.coerce_to_pyobject(env) + base_type = self.base.type + + if base_type.is_pyobject: + return self.analyse_as_pyobject(env, is_slice, getting, setting) + elif base_type.is_ptr or base_type.is_array: + return self.analyse_as_c_array(env, is_slice) + elif base_type.is_cpp_class: + return self.analyse_as_cpp(env, setting) + elif base_type.is_cfunction: + return self.analyse_as_c_function(env) + elif base_type.is_ctuple: + return self.analyse_as_c_tuple(env, getting, setting) + else: + error(self.pos, + "Attempting to index non-array type '%s'" % + base_type) + self.type = PyrexTypes.error_type + return self + + def analyse_as_pyobject(self, env, is_slice, getting, setting): + base_type = self.base.type + if self.index.type.is_unicode_char and base_type is not dict_type: + # TODO: eventually fold into case below and remove warning, once people have adapted their code + warning(self.pos, + "Item lookup of unicode character codes now always converts to a Unicode string. 
" + "Use an explicit C integer cast to get back the previous integer lookup behaviour.", level=1) + self.index = self.index.coerce_to_pyobject(env) + self.is_temp = 1 + elif self.index.type.is_int and base_type is not dict_type: + if (getting + and (base_type in (list_type, tuple_type, bytearray_type)) + and (not self.index.type.signed + or not env.directives['wraparound'] + or (isinstance(self.index, IntNode) and + self.index.has_constant_result() and self.index.constant_result >= 0)) + and not env.directives['boundscheck']): + self.is_temp = 0 + else: + self.is_temp = 1 + self.index = self.index.coerce_to(PyrexTypes.c_py_ssize_t_type, env).coerce_to_simple(env) + self.original_index_type.create_to_py_utility_code(env) + else: + self.index = self.index.coerce_to_pyobject(env) + self.is_temp = 1 + + if self.index.type.is_int and base_type is unicode_type: + # Py_UNICODE/Py_UCS4 will automatically coerce to a unicode string + # if required, so this is fast and safe + self.type = PyrexTypes.c_py_ucs4_type + elif self.index.type.is_int and base_type is bytearray_type: + if setting: + self.type = PyrexTypes.c_uchar_type + else: + # not using 'uchar' to enable fast and safe error reporting as '-1' + self.type = PyrexTypes.c_int_type + elif is_slice and base_type in (bytes_type, bytearray_type, str_type, unicode_type, list_type, tuple_type): + self.type = base_type + else: + item_type = None + if base_type in (list_type, tuple_type) and self.index.type.is_int: + item_type = infer_sequence_item_type( + env, self.base, self.index, seq_type=base_type) + if item_type is None: + item_type = py_object_type + self.type = item_type + if base_type in (list_type, tuple_type, dict_type): + # do the None check explicitly (not in a helper) to allow optimising it away + self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable") + + self.wrap_in_nonecheck_node(env, getting) + return self + + def analyse_as_c_array(self, env, is_slice): + base_type = self.base.type + self.type = base_type.base_type + if is_slice: + self.type = base_type + elif self.index.type.is_pyobject: + self.index = self.index.coerce_to(PyrexTypes.c_py_ssize_t_type, env) + elif not self.index.type.is_int: + error(self.pos, "Invalid index type '%s'" % self.index.type) + return self + + def analyse_as_cpp(self, env, setting): + base_type = self.base.type + function = env.lookup_operator("[]", [self.base, self.index]) + if function is None: + error(self.pos, "Indexing '%s' not supported for index type '%s'" % (base_type, self.index.type)) + self.type = PyrexTypes.error_type + self.result_code = "" + return self + func_type = function.type + if func_type.is_ptr: + func_type = func_type.base_type + self.exception_check = func_type.exception_check + self.exception_value = func_type.exception_value + if self.exception_check: + if not setting: + self.is_temp = True + if self.exception_value is None: + env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) + self.index = self.index.coerce_to(func_type.args[0].type, env) + self.type = func_type.return_type + if setting and not func_type.return_type.is_reference: + error(self.pos, "Can't set non-reference result '%s'" % self.type) + return self + + def analyse_as_c_function(self, env): + base_type = self.base.type + if base_type.is_fused: + self.parse_indexed_fused_cdef(env) + else: + self.type_indices = self.parse_index_as_types(env) + self.index = None # FIXME: use a dedicated Node class instead of generic IndexNode + if base_type.templates 
is None: + error(self.pos, "Can only parameterize template functions.") + self.type = error_type + elif self.type_indices is None: + # Error recorded earlier. + self.type = error_type + elif len(base_type.templates) != len(self.type_indices): + error(self.pos, "Wrong number of template arguments: expected %s, got %s" % ( + (len(base_type.templates), len(self.type_indices)))) + self.type = error_type + else: + self.type = base_type.specialize(dict(zip(base_type.templates, self.type_indices))) + # FIXME: use a dedicated Node class instead of generic IndexNode + return self + + def analyse_as_c_tuple(self, env, getting, setting): + base_type = self.base.type + if isinstance(self.index, IntNode) and self.index.has_constant_result(): + index = self.index.constant_result + if -base_type.size <= index < base_type.size: + if index < 0: + index += base_type.size + self.type = base_type.components[index] + else: + error(self.pos, + "Index %s out of bounds for '%s'" % + (index, base_type)) + self.type = PyrexTypes.error_type + return self + else: + self.base = self.base.coerce_to_pyobject(env) + return self.analyse_base_and_index_types(env, getting=getting, setting=setting, analyse_base=False) + + def analyse_as_buffer_operation(self, env, getting): + """ + Analyse buffer indexing and memoryview indexing/slicing + """ + if isinstance(self.index, TupleNode): + indices = self.index.args + else: + indices = [self.index] + + base = self.base + base_type = base.type + replacement_node = None + if base_type.is_memoryviewslice: + # memoryviewslice indexing or slicing + from . import MemoryView + if base.is_memview_slice: + # For memory views, "view[i][j]" is the same as "view[i, j]" => use the latter for speed. + merged_indices = base.merged_indices(indices) + if merged_indices is not None: + base = base.base + base_type = base.type + indices = merged_indices + have_slices, indices, newaxes = MemoryView.unellipsify(indices, base_type.ndim) + if have_slices: + replacement_node = MemoryViewSliceNode(self.pos, indices=indices, base=base) + else: + replacement_node = MemoryViewIndexNode(self.pos, indices=indices, base=base) + elif base_type.is_buffer or base_type.is_pythran_expr: + if base_type.is_pythran_expr or len(indices) == base_type.ndim: + # Buffer indexing + is_buffer_access = True + indices = [index.analyse_types(env) for index in indices] + if base_type.is_pythran_expr: + do_replacement = all( + index.type.is_int or index.is_slice or index.type.is_pythran_expr + for index in indices) + if do_replacement: + for i,index in enumerate(indices): + if index.is_slice: + index = SliceIntNode(index.pos, start=index.start, stop=index.stop, step=index.step) + index = index.analyse_types(env) + indices[i] = index + else: + do_replacement = all(index.type.is_int for index in indices) + if do_replacement: + replacement_node = BufferIndexNode(self.pos, indices=indices, base=base) + # On cloning, indices is cloned. Otherwise, unpack index into indices. 
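+        # (Comment only) The assert below enforces that assumption: by the
+        # time we reach this point, the index must not already be a CloneNode.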
+ assert not isinstance(self.index, CloneNode) + + if replacement_node is not None: + replacement_node = replacement_node.analyse_types(env, getting) + return replacement_node + + def wrap_in_nonecheck_node(self, env, getting): + if not env.directives['nonecheck'] or not self.base.may_be_none(): + return + self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable") + + def parse_index_as_types(self, env, required=True): + if isinstance(self.index, TupleNode): + indices = self.index.args + else: + indices = [self.index] + type_indices = [] + for index in indices: + type_indices.append(index.analyse_as_type(env)) + if type_indices[-1] is None: + if required: + error(index.pos, "not parsable as a type") + return None + return type_indices + + def parse_indexed_fused_cdef(self, env): + """ + Interpret fused_cdef_func[specific_type1, ...] + + Note that if this method is called, we are an indexed cdef function + with fused argument types, and this IndexNode will be replaced by the + NameNode with specific entry just after analysis of expressions by + AnalyseExpressionsTransform. + """ + self.type = PyrexTypes.error_type + + self.is_fused_index = True + + base_type = self.base.type + positions = [] + + if self.index.is_name or self.index.is_attribute: + positions.append(self.index.pos) + elif isinstance(self.index, TupleNode): + for arg in self.index.args: + positions.append(arg.pos) + specific_types = self.parse_index_as_types(env, required=False) + + if specific_types is None: + self.index = self.index.analyse_types(env) + + if not self.base.entry.as_variable: + error(self.pos, "Can only index fused functions with types") + else: + # A cpdef function indexed with Python objects + self.base.entry = self.entry = self.base.entry.as_variable + self.base.type = self.type = self.entry.type + + self.base.is_temp = True + self.is_temp = True + + self.entry.used = True + + self.is_fused_index = False + return + + for i, type in enumerate(specific_types): + specific_types[i] = type.specialize_fused(env) + + fused_types = base_type.get_fused_types() + if len(specific_types) > len(fused_types): + return error(self.pos, "Too many types specified") + elif len(specific_types) < len(fused_types): + t = fused_types[len(specific_types)] + return error(self.pos, "Not enough types specified to specialize " + "the function, %s is still fused" % t) + + # See if our index types form valid specializations + for pos, specific_type, fused_type in zip(positions, + specific_types, + fused_types): + if not any([specific_type.same_as(t) for t in fused_type.types]): + return error(pos, "Type not in fused type") + + if specific_type is None or specific_type.is_error: + return + + fused_to_specific = dict(zip(fused_types, specific_types)) + type = base_type.specialize(fused_to_specific) + + if type.is_fused: + # Only partially specific, this is invalid + error(self.pos, + "Index operation makes function only partially specific") + else: + # Fully specific, find the signature with the specialized entry + for signature in self.base.type.get_all_specialized_function_types(): + if type.same_as(signature): + self.type = signature + + if self.base.is_attribute: + # Pretend to be a normal attribute, for cdef extension + # methods + self.entry = signature.entry + self.is_attribute = True + self.obj = self.base.obj + + self.type.entry.used = True + self.base.type = signature + self.base.entry = signature.entry + + break + else: + # This is a bug + raise InternalError("Couldn't find the right signature") + + 
gil_message = "Indexing Python object" + + def calculate_result_code(self): + if self.base.type in (list_type, tuple_type, bytearray_type): + if self.base.type is list_type: + index_code = "PyList_GET_ITEM(%s, %s)" + elif self.base.type is tuple_type: + index_code = "PyTuple_GET_ITEM(%s, %s)" + elif self.base.type is bytearray_type: + index_code = "((unsigned char)(PyByteArray_AS_STRING(%s)[%s]))" + else: + assert False, "unexpected base type in indexing: %s" % self.base.type + elif self.base.type.is_cfunction: + return "%s<%s>" % ( + self.base.result(), + ",".join([param.empty_declaration_code() for param in self.type_indices])) + elif self.base.type.is_ctuple: + index = self.index.constant_result + if index < 0: + index += self.base.type.size + return "%s.f%s" % (self.base.result(), index) + else: + if (self.type.is_ptr or self.type.is_array) and self.type == self.base.type: + error(self.pos, "Invalid use of pointer slice") + return + index_code = "(%s[%s])" + return index_code % (self.base.result(), self.index.result()) + + def extra_index_params(self, code): + if self.index.type.is_int: + is_list = self.base.type is list_type + wraparound = ( + bool(code.globalstate.directives['wraparound']) and + self.original_index_type.signed and + not (isinstance(self.index.constant_result, _py_int_types) + and self.index.constant_result >= 0)) + boundscheck = bool(code.globalstate.directives['boundscheck']) + return ", %s, %d, %s, %d, %d, %d" % ( + self.original_index_type.empty_declaration_code(), + self.original_index_type.signed and 1 or 0, + self.original_index_type.to_py_function, + is_list, wraparound, boundscheck) + else: + return "" + + def generate_result_code(self, code): + if not self.is_temp: + # all handled in self.calculate_result_code() + return + + utility_code = None + if self.type.is_pyobject: + error_value = 'NULL' + if self.index.type.is_int: + if self.base.type is list_type: + function = "__Pyx_GetItemInt_List" + elif self.base.type is tuple_type: + function = "__Pyx_GetItemInt_Tuple" + else: + function = "__Pyx_GetItemInt" + utility_code = TempitaUtilityCode.load_cached("GetItemInt", "ObjectHandling.c") + else: + if self.base.type is dict_type: + function = "__Pyx_PyDict_GetItem" + utility_code = UtilityCode.load_cached("DictGetItem", "ObjectHandling.c") + elif self.base.type is py_object_type and self.index.type in (str_type, unicode_type): + # obj[str] is probably doing a dict lookup + function = "__Pyx_PyObject_Dict_GetItem" + utility_code = UtilityCode.load_cached("DictGetItem", "ObjectHandling.c") + else: + function = "__Pyx_PyObject_GetItem" + code.globalstate.use_utility_code( + TempitaUtilityCode.load_cached("GetItemInt", "ObjectHandling.c")) + utility_code = UtilityCode.load_cached("ObjectGetItem", "ObjectHandling.c") + elif self.type.is_unicode_char and self.base.type is unicode_type: + assert self.index.type.is_int + function = "__Pyx_GetItemInt_Unicode" + error_value = '(Py_UCS4)-1' + utility_code = UtilityCode.load_cached("GetItemIntUnicode", "StringTools.c") + elif self.base.type is bytearray_type: + assert self.index.type.is_int + assert self.type.is_int + function = "__Pyx_GetItemInt_ByteArray" + error_value = '-1' + utility_code = UtilityCode.load_cached("GetItemIntByteArray", "StringTools.c") + elif not (self.base.type.is_cpp_class and self.exception_check): + assert False, "unexpected type %s and base type %s for indexing" % ( + self.type, self.base.type) + + if utility_code is not None: + code.globalstate.use_utility_code(utility_code) + + if 
self.index.type.is_int:
+            index_code = self.index.result()
+        else:
+            index_code = self.index.py_result()
+
+        if self.base.type.is_cpp_class and self.exception_check:
+            translate_cpp_exception(code, self.pos,
+                "%s = %s[%s];" % (self.result(), self.base.result(),
+                                  self.index.result()),
+                self.result() if self.type.is_pyobject else None,
+                self.exception_value, self.in_nogil_context)
+        else:
+            error_check = '!%s' if error_value == 'NULL' else '%%s == %s' % error_value
+            code.putln(
+                "%s = %s(%s, %s%s); %s" % (
+                    self.result(),
+                    function,
+                    self.base.py_result(),
+                    index_code,
+                    self.extra_index_params(code),
+                    code.error_goto_if(error_check % self.result(), self.pos)))
+        if self.type.is_pyobject:
+            code.put_gotref(self.py_result())
+
+    def generate_setitem_code(self, value_code, code):
+        if self.index.type.is_int:
+            if self.base.type is bytearray_type:
+                code.globalstate.use_utility_code(
+                    UtilityCode.load_cached("SetItemIntByteArray", "StringTools.c"))
+                function = "__Pyx_SetItemInt_ByteArray"
+            else:
+                code.globalstate.use_utility_code(
+                    UtilityCode.load_cached("SetItemInt", "ObjectHandling.c"))
+                function = "__Pyx_SetItemInt"
+            index_code = self.index.result()
+        else:
+            index_code = self.index.py_result()
+            if self.base.type is dict_type:
+                function = "PyDict_SetItem"
+            # It would seem that we could specialize lists/tuples, but that
+            # shouldn't happen here.
+            # Both PyList_SetItem() and PyTuple_SetItem() take a Py_ssize_t as
+            # index instead of an object, and bad conversion here would give
+            # the wrong exception. Also, tuples are supposed to be immutable,
+            # and raise a TypeError when trying to set their entries
+            # (PyTuple_SetItem() is for creating new tuples from scratch).
+            else:
+                function = "PyObject_SetItem"
+        code.putln(code.error_goto_if_neg(
+            "%s(%s, %s, %s%s)" % (
+                function,
+                self.base.py_result(),
+                index_code,
+                value_code,
+                self.extra_index_params(code)),
+            self.pos))
+
+    def generate_assignment_code(self, rhs, code, overloaded_assignment=False,
+                                 exception_check=None, exception_value=None):
+        self.generate_subexpr_evaluation_code(code)
+
+        if self.type.is_pyobject:
+            self.generate_setitem_code(rhs.py_result(), code)
+        elif self.base.type is bytearray_type:
+            value_code = self._check_byte_value(code, rhs)
+            self.generate_setitem_code(value_code, code)
+        elif self.base.type.is_cpp_class and self.exception_check and self.exception_check == '+':
+            if overloaded_assignment and exception_check and \
+                    self.exception_value != exception_value:
+                # Handle the case that both the index operator and the assignment
+                # operator have a c++ exception handler and they are not the same.
+                translate_double_cpp_exception(code, self.pos, self.type,
+                    self.result(), rhs.result(), self.exception_value,
+                    exception_value, self.in_nogil_context)
+            else:
+                # Handle the case that only the index operator has a
+                # c++ exception handler, or that
+                # both exception handlers are the same.
+                translate_cpp_exception(code, self.pos,
+                    "%s = %s;" % (self.result(), rhs.result()),
+                    self.result() if self.type.is_pyobject else None,
+                    self.exception_value, self.in_nogil_context)
+        else:
+            code.putln(
+                "%s = %s;" % (self.result(), rhs.result()))
+
+        self.generate_subexpr_disposal_code(code)
+        self.free_subexpr_temps(code)
+        rhs.generate_disposal_code(code)
+        rhs.free_temps(code)
+
+    def _check_byte_value(self, code, rhs):
+        # TODO: should we do this generally on downcasts, or just here?
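+        # For "some_bytearray[i] = v" with an arbitrary C int v, the logic
+        # below emits C along these lines (illustrative sketch, labels are
+        # schematic):
+        #
+        #     if (unlikely(v < 0 || v > 255)) {
+        #         PyErr_SetString(PyExc_ValueError,
+        #                         "byte must be in range(0, 256)");
+        #         goto <error-label>;
+        #     }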
+ assert rhs.type.is_int, repr(rhs.type) + value_code = rhs.result() + if rhs.has_constant_result(): + if 0 <= rhs.constant_result < 256: + return value_code + needs_cast = True # make at least the C compiler happy + warning(rhs.pos, + "value outside of range(0, 256)" + " when assigning to byte: %s" % rhs.constant_result, + level=1) + else: + needs_cast = rhs.type != PyrexTypes.c_uchar_type + + if not self.nogil: + conditions = [] + if rhs.is_literal or rhs.type.signed: + conditions.append('%s < 0' % value_code) + if (rhs.is_literal or not + (rhs.is_temp and rhs.type in ( + PyrexTypes.c_uchar_type, PyrexTypes.c_char_type, + PyrexTypes.c_schar_type))): + conditions.append('%s > 255' % value_code) + if conditions: + code.putln("if (unlikely(%s)) {" % ' || '.join(conditions)) + code.putln( + 'PyErr_SetString(PyExc_ValueError,' + ' "byte must be in range(0, 256)"); %s' % + code.error_goto(self.pos)) + code.putln("}") + + if needs_cast: + value_code = '((unsigned char)%s)' % value_code + return value_code + + def generate_deletion_code(self, code, ignore_nonexisting=False): + self.generate_subexpr_evaluation_code(code) + #if self.type.is_pyobject: + if self.index.type.is_int: + function = "__Pyx_DelItemInt" + index_code = self.index.result() + code.globalstate.use_utility_code( + UtilityCode.load_cached("DelItemInt", "ObjectHandling.c")) + else: + index_code = self.index.py_result() + if self.base.type is dict_type: + function = "PyDict_DelItem" + else: + function = "PyObject_DelItem" + code.putln(code.error_goto_if_neg( + "%s(%s, %s%s)" % ( + function, + self.base.py_result(), + index_code, + self.extra_index_params(code)), + self.pos)) + self.generate_subexpr_disposal_code(code) + self.free_subexpr_temps(code) + + +class BufferIndexNode(_IndexingBaseNode): + """ + Indexing of buffers and memoryviews. This node is created during type + analysis from IndexNode and replaces it. + + Attributes: + base - base node being indexed + indices - list of indexing expressions + """ + + subexprs = ['base', 'indices'] + + is_buffer_access = True + + # Whether we're assigning to a buffer (in that case it needs to be writable) + writable_needed = False + + # Any indexing temp variables that we need to clean up. + index_temps = () + + def analyse_target_types(self, env): + self.analyse_types(env, getting=False) + + def analyse_types(self, env, getting=True): + """ + Analyse types for buffer indexing only. 
Overridden by memoryview + indexing and slicing subclasses + """ + # self.indices are already analyzed + if not self.base.is_name and not is_pythran_expr(self.base.type): + error(self.pos, "Can only index buffer variables") + self.type = error_type + return self + + if not getting: + if not self.base.entry.type.writable: + error(self.pos, "Writing to readonly buffer") + else: + self.writable_needed = True + if self.base.type.is_buffer: + self.base.entry.buffer_aux.writable_needed = True + + self.none_error_message = "'NoneType' object is not subscriptable" + self.analyse_buffer_index(env, getting) + self.wrap_in_nonecheck_node(env) + return self + + def analyse_buffer_index(self, env, getting): + if is_pythran_expr(self.base.type): + index_with_type_list = [(idx, idx.type) for idx in self.indices] + self.type = PythranExpr(pythran_indexing_type(self.base.type, index_with_type_list)) + else: + self.base = self.base.coerce_to_simple(env) + self.type = self.base.type.dtype + self.buffer_type = self.base.type + + if getting and (self.type.is_pyobject or self.type.is_pythran_expr): + self.is_temp = True + + def analyse_assignment(self, rhs): + """ + Called by IndexNode when this node is assigned to, + with the rhs of the assignment + """ + + def wrap_in_nonecheck_node(self, env): + if not env.directives['nonecheck'] or not self.base.may_be_none(): + return + self.base = self.base.as_none_safe_node(self.none_error_message) + + def nogil_check(self, env): + if self.is_buffer_access or self.is_memview_index: + if self.type.is_pyobject: + error(self.pos, "Cannot access buffer with object dtype without gil") + self.type = error_type + + def calculate_result_code(self): + return "(*%s)" % self.buffer_ptr_code + + def buffer_entry(self): + base = self.base + if self.base.is_nonecheck: + base = base.arg + return base.type.get_entry(base) + + def get_index_in_temp(self, code, ivar): + ret = code.funcstate.allocate_temp( + PyrexTypes.widest_numeric_type( + ivar.type, + PyrexTypes.c_ssize_t_type if ivar.type.signed else PyrexTypes.c_size_t_type), + manage_ref=False) + code.putln("%s = %s;" % (ret, ivar.result())) + return ret + + def buffer_lookup_code(self, code): + """ + ndarray[1, 2, 3] and memslice[1, 2, 3] + """ + if self.in_nogil_context: + if self.is_buffer_access or self.is_memview_index: + if code.globalstate.directives['boundscheck']: + warning(self.pos, "Use boundscheck(False) for faster access", level=1) + + # Assign indices to temps of at least (s)size_t to allow further index calculations. + self.index_temps = index_temps = [self.get_index_in_temp(code,ivar) for ivar in self.indices] + + # Generate buffer access code using these temps + from . 
import Buffer
+        buffer_entry = self.buffer_entry()
+        if buffer_entry.type.is_buffer:
+            negative_indices = buffer_entry.type.negative_indices
+        else:
+            negative_indices = Buffer.buffer_defaults['negative_indices']
+
+        return buffer_entry, Buffer.put_buffer_lookup_code(
+            entry=buffer_entry,
+            index_signeds=[ivar.type.signed for ivar in self.indices],
+            index_cnames=index_temps,
+            directives=code.globalstate.directives,
+            pos=self.pos, code=code,
+            negative_indices=negative_indices,
+            in_nogil_context=self.in_nogil_context)
+
+    def generate_assignment_code(self, rhs, code, overloaded_assignment=False):
+        self.generate_subexpr_evaluation_code(code)
+        self.generate_buffer_setitem_code(rhs, code)
+        self.generate_subexpr_disposal_code(code)
+        self.free_subexpr_temps(code)
+        rhs.generate_disposal_code(code)
+        rhs.free_temps(code)
+
+    def generate_buffer_setitem_code(self, rhs, code, op=""):
+        base_type = self.base.type
+        if is_pythran_expr(base_type) and is_pythran_supported_type(rhs.type):
+            obj = code.funcstate.allocate_temp(PythranExpr(pythran_type(self.base.type)), manage_ref=False)
+            # We have got to do this because we have to declare pythran objects
+            # at the beginning of the functions.
+            # Indeed, Cython uses "goto" statements for error management, and
+            # RAII doesn't work with that kind of construction.
+            # Moreover, the way Pythran expressions are made is that they don't
+            # support move-assignment easily.
+            # Thus, we explicitly destroy and then in-place construct new
+            # objects in this case.
+            code.putln("__Pyx_call_destructor(%s);" % obj)
+            code.putln("new (&%s) decltype(%s){%s};" % (obj, obj, self.base.pythran_result()))
+            code.putln("%s%s %s= %s;" % (
+                obj,
+                pythran_indexing_code(self.indices),
+                op,
+                rhs.pythran_result()))
+            return
+
+        # Used from generate_assignment_code and InPlaceAssignmentNode
+        buffer_entry, ptrexpr = self.buffer_lookup_code(code)
+
+        if self.buffer_type.dtype.is_pyobject:
+            # Must manage refcounts. Decref what is already there
+            # and incref what we put in.
+            ptr = code.funcstate.allocate_temp(buffer_entry.buf_ptr_type,
+                                               manage_ref=False)
+            rhs_code = rhs.result()
+            code.putln("%s = %s;" % (ptr, ptrexpr))
+            code.put_gotref("*%s" % ptr)
+            code.putln("__Pyx_INCREF(%s); __Pyx_DECREF(*%s);" % (
+                rhs_code, ptr))
+            code.putln("*%s %s= %s;" % (ptr, op, rhs_code))
+            code.put_giveref("*%s" % ptr)
+            code.funcstate.release_temp(ptr)
+        else:
+            # Simple case
+            code.putln("*%s %s= %s;" % (ptrexpr, op, rhs.result()))
+
+    def generate_result_code(self, code):
+        if is_pythran_expr(self.base.type):
+            res = self.result()
+            code.putln("__Pyx_call_destructor(%s);" % res)
+            code.putln("new (&%s) decltype(%s){%s%s};" % (
+                res,
+                res,
+                self.base.pythran_result(),
+                pythran_indexing_code(self.indices)))
+            return
+        buffer_entry, self.buffer_ptr_code = self.buffer_lookup_code(code)
+        if self.type.is_pyobject:
+            # is_temp is True, so must pull out value and incref it.
+ # NOTE: object temporary results for nodes are declared + # as PyObject *, so we need a cast + code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code)) + code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result()) + + def free_temps(self, code): + for temp in self.index_temps: + code.funcstate.release_temp(temp) + self.index_temps = () + super(BufferIndexNode, self).free_temps(code) + + +class MemoryViewIndexNode(BufferIndexNode): + + is_memview_index = True + is_buffer_access = False + warned_untyped_idx = False + + def analyse_types(self, env, getting=True): + # memoryviewslice indexing or slicing + from . import MemoryView + + self.is_pythran_mode = has_np_pythran(env) + indices = self.indices + have_slices, indices, newaxes = MemoryView.unellipsify(indices, self.base.type.ndim) + + if not getting: + self.writable_needed = True + if self.base.is_name or self.base.is_attribute: + self.base.entry.type.writable_needed = True + + self.memslice_index = (not newaxes and len(indices) == self.base.type.ndim) + axes = [] + + index_type = PyrexTypes.c_py_ssize_t_type + new_indices = [] + + if len(indices) - len(newaxes) > self.base.type.ndim: + self.type = error_type + error(indices[self.base.type.ndim].pos, + "Too many indices specified for type %s" % self.base.type) + return self + + axis_idx = 0 + for i, index in enumerate(indices[:]): + index = index.analyse_types(env) + if index.is_none: + self.is_memview_slice = True + new_indices.append(index) + axes.append(('direct', 'strided')) + continue + + access, packing = self.base.type.axes[axis_idx] + axis_idx += 1 + + if index.is_slice: + self.is_memview_slice = True + if index.step.is_none: + axes.append((access, packing)) + else: + axes.append((access, 'strided')) + + # Coerce start, stop and step to temps of the right type + for attr in ('start', 'stop', 'step'): + value = getattr(index, attr) + if not value.is_none: + value = value.coerce_to(index_type, env) + #value = value.coerce_to_temp(env) + setattr(index, attr, value) + new_indices.append(value) + + elif index.type.is_int or index.type.is_pyobject: + if index.type.is_pyobject and not self.warned_untyped_idx: + warning(index.pos, "Index should be typed for more efficient access", level=2) + MemoryViewIndexNode.warned_untyped_idx = True + + self.is_memview_index = True + index = index.coerce_to(index_type, env) + indices[i] = index + new_indices.append(index) + + else: + self.type = error_type + error(index.pos, "Invalid index for memoryview specified, type %s" % index.type) + return self + + ### FIXME: replace by MemoryViewSliceNode if is_memview_slice ? + self.is_memview_index = self.is_memview_index and not self.is_memview_slice + self.indices = new_indices + # All indices with all start/stop/step for slices. + # We need to keep this around. + self.original_indices = indices + self.nogil = env.nogil + + self.analyse_operation(env, getting, axes) + self.wrap_in_nonecheck_node(env) + return self + + def analyse_operation(self, env, getting, axes): + self.none_error_message = "Cannot index None memoryview slice" + self.analyse_buffer_index(env, getting) + + def analyse_broadcast_operation(self, rhs): + """ + Support broadcasting for slice assignment. + E.g. + m_2d[...] = m_1d # or, + m_1d[...] 
= m_2d # if the leading dimension has extent 1 + """ + if self.type.is_memoryviewslice: + lhs = self + if lhs.is_memview_broadcast or rhs.is_memview_broadcast: + lhs.is_memview_broadcast = True + rhs.is_memview_broadcast = True + + def analyse_as_memview_scalar_assignment(self, rhs): + lhs = self.analyse_assignment(rhs) + if lhs: + rhs.is_memview_copy_assignment = lhs.is_memview_copy_assignment + return lhs + return self + + +class MemoryViewSliceNode(MemoryViewIndexNode): + + is_memview_slice = True + + # No-op slicing operation, this node will be replaced + is_ellipsis_noop = False + is_memview_scalar_assignment = False + is_memview_index = False + is_memview_broadcast = False + + def analyse_ellipsis_noop(self, env, getting): + """Slicing operations needing no evaluation, i.e. m[...] or m[:, :]""" + ### FIXME: replace directly + self.is_ellipsis_noop = all( + index.is_slice and index.start.is_none and index.stop.is_none and index.step.is_none + for index in self.indices) + + if self.is_ellipsis_noop: + self.type = self.base.type + + def analyse_operation(self, env, getting, axes): + from . import MemoryView + + if not getting: + self.is_memview_broadcast = True + self.none_error_message = "Cannot assign to None memoryview slice" + else: + self.none_error_message = "Cannot slice None memoryview slice" + + self.analyse_ellipsis_noop(env, getting) + if self.is_ellipsis_noop: + return + + self.index = None + self.is_temp = True + self.use_managed_ref = True + + if not MemoryView.validate_axes(self.pos, axes): + self.type = error_type + return + + self.type = PyrexTypes.MemoryViewSliceType(self.base.type.dtype, axes) + + if not (self.base.is_simple() or self.base.result_in_temp()): + self.base = self.base.coerce_to_temp(env) + + def analyse_assignment(self, rhs): + if not rhs.type.is_memoryviewslice and ( + self.type.dtype.assignable_from(rhs.type) or + rhs.type.is_pyobject): + # scalar assignment + return MemoryCopyScalar(self.pos, self) + else: + return MemoryCopySlice(self.pos, self) + + def merged_indices(self, indices): + """Return a new list of indices/slices with 'indices' merged into the current ones + according to slicing rules. + Is used to implement "view[i][j]" => "view[i, j]". + Return None if the indices cannot (easily) be merged at compile time. + """ + if not indices: + return None + # NOTE: Need to evaluate "self.original_indices" here as they might differ from "self.indices". + new_indices = self.original_indices[:] + indices = indices[:] + for i, s in enumerate(self.original_indices): + if s.is_slice: + if s.start.is_none and s.stop.is_none and s.step.is_none: + # Full slice found, replace by index. + new_indices[i] = indices[0] + indices.pop(0) + if not indices: + return new_indices + else: + # Found something non-trivial, e.g. a partial slice. + return None + elif not s.type.is_int: + # Not a slice, not an integer index => could be anything... 
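+                # Illustration (toy example): merging view[i][j] into
+                # view[i, j] only succeeds while the outer subscript holds
+                # full slices or integer indices, e.g.
+                #
+                #     view[:, :][7]   ->  view[7, :]   (full slice consumed)
+                #     view[1:, :][7]  ->  no merge     (partial slice)
+                #
+                # so any other kind of index aborts the merge here: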
+ return None + if indices: + if len(new_indices) + len(indices) > self.base.type.ndim: + return None + new_indices += indices + return new_indices + + def is_simple(self): + if self.is_ellipsis_noop: + # TODO: fix SimpleCallNode.is_simple() + return self.base.is_simple() or self.base.result_in_temp() + + return self.result_in_temp() + + def calculate_result_code(self): + """This is called in case this is a no-op slicing node""" + return self.base.result() + + def generate_result_code(self, code): + if self.is_ellipsis_noop: + return ### FIXME: remove + buffer_entry = self.buffer_entry() + have_gil = not self.in_nogil_context + + # TODO Mark: this is insane, do it better + have_slices = False + it = iter(self.indices) + for index in self.original_indices: + if index.is_slice: + have_slices = True + if not index.start.is_none: + index.start = next(it) + if not index.stop.is_none: + index.stop = next(it) + if not index.step.is_none: + index.step = next(it) + else: + next(it) + + assert not list(it) + + buffer_entry.generate_buffer_slice_code( + code, self.original_indices, self.result(), + have_gil=have_gil, have_slices=have_slices, + directives=code.globalstate.directives) + + def generate_assignment_code(self, rhs, code, overloaded_assignment=False): + if self.is_ellipsis_noop: + self.generate_subexpr_evaluation_code(code) + else: + self.generate_evaluation_code(code) + + if self.is_memview_scalar_assignment: + self.generate_memoryviewslice_assign_scalar_code(rhs, code) + else: + self.generate_memoryviewslice_setslice_code(rhs, code) + + if self.is_ellipsis_noop: + self.generate_subexpr_disposal_code(code) + else: + self.generate_disposal_code(code) + + rhs.generate_disposal_code(code) + rhs.free_temps(code) + + +class MemoryCopyNode(ExprNode): + """ + Wraps a memoryview slice for slice assignment. + + dst: destination mememoryview slice + """ + + subexprs = ['dst'] + + def __init__(self, pos, dst): + super(MemoryCopyNode, self).__init__(pos) + self.dst = dst + self.type = dst.type + + def generate_assignment_code(self, rhs, code, overloaded_assignment=False): + self.dst.generate_evaluation_code(code) + self._generate_assignment_code(rhs, code) + self.dst.generate_disposal_code(code) + rhs.generate_disposal_code(code) + rhs.free_temps(code) + + +class MemoryCopySlice(MemoryCopyNode): + """ + Copy the contents of slice src to slice dst. Does not support indirect + slices. + + memslice1[...] = memslice2 + memslice1[:] = memslice2 + """ + + is_memview_copy_assignment = True + copy_slice_cname = "__pyx_memoryview_copy_contents" + + def _generate_assignment_code(self, src, code): + dst = self.dst + + src.type.assert_direct_dims(src.pos) + dst.type.assert_direct_dims(dst.pos) + + code.putln(code.error_goto_if_neg( + "%s(%s, %s, %d, %d, %d)" % (self.copy_slice_cname, + src.result(), dst.result(), + src.type.ndim, dst.type.ndim, + dst.type.dtype.is_pyobject), + dst.pos)) + + +class MemoryCopyScalar(MemoryCopyNode): + """ + Assign a scalar to a slice. dst must be simple, scalar will be assigned + to a correct type and not just something assignable. + + memslice1[...] = 0.0 + memslice1[:] = 0.0 + """ + + def __init__(self, pos, dst): + super(MemoryCopyScalar, self).__init__(pos, dst) + self.type = dst.type.dtype + + def _generate_assignment_code(self, scalar, code): + from . 
import MemoryView + + self.dst.type.assert_direct_dims(self.dst.pos) + + dtype = self.dst.type.dtype + type_decl = dtype.declaration_code("") + slice_decl = self.dst.type.declaration_code("") + + code.begin_block() + code.putln("%s __pyx_temp_scalar = %s;" % (type_decl, scalar.result())) + if self.dst.result_in_temp() or self.dst.is_simple(): + dst_temp = self.dst.result() + else: + code.putln("%s __pyx_temp_slice = %s;" % (slice_decl, self.dst.result())) + dst_temp = "__pyx_temp_slice" + + slice_iter_obj = MemoryView.slice_iter(self.dst.type, dst_temp, + self.dst.type.ndim, code) + p = slice_iter_obj.start_loops() + + if dtype.is_pyobject: + code.putln("Py_DECREF(*(PyObject **) %s);" % p) + + code.putln("*((%s *) %s) = __pyx_temp_scalar;" % (type_decl, p)) + + if dtype.is_pyobject: + code.putln("Py_INCREF(__pyx_temp_scalar);") + + slice_iter_obj.end_loops() + code.end_block() + + +class SliceIndexNode(ExprNode): + # 2-element slice indexing + # + # base ExprNode + # start ExprNode or None + # stop ExprNode or None + # slice ExprNode or None constant slice object + + subexprs = ['base', 'start', 'stop', 'slice'] + + slice = None + + def infer_type(self, env): + base_type = self.base.infer_type(env) + if base_type.is_string or base_type.is_cpp_class: + return bytes_type + elif base_type.is_pyunicode_ptr: + return unicode_type + elif base_type in (bytes_type, bytearray_type, str_type, unicode_type, + basestring_type, list_type, tuple_type): + return base_type + elif base_type.is_ptr or base_type.is_array: + return PyrexTypes.c_array_type(base_type.base_type, None) + return py_object_type + + def inferable_item_node(self, index=0): + # slicing shouldn't change the result type of the base, but the index might + if index is not not_a_constant and self.start: + if self.start.has_constant_result(): + index += self.start.constant_result + else: + index = not_a_constant + return self.base.inferable_item_node(index) + + def may_be_none(self): + base_type = self.base.type + if base_type: + if base_type.is_string: + return False + if base_type in (bytes_type, str_type, unicode_type, + basestring_type, list_type, tuple_type): + return False + return ExprNode.may_be_none(self) + + def calculate_constant_result(self): + if self.start is None: + start = None + else: + start = self.start.constant_result + if self.stop is None: + stop = None + else: + stop = self.stop.constant_result + self.constant_result = self.base.constant_result[start:stop] + + def compile_time_value(self, denv): + base = self.base.compile_time_value(denv) + if self.start is None: + start = 0 + else: + start = self.start.compile_time_value(denv) + if self.stop is None: + stop = None + else: + stop = self.stop.compile_time_value(denv) + try: + return base[start:stop] + except Exception as e: + self.compile_time_value_error(e) + + def analyse_target_declaration(self, env): + pass + + def analyse_target_types(self, env): + node = self.analyse_types(env, getting=False) + # when assigning, we must accept any Python type + if node.type.is_pyobject: + node.type = py_object_type + return node + + def analyse_types(self, env, getting=True): + self.base = self.base.analyse_types(env) + + if self.base.type.is_buffer or self.base.type.is_pythran_expr or self.base.type.is_memoryviewslice: + none_node = NoneNode(self.pos) + index = SliceNode(self.pos, + start=self.start or none_node, + stop=self.stop or none_node, + step=none_node) + index_node = IndexNode(self.pos, index=index, base=self.base) + return index_node.analyse_base_and_index_types( + 
env, getting=getting, setting=not getting, + analyse_base=False) + + if self.start: + self.start = self.start.analyse_types(env) + if self.stop: + self.stop = self.stop.analyse_types(env) + + if not env.directives['wraparound']: + check_negative_indices(self.start, self.stop) + + base_type = self.base.type + if base_type.is_array and not getting: + # cannot assign directly to C array => try to assign by making a copy + if not self.start and not self.stop: + self.type = base_type + else: + self.type = PyrexTypes.CPtrType(base_type.base_type) + elif base_type.is_string or base_type.is_cpp_string: + self.type = default_str_type(env) + elif base_type.is_pyunicode_ptr: + self.type = unicode_type + elif base_type.is_ptr: + self.type = base_type + elif base_type.is_array: + # we need a ptr type here instead of an array type, as + # array types can result in invalid type casts in the C + # code + self.type = PyrexTypes.CPtrType(base_type.base_type) + else: + self.base = self.base.coerce_to_pyobject(env) + self.type = py_object_type + if base_type.is_builtin_type: + # slicing builtin types returns something of the same type + self.type = base_type + self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable") + + if self.type is py_object_type: + if (not self.start or self.start.is_literal) and \ + (not self.stop or self.stop.is_literal): + # cache the constant slice object, in case we need it + none_node = NoneNode(self.pos) + self.slice = SliceNode( + self.pos, + start=copy.deepcopy(self.start or none_node), + stop=copy.deepcopy(self.stop or none_node), + step=none_node + ).analyse_types(env) + else: + c_int = PyrexTypes.c_py_ssize_t_type + + def allow_none(node, default_value, env): + # Coerce to Py_ssize_t, but allow None as meaning the default slice bound. + from .UtilNodes import EvalWithTempExprNode, ResultRefNode + + node_ref = ResultRefNode(node) + new_expr = CondExprNode( + node.pos, + true_val=IntNode( + node.pos, + type=c_int, + value=default_value, + constant_result=int(default_value) if default_value.isdigit() else not_a_constant, + ), + false_val=node_ref.coerce_to(c_int, env), + test=PrimaryCmpNode( + node.pos, + operand1=node_ref, + operator='is', + operand2=NoneNode(node.pos), + ).analyse_types(env) + ).analyse_result_type(env) + return EvalWithTempExprNode(node_ref, new_expr) + + if self.start: + if self.start.type.is_pyobject: + self.start = allow_none(self.start, '0', env) + self.start = self.start.coerce_to(c_int, env) + if self.stop: + if self.stop.type.is_pyobject: + self.stop = allow_none(self.stop, 'PY_SSIZE_T_MAX', env) + self.stop = self.stop.coerce_to(c_int, env) + self.is_temp = 1 + return self + + def analyse_as_type(self, env): + base_type = self.base.analyse_as_type(env) + if base_type and not base_type.is_pyobject: + if not self.start and not self.stop: + # memory view + from . 
import MemoryView + env.use_utility_code(MemoryView.view_utility_code) + none_node = NoneNode(self.pos) + slice_node = SliceNode( + self.pos, + start=none_node, + stop=none_node, + step=none_node, + ) + return PyrexTypes.MemoryViewSliceType( + base_type, MemoryView.get_axes_specs(env, [slice_node])) + return None + + nogil_check = Node.gil_error + gil_message = "Slicing Python object" + + get_slice_utility_code = TempitaUtilityCode.load( + "SliceObject", "ObjectHandling.c", context={'access': 'Get'}) + + set_slice_utility_code = TempitaUtilityCode.load( + "SliceObject", "ObjectHandling.c", context={'access': 'Set'}) + + def coerce_to(self, dst_type, env): + if ((self.base.type.is_string or self.base.type.is_cpp_string) + and dst_type in (bytes_type, bytearray_type, str_type, unicode_type)): + if (dst_type not in (bytes_type, bytearray_type) + and not env.directives['c_string_encoding']): + error(self.pos, + "default encoding required for conversion from '%s' to '%s'" % + (self.base.type, dst_type)) + self.type = dst_type + if dst_type.is_array and self.base.type.is_array: + if not self.start and not self.stop: + # redundant slice building, copy C arrays directly + return self.base.coerce_to(dst_type, env) + # else: check array size if possible + return super(SliceIndexNode, self).coerce_to(dst_type, env) + + def generate_result_code(self, code): + if not self.type.is_pyobject: + error(self.pos, + "Slicing is not currently supported for '%s'." % self.type) + return + + base_result = self.base.result() + result = self.result() + start_code = self.start_code() + stop_code = self.stop_code() + if self.base.type.is_string: + base_result = self.base.result() + if self.base.type not in (PyrexTypes.c_char_ptr_type, PyrexTypes.c_const_char_ptr_type): + base_result = '((const char*)%s)' % base_result + if self.type is bytearray_type: + type_name = 'ByteArray' + else: + type_name = self.type.name.title() + if self.stop is None: + code.putln( + "%s = __Pyx_Py%s_FromString(%s + %s); %s" % ( + result, + type_name, + base_result, + start_code, + code.error_goto_if_null(result, self.pos))) + else: + code.putln( + "%s = __Pyx_Py%s_FromStringAndSize(%s + %s, %s - %s); %s" % ( + result, + type_name, + base_result, + start_code, + stop_code, + start_code, + code.error_goto_if_null(result, self.pos))) + elif self.base.type.is_pyunicode_ptr: + base_result = self.base.result() + if self.base.type != PyrexTypes.c_py_unicode_ptr_type: + base_result = '((const Py_UNICODE*)%s)' % base_result + if self.stop is None: + code.putln( + "%s = __Pyx_PyUnicode_FromUnicode(%s + %s); %s" % ( + result, + base_result, + start_code, + code.error_goto_if_null(result, self.pos))) + else: + code.putln( + "%s = __Pyx_PyUnicode_FromUnicodeAndLength(%s + %s, %s - %s); %s" % ( + result, + base_result, + start_code, + stop_code, + start_code, + code.error_goto_if_null(result, self.pos))) + + elif self.base.type is unicode_type: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyUnicode_Substring", "StringTools.c")) + code.putln( + "%s = __Pyx_PyUnicode_Substring(%s, %s, %s); %s" % ( + result, + base_result, + start_code, + stop_code, + code.error_goto_if_null(result, self.pos))) + elif self.type is py_object_type: + code.globalstate.use_utility_code(self.get_slice_utility_code) + (has_c_start, has_c_stop, c_start, c_stop, + py_start, py_stop, py_slice) = self.get_slice_config() + code.putln( + "%s = __Pyx_PyObject_GetSlice(%s, %s, %s, %s, %s, %s, %d, %d, %d); %s" % ( + result, + self.base.py_result(), + c_start, c_stop, + 
py_start, py_stop, py_slice, + has_c_start, has_c_stop, + bool(code.globalstate.directives['wraparound']), + code.error_goto_if_null(result, self.pos))) + else: + if self.base.type is list_type: + code.globalstate.use_utility_code( + TempitaUtilityCode.load_cached("SliceTupleAndList", "ObjectHandling.c")) + cfunc = '__Pyx_PyList_GetSlice' + elif self.base.type is tuple_type: + code.globalstate.use_utility_code( + TempitaUtilityCode.load_cached("SliceTupleAndList", "ObjectHandling.c")) + cfunc = '__Pyx_PyTuple_GetSlice' + else: + cfunc = 'PySequence_GetSlice' + code.putln( + "%s = %s(%s, %s, %s); %s" % ( + result, + cfunc, + self.base.py_result(), + start_code, + stop_code, + code.error_goto_if_null(result, self.pos))) + code.put_gotref(self.py_result()) + + def generate_assignment_code(self, rhs, code, overloaded_assignment=False, + exception_check=None, exception_value=None): + self.generate_subexpr_evaluation_code(code) + if self.type.is_pyobject: + code.globalstate.use_utility_code(self.set_slice_utility_code) + (has_c_start, has_c_stop, c_start, c_stop, + py_start, py_stop, py_slice) = self.get_slice_config() + code.put_error_if_neg(self.pos, + "__Pyx_PyObject_SetSlice(%s, %s, %s, %s, %s, %s, %s, %d, %d, %d)" % ( + self.base.py_result(), + rhs.py_result(), + c_start, c_stop, + py_start, py_stop, py_slice, + has_c_start, has_c_stop, + bool(code.globalstate.directives['wraparound']))) + else: + start_offset = self.start_code() if self.start else '0' + if rhs.type.is_array: + array_length = rhs.type.size + self.generate_slice_guard_code(code, array_length) + else: + array_length = '%s - %s' % (self.stop_code(), start_offset) + + code.globalstate.use_utility_code(UtilityCode.load_cached("IncludeStringH", "StringTools.c")) + code.putln("memcpy(&(%s[%s]), %s, sizeof(%s[0]) * (%s));" % ( + self.base.result(), start_offset, + rhs.result(), + self.base.result(), array_length + )) + + self.generate_subexpr_disposal_code(code) + self.free_subexpr_temps(code) + rhs.generate_disposal_code(code) + rhs.free_temps(code) + + def generate_deletion_code(self, code, ignore_nonexisting=False): + if not self.base.type.is_pyobject: + error(self.pos, + "Deleting slices is only supported for Python types, not '%s'." 
% self.type) + return + self.generate_subexpr_evaluation_code(code) + code.globalstate.use_utility_code(self.set_slice_utility_code) + (has_c_start, has_c_stop, c_start, c_stop, + py_start, py_stop, py_slice) = self.get_slice_config() + code.put_error_if_neg(self.pos, + "__Pyx_PyObject_DelSlice(%s, %s, %s, %s, %s, %s, %d, %d, %d)" % ( + self.base.py_result(), + c_start, c_stop, + py_start, py_stop, py_slice, + has_c_start, has_c_stop, + bool(code.globalstate.directives['wraparound']))) + self.generate_subexpr_disposal_code(code) + self.free_subexpr_temps(code) + + def get_slice_config(self): + has_c_start, c_start, py_start = False, '0', 'NULL' + if self.start: + has_c_start = not self.start.type.is_pyobject + if has_c_start: + c_start = self.start.result() + else: + py_start = '&%s' % self.start.py_result() + has_c_stop, c_stop, py_stop = False, '0', 'NULL' + if self.stop: + has_c_stop = not self.stop.type.is_pyobject + if has_c_stop: + c_stop = self.stop.result() + else: + py_stop = '&%s' % self.stop.py_result() + py_slice = self.slice and '&%s' % self.slice.py_result() or 'NULL' + return (has_c_start, has_c_stop, c_start, c_stop, + py_start, py_stop, py_slice) + + def generate_slice_guard_code(self, code, target_size): + if not self.base.type.is_array: + return + slice_size = self.base.type.size + try: + total_length = slice_size = int(slice_size) + except ValueError: + total_length = None + + start = stop = None + if self.stop: + stop = self.stop.result() + try: + stop = int(stop) + if stop < 0: + if total_length is None: + slice_size = '%s + %d' % (slice_size, stop) + else: + slice_size += stop + else: + slice_size = stop + stop = None + except ValueError: + pass + + if self.start: + start = self.start.result() + try: + start = int(start) + if start < 0: + if total_length is None: + start = '%s + %d' % (self.base.type.size, start) + else: + start += total_length + if isinstance(slice_size, _py_int_types): + slice_size -= start + else: + slice_size = '%s - (%s)' % (slice_size, start) + start = None + except ValueError: + pass + + runtime_check = None + compile_time_check = False + try: + int_target_size = int(target_size) + except ValueError: + int_target_size = None + else: + compile_time_check = isinstance(slice_size, _py_int_types) + + if compile_time_check and slice_size < 0: + if int_target_size > 0: + error(self.pos, "Assignment to empty slice.") + elif compile_time_check and start is None and stop is None: + # we know the exact slice length + if int_target_size != slice_size: + error(self.pos, "Assignment to slice of wrong length, expected %s, got %s" % ( + slice_size, target_size)) + elif start is not None: + if stop is None: + stop = slice_size + runtime_check = "(%s)-(%s)" % (stop, start) + elif stop is not None: + runtime_check = stop + else: + runtime_check = slice_size + + if runtime_check: + code.putln("if (unlikely((%s) != (%s))) {" % (runtime_check, target_size)) + code.putln( + 'PyErr_Format(PyExc_ValueError, "Assignment to slice of wrong length,' + ' expected %%" CYTHON_FORMAT_SSIZE_T "d, got %%" CYTHON_FORMAT_SSIZE_T "d",' + ' (Py_ssize_t)(%s), (Py_ssize_t)(%s));' % ( + target_size, runtime_check)) + code.putln(code.error_goto(self.pos)) + code.putln("}") + + def start_code(self): + if self.start: + return self.start.result() + else: + return "0" + + def stop_code(self): + if self.stop: + return self.stop.result() + elif self.base.type.is_array: + return self.base.type.size + else: + return "PY_SSIZE_T_MAX" + + def calculate_result_code(self): + # self.result() is 
not used, but this method must exist + return "" + + +class SliceNode(ExprNode): + # start:stop:step in subscript list + # + # start ExprNode + # stop ExprNode + # step ExprNode + + subexprs = ['start', 'stop', 'step'] + is_slice = True + type = slice_type + is_temp = 1 + + def calculate_constant_result(self): + self.constant_result = slice( + self.start.constant_result, + self.stop.constant_result, + self.step.constant_result) + + def compile_time_value(self, denv): + start = self.start.compile_time_value(denv) + stop = self.stop.compile_time_value(denv) + step = self.step.compile_time_value(denv) + try: + return slice(start, stop, step) + except Exception as e: + self.compile_time_value_error(e) + + def may_be_none(self): + return False + + def analyse_types(self, env): + start = self.start.analyse_types(env) + stop = self.stop.analyse_types(env) + step = self.step.analyse_types(env) + self.start = start.coerce_to_pyobject(env) + self.stop = stop.coerce_to_pyobject(env) + self.step = step.coerce_to_pyobject(env) + if self.start.is_literal and self.stop.is_literal and self.step.is_literal: + self.is_literal = True + self.is_temp = False + return self + + gil_message = "Constructing Python slice object" + + def calculate_result_code(self): + return self.result_code + + def generate_result_code(self, code): + if self.is_literal: + dedup_key = make_dedup_key(self.type, (self,)) + self.result_code = code.get_py_const(py_object_type, 'slice', cleanup_level=2, dedup_key=dedup_key) + code = code.get_cached_constants_writer(self.result_code) + if code is None: + return # already initialised + code.mark_pos(self.pos) + + code.putln( + "%s = PySlice_New(%s, %s, %s); %s" % ( + self.result(), + self.start.py_result(), + self.stop.py_result(), + self.step.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + if self.is_literal: + code.put_giveref(self.py_result()) + +class SliceIntNode(SliceNode): + # start:stop:step in subscript list + # This is just a node to hold start,stop and step nodes that can be + # converted to integers. This does not generate a slice python object. 
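+    # Rough sketch of the difference from SliceNode (illustrative only):
+    #
+    #     SliceNode:    obj[slice(start, stop, step)]   # boxes a PyObject slice
+    #     SliceIntNode: start/stop/step kept as C ints  # no object is created
+    #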
+    #
+    #  start     ExprNode
+    #  stop      ExprNode
+    #  step      ExprNode
+
+    is_temp = 0
+
+    def calculate_constant_result(self):
+        self.constant_result = slice(
+            self.start.constant_result,
+            self.stop.constant_result,
+            self.step.constant_result)
+
+    def compile_time_value(self, denv):
+        start = self.start.compile_time_value(denv)
+        stop = self.stop.compile_time_value(denv)
+        step = self.step.compile_time_value(denv)
+        try:
+            return slice(start, stop, step)
+        except Exception as e:
+            self.compile_time_value_error(e)
+
+    def may_be_none(self):
+        return False
+
+    def analyse_types(self, env):
+        self.start = self.start.analyse_types(env)
+        self.stop = self.stop.analyse_types(env)
+        self.step = self.step.analyse_types(env)
+
+        if not self.start.is_none:
+            self.start = self.start.coerce_to_integer(env)
+        if not self.stop.is_none:
+            self.stop = self.stop.coerce_to_integer(env)
+        if not self.step.is_none:
+            self.step = self.step.coerce_to_integer(env)
+
+        if self.start.is_literal and self.stop.is_literal and self.step.is_literal:
+            self.is_literal = True
+            self.is_temp = False
+        return self
+
+    def calculate_result_code(self):
+        pass
+
+    def generate_result_code(self, code):
+        for a in self.start, self.stop, self.step:
+            if isinstance(a, CloneNode):
+                a.arg.result()
+
+
+class CallNode(ExprNode):
+
+    # allow overriding the default 'may_be_none' behaviour
+    may_return_none = None
+
+    def infer_type(self, env):
+        # TODO(robertwb): Reduce redundancy with analyse_types.
+        function = self.function
+        func_type = function.infer_type(env)
+        if isinstance(function, NewExprNode):
+            # note: needs call to infer_type() above
+            return PyrexTypes.CPtrType(function.class_type)
+        if func_type is py_object_type:
+            # function might have lied for safety => try to find better type
+            entry = getattr(function, 'entry', None)
+            if entry is not None:
+                func_type = entry.type or func_type
+        if func_type.is_ptr:
+            func_type = func_type.base_type
+        if func_type.is_cfunction:
+            if getattr(self.function, 'entry', None) and hasattr(self, 'args'):
+                alternatives = self.function.entry.all_alternatives()
+                arg_types = [arg.infer_type(env) for arg in self.args]
+                func_entry = PyrexTypes.best_match(arg_types, alternatives)
+                if func_entry:
+                    func_type = func_entry.type
+                    if func_type.is_ptr:
+                        func_type = func_type.base_type
+                    return func_type.return_type
+            return func_type.return_type
+        elif func_type is type_type:
+            if function.is_name and function.entry and function.entry.type:
+                result_type = function.entry.type
+                if result_type.is_extension_type:
+                    return result_type
+                elif result_type.is_builtin_type:
+                    if function.entry.name == 'float':
+                        return PyrexTypes.c_double_type
+                    elif function.entry.name in Builtin.types_that_construct_their_instance:
+                        return result_type
+        return py_object_type
+
+    def type_dependencies(self, env):
+        # TODO: Update when Danilo's C++ code is merged in, to handle
+        # the case of function overloading.
+        return self.function.type_dependencies(env)
+
+    def is_simple(self):
+        # C function calls could be considered simple, but they may
+        # have side-effects that may hit when multiple operations must
+        # be effected in order, e.g. when constructing the argument
+        # sequence for a function call or comparing values.
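+        # For example (hypothetical function), with
+        #
+        #     count = 0
+        #     def f():
+        #         global count
+        #         count += 1      # side effect
+        #         return count
+        #
+        # f() + f() evaluates to 3; caching or re-evaluating f() as if it
+        # were "simple" would yield 2 or 4 instead, hence: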
+ return False + + def may_be_none(self): + if self.may_return_none is not None: + return self.may_return_none + func_type = self.function.type + if func_type is type_type and self.function.is_name: + entry = self.function.entry + if entry.type.is_extension_type: + return False + if (entry.type.is_builtin_type and + entry.name in Builtin.types_that_construct_their_instance): + return False + return ExprNode.may_be_none(self) + + def set_py_result_type(self, function, func_type=None): + if func_type is None: + func_type = function.type + if func_type is Builtin.type_type and ( + function.is_name and + function.entry and + function.entry.is_builtin and + function.entry.name in Builtin.types_that_construct_their_instance): + # calling a builtin type that returns a specific object type + if function.entry.name == 'float': + # the following will come true later on in a transform + self.type = PyrexTypes.c_double_type + self.result_ctype = PyrexTypes.c_double_type + else: + self.type = Builtin.builtin_types[function.entry.name] + self.result_ctype = py_object_type + self.may_return_none = False + elif function.is_name and function.type_entry: + # We are calling an extension type constructor. As long as we do not + # support __new__(), the result type is clear + self.type = function.type_entry.type + self.result_ctype = py_object_type + self.may_return_none = False + else: + self.type = py_object_type + + def analyse_as_type_constructor(self, env): + type = self.function.analyse_as_type(env) + if type and type.is_struct_or_union: + args, kwds = self.explicit_args_kwds() + items = [] + for arg, member in zip(args, type.scope.var_entries): + items.append(DictItemNode(pos=arg.pos, key=StringNode(pos=arg.pos, value=member.name), value=arg)) + if kwds: + items += kwds.key_value_pairs + self.key_value_pairs = items + self.__class__ = DictNode + self.analyse_types(env) # FIXME + self.coerce_to(type, env) + return True + elif type and type.is_cpp_class: + self.args = [ arg.analyse_types(env) for arg in self.args ] + constructor = type.scope.lookup("") + if not constructor: + error(self.function.pos, "no constructor found for C++ type '%s'" % self.function.name) + self.type = error_type + return self + self.function = RawCNameExprNode(self.function.pos, constructor.type) + self.function.entry = constructor + self.function.set_cname(type.empty_declaration_code()) + self.analyse_c_function_call(env) + self.type = type + return True + + def is_lvalue(self): + return self.type.is_reference + + def nogil_check(self, env): + func_type = self.function_type() + if func_type.is_pyobject: + self.gil_error() + elif not getattr(func_type, 'nogil', False): + self.gil_error() + + gil_message = "Calling gil-requiring function" + + +class SimpleCallNode(CallNode): + # Function call without keyword, * or ** args. 
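+    # Informal mapping of call syntax to call nodes, for orientation:
+    #
+    #     f(a, b)      -> SimpleCallNode
+    #     f(a, b=1)    -> GeneralCallNode  (keyword argument)
+    #     f(*args)     -> GeneralCallNode  (starred argument)
+    #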
+ # + # function ExprNode + # args [ExprNode] + # arg_tuple ExprNode or None used internally + # self ExprNode or None used internally + # coerced_self ExprNode or None used internally + # wrapper_call bool used internally + # has_optional_args bool used internally + # nogil bool used internally + + subexprs = ['self', 'coerced_self', 'function', 'args', 'arg_tuple'] + + self = None + coerced_self = None + arg_tuple = None + wrapper_call = False + has_optional_args = False + nogil = False + analysed = False + overflowcheck = False + + def compile_time_value(self, denv): + function = self.function.compile_time_value(denv) + args = [arg.compile_time_value(denv) for arg in self.args] + try: + return function(*args) + except Exception as e: + self.compile_time_value_error(e) + + def analyse_as_type(self, env): + attr = self.function.as_cython_attribute() + if attr == 'pointer': + if len(self.args) != 1: + error(self.args.pos, "only one type allowed.") + else: + type = self.args[0].analyse_as_type(env) + if not type: + error(self.args[0].pos, "Unknown type") + else: + return PyrexTypes.CPtrType(type) + elif attr == 'typeof': + if len(self.args) != 1: + error(self.args.pos, "only one type allowed.") + operand = self.args[0].analyse_types(env) + return operand.type + + def explicit_args_kwds(self): + return self.args, None + + def analyse_types(self, env): + if self.analyse_as_type_constructor(env): + return self + if self.analysed: + return self + self.analysed = True + self.function.is_called = 1 + self.function = self.function.analyse_types(env) + function = self.function + + if function.is_attribute and function.entry and function.entry.is_cmethod: + # Take ownership of the object from which the attribute + # was obtained, because we need to pass it as 'self'. + self.self = function.obj + function.obj = CloneNode(self.self) + + func_type = self.function_type() + self.is_numpy_call_with_exprs = False + if (has_np_pythran(env) and function.is_numpy_attribute and + pythran_is_numpy_func_supported(function)): + has_pythran_args = True + self.arg_tuple = TupleNode(self.pos, args = self.args) + self.arg_tuple = self.arg_tuple.analyse_types(env) + for arg in self.arg_tuple.args: + has_pythran_args &= is_pythran_supported_node_or_none(arg) + self.is_numpy_call_with_exprs = bool(has_pythran_args) + if self.is_numpy_call_with_exprs: + env.add_include_file(pythran_get_func_include_file(function)) + return NumPyMethodCallNode.from_node( + self, + function=function, + arg_tuple=self.arg_tuple, + type=PythranExpr(pythran_func_type(function, self.arg_tuple.args)), + ) + elif func_type.is_pyobject: + self.arg_tuple = TupleNode(self.pos, args = self.args) + self.arg_tuple = self.arg_tuple.analyse_types(env).coerce_to_pyobject(env) + self.args = None + self.set_py_result_type(function, func_type) + self.is_temp = 1 + else: + self.args = [ arg.analyse_types(env) for arg in self.args ] + self.analyse_c_function_call(env) + if func_type.exception_check == '+': + self.is_temp = True + return self + + def function_type(self): + # Return the type of the function being called, coercing a function + # pointer to a function if necessary. If the function has fused + # arguments, return the specific type. 
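+        # e.g. (hypothetical declaration) for `cdef int (*fp)(int)`, a call
+        # like fp(3) arrives here with a pointer-to-function type, and one
+        # level of base_type unwrapping recovers the callable's type: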
+ func_type = self.function.type + + if func_type.is_ptr: + func_type = func_type.base_type + + return func_type + + def analyse_c_function_call(self, env): + func_type = self.function.type + if func_type is error_type: + self.type = error_type + return + + if func_type.is_cfunction and func_type.is_static_method: + if self.self and self.self.type.is_extension_type: + # To support this we'd need to pass self to determine whether + # it was overloaded in Python space (possibly via a Cython + # superclass turning a cdef method into a cpdef one). + error(self.pos, "Cannot call a static method on an instance variable.") + args = self.args + elif self.self: + args = [self.self] + self.args + else: + args = self.args + + if func_type.is_cpp_class: + overloaded_entry = self.function.type.scope.lookup("operator()") + if overloaded_entry is None: + self.type = PyrexTypes.error_type + self.result_code = "" + return + elif hasattr(self.function, 'entry'): + overloaded_entry = self.function.entry + elif self.function.is_subscript and self.function.is_fused_index: + overloaded_entry = self.function.type.entry + else: + overloaded_entry = None + + if overloaded_entry: + if self.function.type.is_fused: + functypes = self.function.type.get_all_specialized_function_types() + alternatives = [f.entry for f in functypes] + else: + alternatives = overloaded_entry.all_alternatives() + + entry = PyrexTypes.best_match( + [arg.type for arg in args], alternatives, self.pos, env, args) + + if not entry: + self.type = PyrexTypes.error_type + self.result_code = "" + return + + entry.used = True + if not func_type.is_cpp_class: + self.function.entry = entry + self.function.type = entry.type + func_type = self.function_type() + else: + entry = None + func_type = self.function_type() + if not func_type.is_cfunction: + error(self.pos, "Calling non-function type '%s'" % func_type) + self.type = PyrexTypes.error_type + self.result_code = "" + return + + # Check no. 
of args + max_nargs = len(func_type.args) + expected_nargs = max_nargs - func_type.optional_arg_count + actual_nargs = len(args) + if func_type.optional_arg_count and expected_nargs != actual_nargs: + self.has_optional_args = 1 + self.is_temp = 1 + + # check 'self' argument + if entry and entry.is_cmethod and func_type.args and not func_type.is_static_method: + formal_arg = func_type.args[0] + arg = args[0] + if formal_arg.not_none: + if self.self: + self.self = self.self.as_none_safe_node( + "'NoneType' object has no attribute '%{0}s'".format('.30' if len(entry.name) <= 30 else ''), + error='PyExc_AttributeError', + format_args=[entry.name]) + else: + # unbound method + arg = arg.as_none_safe_node( + "descriptor '%s' requires a '%s' object but received a 'NoneType'", + format_args=[entry.name, formal_arg.type.name]) + if self.self: + if formal_arg.accept_builtin_subtypes: + arg = CMethodSelfCloneNode(self.self) + else: + arg = CloneNode(self.self) + arg = self.coerced_self = arg.coerce_to(formal_arg.type, env) + elif formal_arg.type.is_builtin_type: + # special case: unbound methods of builtins accept subtypes + arg = arg.coerce_to(formal_arg.type, env) + if arg.type.is_builtin_type and isinstance(arg, PyTypeTestNode): + arg.exact_builtin_type = False + args[0] = arg + + # Coerce arguments + some_args_in_temps = False + for i in range(min(max_nargs, actual_nargs)): + formal_arg = func_type.args[i] + formal_type = formal_arg.type + arg = args[i].coerce_to(formal_type, env) + if formal_arg.not_none: + # C methods must do the None checks at *call* time + arg = arg.as_none_safe_node( + "cannot pass None into a C function argument that is declared 'not None'") + if arg.is_temp: + if i > 0: + # first argument in temp doesn't impact subsequent arguments + some_args_in_temps = True + elif arg.type.is_pyobject and not env.nogil: + if i == 0 and self.self is not None: + # a method's cloned "self" argument is ok + pass + elif arg.nonlocally_immutable(): + # plain local variables are ok + pass + else: + # we do not safely own the argument's reference, + # but we must make sure it cannot be collected + # before we return from the function, so we create + # an owned temp reference to it + if i > 0: # first argument doesn't matter + some_args_in_temps = True + arg = arg.coerce_to_temp(env) + args[i] = arg + + # handle additional varargs parameters + for i in range(max_nargs, actual_nargs): + arg = args[i] + if arg.type.is_pyobject: + if arg.type is str_type: + arg_ctype = PyrexTypes.c_char_ptr_type + else: + arg_ctype = arg.type.default_coerced_ctype() + if arg_ctype is None: + error(self.args[i].pos, + "Python object cannot be passed as a varargs parameter") + else: + args[i] = arg = arg.coerce_to(arg_ctype, env) + if arg.is_temp and i > 0: + some_args_in_temps = True + + if some_args_in_temps: + # if some args are temps and others are not, they may get + # constructed in the wrong order (temps first) => make + # sure they are either all temps or all not temps (except + # for the last argument, which is evaluated last in any + # case) + for i in range(actual_nargs-1): + if i == 0 and self.self is not None: + continue # self is ok + arg = args[i] + if arg.nonlocally_immutable(): + # locals, C functions, unassignable types are safe. + pass + elif arg.type.is_cpp_class: + # Assignment has side effects, avoid. 
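+                    # Illustration (hypothetical C++ class): copying into a
+                    # temp can run a user-defined copy constructor, e.g. a
+                    # `Tracker(const Tracker&)` that logs or allocates, so
+                    # the original evaluation order is kept instead.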
+ pass + elif env.nogil and arg.type.is_pyobject: + # can't copy a Python reference into a temp in nogil + # env (this is safe: a construction would fail in + # nogil anyway) + pass + else: + #self.args[i] = arg.coerce_to_temp(env) + # instead: issue a warning + if i > 0 or i == 1 and self.self is not None: # skip first arg + warning(arg.pos, "Argument evaluation order in C function call is undefined and may not be as expected", 0) + break + + self.args[:] = args + + # Calc result type and code fragment + if isinstance(self.function, NewExprNode): + self.type = PyrexTypes.CPtrType(self.function.class_type) + else: + self.type = func_type.return_type + + if self.function.is_name or self.function.is_attribute: + func_entry = self.function.entry + if func_entry and (func_entry.utility_code or func_entry.utility_code_definition): + self.is_temp = 1 # currently doesn't work for self.calculate_result_code() + + if self.type.is_pyobject: + self.result_ctype = py_object_type + self.is_temp = 1 + elif func_type.exception_value is not None or func_type.exception_check: + self.is_temp = 1 + elif self.type.is_memoryviewslice: + self.is_temp = 1 + # func_type.exception_check = True + + if self.is_temp and self.type.is_reference: + self.type = PyrexTypes.CFakeReferenceType(self.type.ref_base_type) + + # Called in 'nogil' context? + self.nogil = env.nogil + if (self.nogil and + func_type.exception_check and + func_type.exception_check != '+'): + env.use_utility_code(pyerr_occurred_withgil_utility_code) + # C++ exception handler + if func_type.exception_check == '+': + if func_type.exception_value is None: + env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) + + self.overflowcheck = env.directives['overflowcheck'] + + def calculate_result_code(self): + return self.c_call_code() + + def c_call_code(self): + func_type = self.function_type() + if self.type is PyrexTypes.error_type or not func_type.is_cfunction: + return "" + formal_args = func_type.args + arg_list_code = [] + args = list(zip(formal_args, self.args)) + max_nargs = len(func_type.args) + expected_nargs = max_nargs - func_type.optional_arg_count + actual_nargs = len(self.args) + for formal_arg, actual_arg in args[:expected_nargs]: + arg_code = actual_arg.result_as(formal_arg.type) + arg_list_code.append(arg_code) + + if func_type.is_overridable: + arg_list_code.append(str(int(self.wrapper_call or self.function.entry.is_unbound_cmethod))) + + if func_type.optional_arg_count: + if expected_nargs == actual_nargs: + optional_args = 'NULL' + else: + optional_args = "&%s" % self.opt_arg_struct + arg_list_code.append(optional_args) + + for actual_arg in self.args[len(formal_args):]: + arg_list_code.append(actual_arg.result()) + + result = "%s(%s)" % (self.function.result(), ', '.join(arg_list_code)) + return result + + def is_c_result_required(self): + func_type = self.function_type() + if not func_type.exception_value or func_type.exception_check == '+': + return False # skip allocation of unused result temp + return True + + def generate_evaluation_code(self, code): + function = self.function + if function.is_name or function.is_attribute: + code.globalstate.use_entry_utility_code(function.entry) + + if not function.type.is_pyobject or len(self.arg_tuple.args) > 1 or ( + self.arg_tuple.args and self.arg_tuple.is_literal): + super(SimpleCallNode, self).generate_evaluation_code(code) + return + + # Special case 0-args and try to avoid explicit tuple creation for Python calls with 1 arg. 
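+        # A sketch (plain Python, illustrative only) of the dispatch below:
+        #
+        #     def fast_call(func, args):
+        #         if not args:
+        #             return func()        # -> __Pyx_PyObject_CallNoArg
+        #         return func(args[0])     # -> __Pyx_PyObject_CallOneArg
+        #
+        # both paths avoid allocating the intermediate argument tuple.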
+ arg = self.arg_tuple.args[0] if self.arg_tuple.args else None + subexprs = (self.self, self.coerced_self, function, arg) + for subexpr in subexprs: + if subexpr is not None: + subexpr.generate_evaluation_code(code) + + code.mark_pos(self.pos) + assert self.is_temp + self.allocate_temp_result(code) + + if arg is None: + code.globalstate.use_utility_code(UtilityCode.load_cached( + "PyObjectCallNoArg", "ObjectHandling.c")) + code.putln( + "%s = __Pyx_PyObject_CallNoArg(%s); %s" % ( + self.result(), + function.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + else: + code.globalstate.use_utility_code(UtilityCode.load_cached( + "PyObjectCallOneArg", "ObjectHandling.c")) + code.putln( + "%s = __Pyx_PyObject_CallOneArg(%s, %s); %s" % ( + self.result(), + function.py_result(), + arg.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + + code.put_gotref(self.py_result()) + + for subexpr in subexprs: + if subexpr is not None: + subexpr.generate_disposal_code(code) + subexpr.free_temps(code) + + def generate_result_code(self, code): + func_type = self.function_type() + if func_type.is_pyobject: + arg_code = self.arg_tuple.py_result() + code.globalstate.use_utility_code(UtilityCode.load_cached( + "PyObjectCall", "ObjectHandling.c")) + code.putln( + "%s = __Pyx_PyObject_Call(%s, %s, NULL); %s" % ( + self.result(), + self.function.py_result(), + arg_code, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + elif func_type.is_cfunction: + if self.has_optional_args: + actual_nargs = len(self.args) + expected_nargs = len(func_type.args) - func_type.optional_arg_count + self.opt_arg_struct = code.funcstate.allocate_temp( + func_type.op_arg_struct.base_type, manage_ref=True) + code.putln("%s.%s = %s;" % ( + self.opt_arg_struct, + Naming.pyrex_prefix + "n", + len(self.args) - expected_nargs)) + args = list(zip(func_type.args, self.args)) + for formal_arg, actual_arg in args[expected_nargs:actual_nargs]: + code.putln("%s.%s = %s;" % ( + self.opt_arg_struct, + func_type.opt_arg_cname(formal_arg.name), + actual_arg.result_as(formal_arg.type))) + exc_checks = [] + if self.type.is_pyobject and self.is_temp: + exc_checks.append("!%s" % self.result()) + elif self.type.is_memoryviewslice: + assert self.is_temp + exc_checks.append(self.type.error_condition(self.result())) + elif func_type.exception_check != '+': + exc_val = func_type.exception_value + exc_check = func_type.exception_check + if exc_val is not None: + exc_checks.append("%s == %s" % (self.result(), func_type.return_type.cast_code(exc_val))) + if exc_check: + if self.nogil: + exc_checks.append("__Pyx_ErrOccurredWithGIL()") + else: + exc_checks.append("PyErr_Occurred()") + if self.is_temp or exc_checks: + rhs = self.c_call_code() + if self.result(): + lhs = "%s = " % self.result() + if self.is_temp and self.type.is_pyobject: + #return_type = self.type # func_type.return_type + #print "SimpleCallNode.generate_result_code: casting", rhs, \ + # "from", return_type, "to pyobject" ### + rhs = typecast(py_object_type, self.type, rhs) + else: + lhs = "" + if func_type.exception_check == '+': + translate_cpp_exception(code, self.pos, '%s%s;' % (lhs, rhs), + self.result() if self.type.is_pyobject else None, + func_type.exception_value, self.nogil) + else: + if (self.overflowcheck + and self.type.is_int + and self.type.signed + and self.function.result() in ('abs', 'labs', '__Pyx_abs_longlong')): + goto_error = 'if (unlikely(%s < 0)) { PyErr_SetString(PyExc_OverflowError, "value too large"); 
%s; }' % ( + self.result(), code.error_goto(self.pos)) + elif exc_checks: + goto_error = code.error_goto_if(" && ".join(exc_checks), self.pos) + else: + goto_error = "" + code.putln("%s%s; %s" % (lhs, rhs, goto_error)) + if self.type.is_pyobject and self.result(): + code.put_gotref(self.py_result()) + if self.has_optional_args: + code.funcstate.release_temp(self.opt_arg_struct) + + +class NumPyMethodCallNode(SimpleCallNode): + # Pythran call to a NumPy function or method. + # + # function ExprNode the function/method to call + # arg_tuple TupleNode the arguments as an args tuple + + subexprs = ['function', 'arg_tuple'] + is_temp = True + may_return_none = True + + def generate_evaluation_code(self, code): + code.mark_pos(self.pos) + self.allocate_temp_result(code) + + self.function.generate_evaluation_code(code) + assert self.arg_tuple.mult_factor is None + args = self.arg_tuple.args + for arg in args: + arg.generate_evaluation_code(code) + + code.putln("// function evaluation code for numpy function") + code.putln("__Pyx_call_destructor(%s);" % self.result()) + code.putln("new (&%s) decltype(%s){%s{}(%s)};" % ( + self.result(), + self.result(), + pythran_functor(self.function), + ", ".join(a.pythran_result() for a in args))) + + +class PyMethodCallNode(SimpleCallNode): + # Specialised call to a (potential) PyMethodObject with non-constant argument tuple. + # Allows the self argument to be injected directly instead of repacking a tuple for it. + # + # function ExprNode the function/method object to call + # arg_tuple TupleNode the arguments for the args tuple + + subexprs = ['function', 'arg_tuple'] + is_temp = True + + def generate_evaluation_code(self, code): + code.mark_pos(self.pos) + self.allocate_temp_result(code) + + self.function.generate_evaluation_code(code) + assert self.arg_tuple.mult_factor is None + args = self.arg_tuple.args + for arg in args: + arg.generate_evaluation_code(code) + + # make sure function is in temp so that we can replace the reference below if it's a method + reuse_function_temp = self.function.is_temp + if reuse_function_temp: + function = self.function.result() + else: + function = code.funcstate.allocate_temp(py_object_type, manage_ref=True) + self.function.make_owned_reference(code) + code.put("%s = %s; " % (function, self.function.py_result())) + self.function.generate_disposal_code(code) + self.function.free_temps(code) + + self_arg = code.funcstate.allocate_temp(py_object_type, manage_ref=True) + code.putln("%s = NULL;" % self_arg) + arg_offset_cname = None + if len(args) > 1: + arg_offset_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) + code.putln("%s = 0;" % arg_offset_cname) + + def attribute_is_likely_method(attr): + obj = attr.obj + if obj.is_name and obj.entry.is_pyglobal: + return False # more likely to be a function + return True + + if self.function.is_attribute: + likely_method = 'likely' if attribute_is_likely_method(self.function) else 'unlikely' + elif self.function.is_name and self.function.cf_state: + # not an attribute itself, but might have been assigned from one (e.g. 
bound method)
+            for assignment in self.function.cf_state:
+                value = assignment.rhs
+                if value and value.is_attribute and value.obj.type.is_pyobject:
+                    if attribute_is_likely_method(value):
+                        likely_method = 'likely'
+                        break
+            else:
+                likely_method = 'unlikely'
+        else:
+            likely_method = 'unlikely'
+
+        code.putln("if (CYTHON_UNPACK_METHODS && %s(PyMethod_Check(%s))) {" % (likely_method, function))
+        code.putln("%s = PyMethod_GET_SELF(%s);" % (self_arg, function))
+        # the following is always true in Py3 (kept only for safety),
+        # but is false for unbound methods in Py2
+        code.putln("if (likely(%s)) {" % self_arg)
+        code.putln("PyObject* function = PyMethod_GET_FUNCTION(%s);" % function)
+        code.put_incref(self_arg, py_object_type)
+        code.put_incref("function", py_object_type)
+        # free the method object as early as possible to enable reuse from CPython's freelist
+        code.put_decref_set(function, "function")
+        if len(args) > 1:
+            code.putln("%s = 1;" % arg_offset_cname)
+        code.putln("}")
+        code.putln("}")
+
+        if not args:
+            # fastest special case: try to avoid tuple creation
+            code.globalstate.use_utility_code(
+                UtilityCode.load_cached("PyObjectCallNoArg", "ObjectHandling.c"))
+            code.globalstate.use_utility_code(
+                UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c"))
+            code.putln(
+                "%s = (%s) ? __Pyx_PyObject_CallOneArg(%s, %s) : __Pyx_PyObject_CallNoArg(%s);" % (
+                    self.result(), self_arg,
+                    function, self_arg,
+                    function))
+            code.put_xdecref_clear(self_arg, py_object_type)
+            code.funcstate.release_temp(self_arg)
+            code.putln(code.error_goto_if_null(self.result(), self.pos))
+            code.put_gotref(self.py_result())
+        elif len(args) == 1:
+            # fastest special case: try to avoid tuple creation
+            code.globalstate.use_utility_code(
+                UtilityCode.load_cached("PyObjectCall2Args", "ObjectHandling.c"))
+            code.globalstate.use_utility_code(
+                UtilityCode.load_cached("PyObjectCallOneArg", "ObjectHandling.c"))
+            arg = args[0]
+            code.putln(
+                "%s = (%s) ?
__Pyx_PyObject_Call2Args(%s, %s, %s) : __Pyx_PyObject_CallOneArg(%s, %s);" % ( + self.result(), self_arg, + function, self_arg, arg.py_result(), + function, arg.py_result())) + code.put_xdecref_clear(self_arg, py_object_type) + code.funcstate.release_temp(self_arg) + arg.generate_disposal_code(code) + arg.free_temps(code) + code.putln(code.error_goto_if_null(self.result(), self.pos)) + code.put_gotref(self.py_result()) + else: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyFunctionFastCall", "ObjectHandling.c")) + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyCFunctionFastCall", "ObjectHandling.c")) + for test_func, call_prefix in [('PyFunction_Check', 'Py'), ('__Pyx_PyFastCFunction_Check', 'PyC')]: + code.putln("#if CYTHON_FAST_%sCALL" % call_prefix.upper()) + code.putln("if (%s(%s)) {" % (test_func, function)) + code.putln("PyObject *%s[%d] = {%s, %s};" % ( + Naming.quick_temp_cname, + len(args)+1, + self_arg, + ', '.join(arg.py_result() for arg in args))) + code.putln("%s = __Pyx_%sFunction_FastCall(%s, %s+1-%s, %d+%s); %s" % ( + self.result(), + call_prefix, + function, + Naming.quick_temp_cname, + arg_offset_cname, + len(args), + arg_offset_cname, + code.error_goto_if_null(self.result(), self.pos))) + code.put_xdecref_clear(self_arg, py_object_type) + code.put_gotref(self.py_result()) + for arg in args: + arg.generate_disposal_code(code) + code.putln("} else") + code.putln("#endif") + + code.putln("{") + args_tuple = code.funcstate.allocate_temp(py_object_type, manage_ref=True) + code.putln("%s = PyTuple_New(%d+%s); %s" % ( + args_tuple, len(args), arg_offset_cname, + code.error_goto_if_null(args_tuple, self.pos))) + code.put_gotref(args_tuple) + + if len(args) > 1: + code.putln("if (%s) {" % self_arg) + code.putln("__Pyx_GIVEREF(%s); PyTuple_SET_ITEM(%s, 0, %s); %s = NULL;" % ( + self_arg, args_tuple, self_arg, self_arg)) # stealing owned ref in this case + code.funcstate.release_temp(self_arg) + if len(args) > 1: + code.putln("}") + + for i, arg in enumerate(args): + arg.make_owned_reference(code) + code.put_giveref(arg.py_result()) + code.putln("PyTuple_SET_ITEM(%s, %d+%s, %s);" % ( + args_tuple, i, arg_offset_cname, arg.py_result())) + if len(args) > 1: + code.funcstate.release_temp(arg_offset_cname) + + for arg in args: + arg.generate_post_assignment_code(code) + arg.free_temps(code) + + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectCall", "ObjectHandling.c")) + code.putln( + "%s = __Pyx_PyObject_Call(%s, %s, NULL); %s" % ( + self.result(), + function, args_tuple, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + code.put_decref_clear(args_tuple, py_object_type) + code.funcstate.release_temp(args_tuple) + + if len(args) == 1: + code.putln("}") + code.putln("}") # !CYTHON_FAST_PYCALL + + if reuse_function_temp: + self.function.generate_disposal_code(code) + self.function.free_temps(code) + else: + code.put_decref_clear(function, py_object_type) + code.funcstate.release_temp(function) + + +class InlinedDefNodeCallNode(CallNode): + # Inline call to defnode + # + # function PyCFunctionNode + # function_name NameNode + # args [ExprNode] + + subexprs = ['args', 'function_name'] + is_temp = 1 + type = py_object_type + function = None + function_name = None + + def can_be_inlined(self): + func_type= self.function.def_node + if func_type.star_arg or func_type.starstar_arg: + return False + if len(func_type.args) != len(self.args): + return False + if func_type.num_kwonly_args: + return 
False # actually wrong number of arguments + return True + + def analyse_types(self, env): + self.function_name = self.function_name.analyse_types(env) + + self.args = [ arg.analyse_types(env) for arg in self.args ] + func_type = self.function.def_node + actual_nargs = len(self.args) + + # Coerce arguments + some_args_in_temps = False + for i in range(actual_nargs): + formal_type = func_type.args[i].type + arg = self.args[i].coerce_to(formal_type, env) + if arg.is_temp: + if i > 0: + # first argument in temp doesn't impact subsequent arguments + some_args_in_temps = True + elif arg.type.is_pyobject and not env.nogil: + if arg.nonlocally_immutable(): + # plain local variables are ok + pass + else: + # we do not safely own the argument's reference, + # but we must make sure it cannot be collected + # before we return from the function, so we create + # an owned temp reference to it + if i > 0: # first argument doesn't matter + some_args_in_temps = True + arg = arg.coerce_to_temp(env) + self.args[i] = arg + + if some_args_in_temps: + # if some args are temps and others are not, they may get + # constructed in the wrong order (temps first) => make + # sure they are either all temps or all not temps (except + # for the last argument, which is evaluated last in any + # case) + for i in range(actual_nargs-1): + arg = self.args[i] + if arg.nonlocally_immutable(): + # locals, C functions, unassignable types are safe. + pass + elif arg.type.is_cpp_class: + # Assignment has side effects, avoid. + pass + elif env.nogil and arg.type.is_pyobject: + # can't copy a Python reference into a temp in nogil + # env (this is safe: a construction would fail in + # nogil anyway) + pass + else: + #self.args[i] = arg.coerce_to_temp(env) + # instead: issue a warning + if i > 0: + warning(arg.pos, "Argument evaluation order in C function call is undefined and may not be as expected", 0) + break + return self + + def generate_result_code(self, code): + arg_code = [self.function_name.py_result()] + func_type = self.function.def_node + for arg, proto_arg in zip(self.args, func_type.args): + if arg.type.is_pyobject: + arg_code.append(arg.result_as(proto_arg.type)) + else: + arg_code.append(arg.result()) + arg_code = ', '.join(arg_code) + code.putln( + "%s = %s(%s); %s" % ( + self.result(), + self.function.def_node.entry.pyfunc_cname, + arg_code, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +class PythonCapiFunctionNode(ExprNode): + subexprs = [] + + def __init__(self, pos, py_name, cname, func_type, utility_code = None): + ExprNode.__init__(self, pos, name=py_name, cname=cname, + type=func_type, utility_code=utility_code) + + def analyse_types(self, env): + return self + + def generate_result_code(self, code): + if self.utility_code: + code.globalstate.use_utility_code(self.utility_code) + + def calculate_result_code(self): + return self.cname + + +class PythonCapiCallNode(SimpleCallNode): + # Python C-API Function call (only created in transforms) + + # By default, we assume that the call never returns None, as this + # is true for most C-API functions in CPython. If this does not + # apply to a call, set the following to True (or None to inherit + # the default behaviour). 
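+    # Informal summary of the tri-state convention described above:
+    #
+    #     may_return_none = False  # call never returns None
+    #     may_return_none = True   # call may genuinely return None
+    #     may_return_none = None   # unknown: fall back to generic inference
+    #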
+ may_return_none = False + + def __init__(self, pos, function_name, func_type, + utility_code = None, py_name=None, **kwargs): + self.type = func_type.return_type + self.result_ctype = self.type + self.function = PythonCapiFunctionNode( + pos, py_name, function_name, func_type, + utility_code = utility_code) + # call this last so that we can override the constructed + # attributes above with explicit keyword arguments if required + SimpleCallNode.__init__(self, pos, **kwargs) + + +class CachedBuiltinMethodCallNode(CallNode): + # Python call to a method of a known Python builtin (only created in transforms) + + subexprs = ['obj', 'args'] + is_temp = True + + def __init__(self, call_node, obj, method_name, args): + super(CachedBuiltinMethodCallNode, self).__init__( + call_node.pos, + obj=obj, method_name=method_name, args=args, + may_return_none=call_node.may_return_none, + type=call_node.type) + + def may_be_none(self): + if self.may_return_none is not None: + return self.may_return_none + return ExprNode.may_be_none(self) + + def generate_result_code(self, code): + type_cname = self.obj.type.cname + obj_cname = self.obj.py_result() + args = [arg.py_result() for arg in self.args] + call_code = code.globalstate.cached_unbound_method_call_code( + obj_cname, type_cname, self.method_name, args) + code.putln("%s = %s; %s" % ( + self.result(), call_code, + code.error_goto_if_null(self.result(), self.pos) + )) + code.put_gotref(self.result()) + + +class GeneralCallNode(CallNode): + # General Python function call, including keyword, + # * and ** arguments. + # + # function ExprNode + # positional_args ExprNode Tuple of positional arguments + # keyword_args ExprNode or None Dict of keyword arguments + + type = py_object_type + + subexprs = ['function', 'positional_args', 'keyword_args'] + + nogil_check = Node.gil_error + + def compile_time_value(self, denv): + function = self.function.compile_time_value(denv) + positional_args = self.positional_args.compile_time_value(denv) + keyword_args = self.keyword_args.compile_time_value(denv) + try: + return function(*positional_args, **keyword_args) + except Exception as e: + self.compile_time_value_error(e) + + def explicit_args_kwds(self): + if (self.keyword_args and not self.keyword_args.is_dict_literal or + not self.positional_args.is_sequence_constructor): + raise CompileError(self.pos, + 'Compile-time keyword arguments must be explicit.') + return self.positional_args.args, self.keyword_args + + def analyse_types(self, env): + if self.analyse_as_type_constructor(env): + return self + self.function = self.function.analyse_types(env) + if not self.function.type.is_pyobject: + if self.function.type.is_error: + self.type = error_type + return self + if hasattr(self.function, 'entry'): + node = self.map_to_simple_call_node() + if node is not None and node is not self: + return node.analyse_types(env) + elif self.function.entry.as_variable: + self.function = self.function.coerce_to_pyobject(env) + elif node is self: + error(self.pos, + "Non-trivial keyword arguments and starred " + "arguments not allowed in cdef functions.") + else: + # error was already reported + pass + else: + self.function = self.function.coerce_to_pyobject(env) + if self.keyword_args: + self.keyword_args = self.keyword_args.analyse_types(env) + self.positional_args = self.positional_args.analyse_types(env) + self.positional_args = \ + self.positional_args.coerce_to_pyobject(env) + self.set_py_result_type(self.function) + self.is_temp = 1 + return self + + def 
map_to_simple_call_node(self): + """ + Tries to map keyword arguments to declared positional arguments. + Returns self to try a Python call, None to report an error + or a SimpleCallNode if the mapping succeeds. + """ + if not isinstance(self.positional_args, TupleNode): + # has starred argument + return self + if not self.keyword_args.is_dict_literal: + # keywords come from arbitrary expression => nothing to do here + return self + function = self.function + entry = getattr(function, 'entry', None) + if not entry: + return self + function_type = entry.type + if function_type.is_ptr: + function_type = function_type.base_type + if not function_type.is_cfunction: + return self + + pos_args = self.positional_args.args + kwargs = self.keyword_args + declared_args = function_type.args + if entry.is_cmethod: + declared_args = declared_args[1:] # skip 'self' + + if len(pos_args) > len(declared_args): + error(self.pos, "function call got too many positional arguments, " + "expected %d, got %s" % (len(declared_args), + len(pos_args))) + return None + + matched_args = set([ arg.name for arg in declared_args[:len(pos_args)] + if arg.name ]) + unmatched_args = declared_args[len(pos_args):] + matched_kwargs_count = 0 + args = list(pos_args) + + # check for duplicate keywords + seen = set(matched_args) + has_errors = False + for arg in kwargs.key_value_pairs: + name = arg.key.value + if name in seen: + error(arg.pos, "argument '%s' passed twice" % name) + has_errors = True + # continue to report more errors if there are any + seen.add(name) + + # match keywords that are passed in order + for decl_arg, arg in zip(unmatched_args, kwargs.key_value_pairs): + name = arg.key.value + if decl_arg.name == name: + matched_args.add(name) + matched_kwargs_count += 1 + args.append(arg.value) + else: + break + + # match keyword arguments that are passed out-of-order, but keep + # the evaluation of non-simple arguments in order by moving them + # into temps + from .UtilNodes import EvalWithTempExprNode, LetRefNode + temps = [] + if len(kwargs.key_value_pairs) > matched_kwargs_count: + unmatched_args = declared_args[len(args):] + keywords = dict([ (arg.key.value, (i+len(pos_args), arg)) + for i, arg in enumerate(kwargs.key_value_pairs) ]) + first_missing_keyword = None + for decl_arg in unmatched_args: + name = decl_arg.name + if name not in keywords: + # missing keyword argument => either done or error + if not first_missing_keyword: + first_missing_keyword = name + continue + elif first_missing_keyword: + if entry.as_variable: + # we might be able to convert the function to a Python + # object, which then allows full calling semantics + # with default values in gaps - currently, we only + # support optional arguments at the end + return self + # wasn't the last keyword => gaps are not supported + error(self.pos, "C function call is missing " + "argument '%s'" % first_missing_keyword) + return None + pos, arg = keywords[name] + matched_args.add(name) + matched_kwargs_count += 1 + if arg.value.is_simple(): + args.append(arg.value) + else: + temp = LetRefNode(arg.value) + assert temp.is_simple() + args.append(temp) + temps.append((pos, temp)) + + if temps: + # may have to move preceding non-simple args into temps + final_args = [] + new_temps = [] + first_temp_arg = temps[0][-1] + for arg_value in args: + if arg_value is first_temp_arg: + break # done + if arg_value.is_simple(): + final_args.append(arg_value) + else: + temp = LetRefNode(arg_value) + new_temps.append(temp) + final_args.append(temp) + if new_temps: + args 
= final_args + temps = new_temps + [ arg for i,arg in sorted(temps) ] + + # check for unexpected keywords + for arg in kwargs.key_value_pairs: + name = arg.key.value + if name not in matched_args: + has_errors = True + error(arg.pos, + "C function got unexpected keyword argument '%s'" % + name) + + if has_errors: + # error was reported already + return None + + # all keywords mapped to positional arguments + # if we are missing arguments, SimpleCallNode will figure it out + node = SimpleCallNode(self.pos, function=function, args=args) + for temp in temps[::-1]: + node = EvalWithTempExprNode(temp, node) + return node + + def generate_result_code(self, code): + if self.type.is_error: return + if self.keyword_args: + kwargs = self.keyword_args.py_result() + else: + kwargs = 'NULL' + code.globalstate.use_utility_code(UtilityCode.load_cached( + "PyObjectCall", "ObjectHandling.c")) + code.putln( + "%s = __Pyx_PyObject_Call(%s, %s, %s); %s" % ( + self.result(), + self.function.py_result(), + self.positional_args.py_result(), + kwargs, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +class AsTupleNode(ExprNode): + # Convert argument to tuple. Used for normalising + # the * argument of a function call. + # + # arg ExprNode + + subexprs = ['arg'] + is_temp = 1 + + def calculate_constant_result(self): + self.constant_result = tuple(self.arg.constant_result) + + def compile_time_value(self, denv): + arg = self.arg.compile_time_value(denv) + try: + return tuple(arg) + except Exception as e: + self.compile_time_value_error(e) + + def analyse_types(self, env): + self.arg = self.arg.analyse_types(env).coerce_to_pyobject(env) + if self.arg.type is tuple_type: + return self.arg.as_none_safe_node("'NoneType' object is not iterable") + self.type = tuple_type + return self + + def may_be_none(self): + return False + + nogil_check = Node.gil_error + gil_message = "Constructing Python tuple" + + def generate_result_code(self, code): + cfunc = "__Pyx_PySequence_Tuple" if self.arg.type in (py_object_type, tuple_type) else "PySequence_Tuple" + code.putln( + "%s = %s(%s); %s" % ( + self.result(), + cfunc, self.arg.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +class MergedDictNode(ExprNode): + # Helper class for keyword arguments and other merged dicts. 
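+    # Handles constructs like ``f(**a, **b)``: the operands are merged
+    # left to right into a single dict, and a duplicate key raises an
+    # error when reject_duplicates is set, matching CPython's behaviour
+    # for keyword arguments.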
+ # + # keyword_args [DictNode or other ExprNode] + + subexprs = ['keyword_args'] + is_temp = 1 + type = dict_type + reject_duplicates = True + + def calculate_constant_result(self): + result = {} + reject_duplicates = self.reject_duplicates + for item in self.keyword_args: + if item.is_dict_literal: + # process items in order + items = ((key.constant_result, value.constant_result) + for key, value in item.key_value_pairs) + else: + items = item.constant_result.iteritems() + + for key, value in items: + if reject_duplicates and key in result: + raise ValueError("duplicate keyword argument found: %s" % key) + result[key] = value + + self.constant_result = result + + def compile_time_value(self, denv): + result = {} + reject_duplicates = self.reject_duplicates + for item in self.keyword_args: + if item.is_dict_literal: + # process items in order + items = [(key.compile_time_value(denv), value.compile_time_value(denv)) + for key, value in item.key_value_pairs] + else: + items = item.compile_time_value(denv).iteritems() + + try: + for key, value in items: + if reject_duplicates and key in result: + raise ValueError("duplicate keyword argument found: %s" % key) + result[key] = value + except Exception as e: + self.compile_time_value_error(e) + return result + + def type_dependencies(self, env): + return () + + def infer_type(self, env): + return dict_type + + def analyse_types(self, env): + args = [ + arg.analyse_types(env).coerce_to_pyobject(env).as_none_safe_node( + # FIXME: CPython's error message starts with the runtime function name + 'argument after ** must be a mapping, not NoneType') + for arg in self.keyword_args + ] + + if len(args) == 1 and args[0].type is dict_type: + # strip this intermediate node and use the bare dict + arg = args[0] + if arg.is_name and arg.entry.is_arg and len(arg.entry.cf_assignments) == 1: + # passing **kwargs through to function call => allow NULL + arg.allow_null = True + return arg + + self.keyword_args = args + return self + + def may_be_none(self): + return False + + gil_message = "Constructing Python dict" + + def generate_evaluation_code(self, code): + code.mark_pos(self.pos) + self.allocate_temp_result(code) + + args = iter(self.keyword_args) + item = next(args) + item.generate_evaluation_code(code) + if item.type is not dict_type: + # CPython supports calling functions with non-dicts, so do we + code.putln('if (likely(PyDict_CheckExact(%s))) {' % + item.py_result()) + + if item.is_dict_literal: + item.make_owned_reference(code) + code.putln("%s = %s;" % (self.result(), item.py_result())) + item.generate_post_assignment_code(code) + else: + code.putln("%s = PyDict_Copy(%s); %s" % ( + self.result(), + item.py_result(), + code.error_goto_if_null(self.result(), item.pos))) + code.put_gotref(self.result()) + item.generate_disposal_code(code) + + if item.type is not dict_type: + code.putln('} else {') + code.putln("%s = PyObject_CallFunctionObjArgs((PyObject*)&PyDict_Type, %s, NULL); %s" % ( + self.result(), + item.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + item.generate_disposal_code(code) + code.putln('}') + item.free_temps(code) + + helpers = set() + for item in args: + if item.is_dict_literal: + # inline update instead of creating an intermediate dict + for arg in item.key_value_pairs: + arg.generate_evaluation_code(code) + if self.reject_duplicates: + code.putln("if (unlikely(PyDict_Contains(%s, %s))) {" % ( + self.result(), + arg.key.py_result())) + helpers.add("RaiseDoubleKeywords") + # 
FIXME: find out function name at runtime!
+                        code.putln('__Pyx_RaiseDoubleKeywordsError("function", %s); %s' % (
+                            arg.key.py_result(),
+                            code.error_goto(self.pos)))
+                        code.putln("}")
+                    code.put_error_if_neg(arg.key.pos, "PyDict_SetItem(%s, %s, %s)" % (
+                        self.result(),
+                        arg.key.py_result(),
+                        arg.value.py_result()))
+                    arg.generate_disposal_code(code)
+                    arg.free_temps(code)
+            else:
+                item.generate_evaluation_code(code)
+                if self.reject_duplicates:
+                    # merge mapping into kwdict one by one as we need to check for duplicates
+                    helpers.add("MergeKeywords")
+                    code.put_error_if_neg(item.pos, "__Pyx_MergeKeywords(%s, %s)" % (
+                        self.result(), item.py_result()))
+                else:
+                    # simple case, just add all entries
+                    helpers.add("RaiseMappingExpected")
+                    code.putln("if (unlikely(PyDict_Update(%s, %s) < 0)) {" % (
+                        self.result(), item.py_result()))
+                    code.putln("if (PyErr_ExceptionMatches(PyExc_AttributeError)) "
+                               "__Pyx_RaiseMappingExpectedError(%s);" % item.py_result())
+                    code.putln(code.error_goto(item.pos))
+                    code.putln("}")
+                item.generate_disposal_code(code)
+                item.free_temps(code)
+
+        for helper in sorted(helpers):
+            code.globalstate.use_utility_code(UtilityCode.load_cached(helper, "FunctionArguments.c"))
+
+    def annotate(self, code):
+        for item in self.keyword_args:
+            item.annotate(code)
+
+
+class AttributeNode(ExprNode):
+    #  obj.attribute
+    #
+    #  obj              ExprNode
+    #  attribute        string
+    #  needs_none_check boolean   Used if obj is an extension type.
+    #                             If set to False, obj is already known
+    #                             not to be None, so no None check is
+    #                             generated even under the 'nonecheck'
+    #                             directive.
+    #
+    #  Used internally:
+    #
+    #  is_py_attr       boolean   Is a Python getattr operation
+    #  member           string    C name of struct member
+    #  is_called        boolean   Function call is being done on result
+    #  entry            Entry     Symbol table entry of attribute
+
+    is_attribute = 1
+    subexprs = ['obj']
+
+    type = PyrexTypes.error_type
+    entry = None
+    is_called = 0
+    needs_none_check = True
+    is_memslice_transpose = False
+    is_special_lookup = False
+    is_py_attr = 0
+
+    def as_cython_attribute(self):
+        if (isinstance(self.obj, NameNode) and
+                self.obj.is_cython_module and
+                self.attribute != u"parallel"):
+            return self.attribute
+
+        cy = self.obj.as_cython_attribute()
+        if cy:
+            return "%s.%s" % (cy, self.attribute)
+        return None
+
+    def coerce_to(self, dst_type, env):
+        # If coercing to a generic pyobject and this is a cpdef function
+        # we can create the corresponding attribute
+        if dst_type is py_object_type:
+            entry = self.entry
+            if entry and entry.is_cfunction and entry.as_variable:
+                # must be a cpdef function
+                self.is_temp = 1
+                self.entry = entry.as_variable
+                self.analyse_as_python_attribute(env)
+                return self
+        return ExprNode.coerce_to(self, dst_type, env)
+
+    def calculate_constant_result(self):
+        attr = self.attribute
+        if attr.startswith("__") and attr.endswith("__"):
+            return
+        self.constant_result = getattr(self.obj.constant_result, attr)
+
+    def compile_time_value(self, denv):
+        attr = self.attribute
+        if attr.startswith("__") and attr.endswith("__"):
+            error(self.pos,
+                  "Invalid attribute name '%s' in compile-time expression" % attr)
+            return None
+        obj = self.obj.compile_time_value(denv)
+        try:
+            return getattr(obj, attr)
+        except Exception as e:
+            self.compile_time_value_error(e)
+
+    def type_dependencies(self, env):
+        return self.obj.type_dependencies(env)
+
+    def infer_type(self, env):
+        # FIXME: this is way too redundant with analyse_types()
+        node = self.analyse_as_cimported_attribute_node(env, target=False)
+        if node is not None:
+            return node.entry.type
+        node = self.analyse_as_type_attribute(env)
+ if node is not None: + return node.entry.type + obj_type = self.obj.infer_type(env) + self.analyse_attribute(env, obj_type=obj_type) + if obj_type.is_builtin_type and self.type.is_cfunction: + # special case: C-API replacements for C methods of + # builtin types cannot be inferred as C functions as + # that would prevent their use as bound methods + return py_object_type + elif self.entry and self.entry.is_cmethod: + # special case: bound methods should not be inferred + # as their unbound method types + return py_object_type + return self.type + + def analyse_target_declaration(self, env): + pass + + def analyse_target_types(self, env): + node = self.analyse_types(env, target = 1) + if node.type.is_const: + error(self.pos, "Assignment to const attribute '%s'" % self.attribute) + if not node.is_lvalue(): + error(self.pos, "Assignment to non-lvalue of type '%s'" % self.type) + return node + + def analyse_types(self, env, target = 0): + self.initialized_check = env.directives['initializedcheck'] + node = self.analyse_as_cimported_attribute_node(env, target) + if node is None and not target: + node = self.analyse_as_type_attribute(env) + if node is None: + node = self.analyse_as_ordinary_attribute_node(env, target) + assert node is not None + if node.entry: + node.entry.used = True + if node.is_attribute: + node.wrap_obj_in_nonecheck(env) + return node + + def analyse_as_cimported_attribute_node(self, env, target): + # Try to interpret this as a reference to an imported + # C const, type, var or function. If successful, mutates + # this node into a NameNode and returns 1, otherwise + # returns 0. + module_scope = self.obj.analyse_as_module(env) + if module_scope: + entry = module_scope.lookup_here(self.attribute) + if entry and ( + entry.is_cglobal or entry.is_cfunction + or entry.is_type or entry.is_const): + return self.as_name_node(env, entry, target) + if self.is_cimported_module_without_shadow(env): + error(self.pos, "cimported module has no attribute '%s'" % self.attribute) + return self + return None + + def analyse_as_type_attribute(self, env): + # Try to interpret this as a reference to an unbound + # C method of an extension type or builtin type. If successful, + # creates a corresponding NameNode and returns it, otherwise + # returns None. + if self.obj.is_string_literal: + return + type = self.obj.analyse_as_type(env) + if type: + if type.is_extension_type or type.is_builtin_type or type.is_cpp_class: + entry = type.scope.lookup_here(self.attribute) + if entry and (entry.is_cmethod or type.is_cpp_class and entry.type.is_cfunction): + if type.is_builtin_type: + if not self.is_called: + # must handle this as Python object + return None + ubcm_entry = entry + else: + # Create a temporary entry describing the C method + # as an ordinary function. + if entry.func_cname and not hasattr(entry.type, 'op_arg_struct'): + cname = entry.func_cname + if entry.type.is_static_method or ( + env.parent_scope and env.parent_scope.is_cpp_class_scope): + ctype = entry.type + elif type.is_cpp_class: + error(self.pos, "%s not a static member of %s" % (entry.name, type)) + ctype = PyrexTypes.error_type + else: + # Fix self type. 
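+                                # The copied function type gets its first
+                                # ('self') argument retyped below to the
+                                # extension type itself, so that an unbound
+                                # call like ``SomeExtType.method(obj, ...)``
+                                # type-checks ``obj`` against the extension
+                                # type (``SomeExtType`` is illustrative).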
+ ctype = copy.copy(entry.type) + ctype.args = ctype.args[:] + ctype.args[0] = PyrexTypes.CFuncTypeArg('self', type, 'self', None) + else: + cname = "%s->%s" % (type.vtabptr_cname, entry.cname) + ctype = entry.type + ubcm_entry = Symtab.Entry(entry.name, cname, ctype) + ubcm_entry.is_cfunction = 1 + ubcm_entry.func_cname = entry.func_cname + ubcm_entry.is_unbound_cmethod = 1 + ubcm_entry.scope = entry.scope + return self.as_name_node(env, ubcm_entry, target=False) + elif type.is_enum: + if self.attribute in type.values: + for entry in type.entry.enum_values: + if entry.name == self.attribute: + return self.as_name_node(env, entry, target=False) + else: + error(self.pos, "%s not a known value of %s" % (self.attribute, type)) + else: + error(self.pos, "%s not a known value of %s" % (self.attribute, type)) + return None + + def analyse_as_type(self, env): + module_scope = self.obj.analyse_as_module(env) + if module_scope: + return module_scope.lookup_type(self.attribute) + if not self.obj.is_string_literal: + base_type = self.obj.analyse_as_type(env) + if base_type and hasattr(base_type, 'scope') and base_type.scope is not None: + return base_type.scope.lookup_type(self.attribute) + return None + + def analyse_as_extension_type(self, env): + # Try to interpret this as a reference to an extension type + # in a cimported module. Returns the extension type, or None. + module_scope = self.obj.analyse_as_module(env) + if module_scope: + entry = module_scope.lookup_here(self.attribute) + if entry and entry.is_type: + if entry.type.is_extension_type or entry.type.is_builtin_type: + return entry.type + return None + + def analyse_as_module(self, env): + # Try to interpret this as a reference to a cimported module + # in another cimported module. Returns the module scope, or None. + module_scope = self.obj.analyse_as_module(env) + if module_scope: + entry = module_scope.lookup_here(self.attribute) + if entry and entry.as_module: + return entry.as_module + return None + + def as_name_node(self, env, entry, target): + # Create a corresponding NameNode from this node and complete the + # analyse_types phase. + node = NameNode.from_node(self, name=self.attribute, entry=entry) + if target: + node = node.analyse_target_types(env) + else: + node = node.analyse_rvalue_entry(env) + node.entry.used = 1 + return node + + def analyse_as_ordinary_attribute_node(self, env, target): + self.obj = self.obj.analyse_types(env) + self.analyse_attribute(env) + if self.entry and self.entry.is_cmethod and not self.is_called: +# error(self.pos, "C method can only be called") + pass + ## Reference to C array turns into pointer to first element. + #while self.type.is_array: + # self.type = self.type.element_ptr_type() + if self.is_py_attr: + if not target: + self.is_temp = 1 + self.result_ctype = py_object_type + elif target and self.obj.type.is_builtin_type: + error(self.pos, "Assignment to an immutable object field") + #elif self.type.is_memoryviewslice and not target: + # self.is_temp = True + return self + + def analyse_attribute(self, env, obj_type = None): + # Look up attribute and set self.type and self.member. 
+ immutable_obj = obj_type is not None # used during type inference + self.is_py_attr = 0 + self.member = self.attribute + if obj_type is None: + if self.obj.type.is_string or self.obj.type.is_pyunicode_ptr: + self.obj = self.obj.coerce_to_pyobject(env) + obj_type = self.obj.type + else: + if obj_type.is_string or obj_type.is_pyunicode_ptr: + obj_type = py_object_type + if obj_type.is_ptr or obj_type.is_array: + obj_type = obj_type.base_type + self.op = "->" + elif obj_type.is_extension_type or obj_type.is_builtin_type: + self.op = "->" + elif obj_type.is_reference and obj_type.is_fake_reference: + self.op = "->" + else: + self.op = "." + if obj_type.has_attributes: + if obj_type.attributes_known(): + entry = obj_type.scope.lookup_here(self.attribute) + if obj_type.is_memoryviewslice and not entry: + if self.attribute == 'T': + self.is_memslice_transpose = True + self.is_temp = True + self.use_managed_ref = True + self.type = self.obj.type.transpose(self.pos) + return + else: + obj_type.declare_attribute(self.attribute, env, self.pos) + entry = obj_type.scope.lookup_here(self.attribute) + if entry and entry.is_member: + entry = None + else: + error(self.pos, + "Cannot select attribute of incomplete type '%s'" + % obj_type) + self.type = PyrexTypes.error_type + return + self.entry = entry + if entry: + if obj_type.is_extension_type and entry.name == "__weakref__": + error(self.pos, "Illegal use of special attribute __weakref__") + + # def methods need the normal attribute lookup + # because they do not have struct entries + # fused function go through assignment synthesis + # (foo = pycfunction(foo_func_obj)) and need to go through + # regular Python lookup as well + if (entry.is_variable and not entry.fused_cfunction) or entry.is_cmethod: + self.type = entry.type + self.member = entry.cname + return + else: + # If it's not a variable or C method, it must be a Python + # method of an extension type, so we treat it like a Python + # attribute. + pass + # If we get here, the base object is not a struct/union/extension + # type, or it is an extension type and the attribute is either not + # declared or is declared as a Python method. Treat it as a Python + # attribute reference. + self.analyse_as_python_attribute(env, obj_type, immutable_obj) + + def analyse_as_python_attribute(self, env, obj_type=None, immutable_obj=False): + if obj_type is None: + obj_type = self.obj.type + # mangle private '__*' Python attributes used inside of a class + self.attribute = env.mangle_class_private_name(self.attribute) + self.member = self.attribute + self.type = py_object_type + self.is_py_attr = 1 + + if not obj_type.is_pyobject and not obj_type.is_error: + # Expose python methods for immutable objects. 
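+            # e.g. ``x.conjugate()`` on a C double, or a method call on a
+            # coercible struct: the C value is first converted to its
+            # Python equivalent so the attribute lookup can happen there.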
+ if (obj_type.is_string or obj_type.is_cpp_string + or obj_type.is_buffer or obj_type.is_memoryviewslice + or obj_type.is_numeric + or (obj_type.is_ctuple and obj_type.can_coerce_to_pyobject(env)) + or (obj_type.is_struct and obj_type.can_coerce_to_pyobject(env))): + if not immutable_obj: + self.obj = self.obj.coerce_to_pyobject(env) + elif (obj_type.is_cfunction and (self.obj.is_name or self.obj.is_attribute) + and self.obj.entry.as_variable + and self.obj.entry.as_variable.type.is_pyobject): + # might be an optimised builtin function => unpack it + if not immutable_obj: + self.obj = self.obj.coerce_to_pyobject(env) + else: + error(self.pos, + "Object of type '%s' has no attribute '%s'" % + (obj_type, self.attribute)) + + def wrap_obj_in_nonecheck(self, env): + if not env.directives['nonecheck']: + return + + msg = None + format_args = () + if (self.obj.type.is_extension_type and self.needs_none_check and not + self.is_py_attr): + msg = "'NoneType' object has no attribute '%{0}s'".format('.30' if len(self.attribute) <= 30 else '') + format_args = (self.attribute,) + elif self.obj.type.is_memoryviewslice: + if self.is_memslice_transpose: + msg = "Cannot transpose None memoryview slice" + else: + entry = self.obj.type.scope.lookup_here(self.attribute) + if entry: + # copy/is_c_contig/shape/strides etc + msg = "Cannot access '%s' attribute of None memoryview slice" + format_args = (entry.name,) + + if msg: + self.obj = self.obj.as_none_safe_node(msg, 'PyExc_AttributeError', + format_args=format_args) + + def nogil_check(self, env): + if self.is_py_attr: + self.gil_error() + + gil_message = "Accessing Python attribute" + + def is_cimported_module_without_shadow(self, env): + return self.obj.is_cimported_module_without_shadow(env) + + def is_simple(self): + if self.obj: + return self.result_in_temp() or self.obj.is_simple() + else: + return NameNode.is_simple(self) + + def is_lvalue(self): + if self.obj: + return True + else: + return NameNode.is_lvalue(self) + + def is_ephemeral(self): + if self.obj: + return self.obj.is_ephemeral() + else: + return NameNode.is_ephemeral(self) + + def calculate_result_code(self): + #print "AttributeNode.calculate_result_code:", self.member ### + #print "...obj node =", self.obj, "code", self.obj.result() ### + #print "...obj type", self.obj.type, "ctype", self.obj.ctype() ### + obj = self.obj + obj_code = obj.result_as(obj.type) + #print "...obj_code =", obj_code ### + if self.entry and self.entry.is_cmethod: + if obj.type.is_extension_type and not self.entry.is_builtin_cmethod: + if self.entry.final_func_cname: + return self.entry.final_func_cname + + if self.type.from_fused: + # If the attribute was specialized through indexing, make + # sure to get the right fused name, as our entry was + # replaced by our parent index node + # (AnalyseExpressionsTransform) + self.member = self.entry.cname + + return "((struct %s *)%s%s%s)->%s" % ( + obj.type.vtabstruct_cname, obj_code, self.op, + obj.type.vtabslot_cname, self.member) + elif self.result_is_used: + return self.member + # Generating no code at all for unused access to optimised builtin + # methods fixes the problem that some optimisations only exist as + # macros, i.e. there is no function pointer to them, so we would + # generate invalid C code here. 
+ return + elif obj.type.is_complex: + return "__Pyx_C%s(%s)" % (self.member.upper(), obj_code) + else: + if obj.type.is_builtin_type and self.entry and self.entry.is_variable: + # accessing a field of a builtin type, need to cast better than result_as() does + obj_code = obj.type.cast_code(obj.result(), to_object_struct = True) + return "%s%s%s" % (obj_code, self.op, self.member) + + def generate_result_code(self, code): + if self.is_py_attr: + if self.is_special_lookup: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectLookupSpecial", "ObjectHandling.c")) + lookup_func_name = '__Pyx_PyObject_LookupSpecial' + else: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectGetAttrStr", "ObjectHandling.c")) + lookup_func_name = '__Pyx_PyObject_GetAttrStr' + code.putln( + '%s = %s(%s, %s); %s' % ( + self.result(), + lookup_func_name, + self.obj.py_result(), + code.intern_identifier(self.attribute), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + elif self.type.is_memoryviewslice: + if self.is_memslice_transpose: + # transpose the slice + for access, packing in self.type.axes: + if access == 'ptr': + error(self.pos, "Transposing not supported for slices " + "with indirect dimensions") + return + + code.putln("%s = %s;" % (self.result(), self.obj.result())) + code.put_incref_memoryviewslice(self.result(), have_gil=True) + + T = "__pyx_memslice_transpose(&%s) == 0" + code.putln(code.error_goto_if(T % self.result(), self.pos)) + elif self.initialized_check: + code.putln( + 'if (unlikely(!%s.memview)) {' + 'PyErr_SetString(PyExc_AttributeError,' + '"Memoryview is not initialized");' + '%s' + '}' % (self.result(), code.error_goto(self.pos))) + else: + # result_code contains what is needed, but we may need to insert + # a check and raise an exception + if self.obj.type and self.obj.type.is_extension_type: + pass + elif self.entry and self.entry.is_cmethod: + # C method implemented as function call with utility code + code.globalstate.use_entry_utility_code(self.entry) + + def generate_disposal_code(self, code): + if self.is_temp and self.type.is_memoryviewslice and self.is_memslice_transpose: + # mirror condition for putting the memview incref here: + code.put_xdecref_memoryviewslice( + self.result(), have_gil=True) + code.putln("%s.memview = NULL;" % self.result()) + code.putln("%s.data = NULL;" % self.result()) + else: + ExprNode.generate_disposal_code(self, code) + + def generate_assignment_code(self, rhs, code, overloaded_assignment=False, + exception_check=None, exception_value=None): + self.obj.generate_evaluation_code(code) + if self.is_py_attr: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectSetAttrStr", "ObjectHandling.c")) + code.put_error_if_neg(self.pos, + '__Pyx_PyObject_SetAttrStr(%s, %s, %s)' % ( + self.obj.py_result(), + code.intern_identifier(self.attribute), + rhs.py_result())) + rhs.generate_disposal_code(code) + rhs.free_temps(code) + elif self.obj.type.is_complex: + code.putln("__Pyx_SET_C%s(%s, %s);" % ( + self.member.upper(), + self.obj.result_as(self.obj.type), + rhs.result_as(self.ctype()))) + else: + select_code = self.result() + if self.type.is_pyobject and self.use_managed_ref: + rhs.make_owned_reference(code) + code.put_giveref(rhs.py_result()) + code.put_gotref(select_code) + code.put_decref(select_code, self.ctype()) + elif self.type.is_memoryviewslice: + from . 
import MemoryView + MemoryView.put_assign_to_memviewslice( + select_code, rhs, rhs.result(), self.type, code) + + if not self.type.is_memoryviewslice: + code.putln( + "%s = %s;" % ( + select_code, + rhs.result_as(self.ctype()))) + #rhs.result())) + rhs.generate_post_assignment_code(code) + rhs.free_temps(code) + self.obj.generate_disposal_code(code) + self.obj.free_temps(code) + + def generate_deletion_code(self, code, ignore_nonexisting=False): + self.obj.generate_evaluation_code(code) + if self.is_py_attr or (self.entry.scope.is_property_scope + and u'__del__' in self.entry.scope.entries): + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyObjectSetAttrStr", "ObjectHandling.c")) + code.put_error_if_neg(self.pos, + '__Pyx_PyObject_DelAttrStr(%s, %s)' % ( + self.obj.py_result(), + code.intern_identifier(self.attribute))) + else: + error(self.pos, "Cannot delete C attribute of extension type") + self.obj.generate_disposal_code(code) + self.obj.free_temps(code) + + def annotate(self, code): + if self.is_py_attr: + style, text = 'py_attr', 'python attribute (%s)' + else: + style, text = 'c_attr', 'c attribute (%s)' + code.annotate(self.pos, AnnotationItem(style, text % self.type, size=len(self.attribute))) + + +#------------------------------------------------------------------- +# +# Constructor nodes +# +#------------------------------------------------------------------- + +class StarredUnpackingNode(ExprNode): + # A starred expression like "*a" + # + # This is only allowed in sequence assignment or construction such as + # + # a, *b = (1,2,3,4) => a = 1 ; b = [2,3,4] + # + # and will be special cased during type analysis (or generate an error + # if it's found at unexpected places). + # + # target ExprNode + + subexprs = ['target'] + is_starred = 1 + type = py_object_type + is_temp = 1 + starred_expr_allowed_here = False + + def __init__(self, pos, target): + ExprNode.__init__(self, pos, target=target) + + def analyse_declarations(self, env): + if not self.starred_expr_allowed_here: + error(self.pos, "starred expression is not allowed here") + self.target.analyse_declarations(env) + + def infer_type(self, env): + return self.target.infer_type(env) + + def analyse_types(self, env): + if not self.starred_expr_allowed_here: + error(self.pos, "starred expression is not allowed here") + self.target = self.target.analyse_types(env) + self.type = self.target.type + return self + + def analyse_target_declaration(self, env): + self.target.analyse_target_declaration(env) + + def analyse_target_types(self, env): + self.target = self.target.analyse_target_types(env) + self.type = self.target.type + return self + + def calculate_result_code(self): + return "" + + def generate_result_code(self, code): + pass + + +class SequenceNode(ExprNode): + # Base class for list and tuple constructor nodes. + # Contains common code for performing sequence unpacking. + # + # args [ExprNode] + # unpacked_items [ExprNode] or None + # coerced_unpacked_items [ExprNode] or None + # mult_factor ExprNode the integer number of content repetitions ([1,2]*3) + + subexprs = ['args', 'mult_factor'] + + is_sequence_constructor = 1 + unpacked_items = None + mult_factor = None + slow = False # trade speed for code size (e.g. 
use PyTuple_Pack()) + + def compile_time_value_list(self, denv): + return [arg.compile_time_value(denv) for arg in self.args] + + def replace_starred_target_node(self): + # replace a starred node in the targets by the contained expression + self.starred_assignment = False + args = [] + for arg in self.args: + if arg.is_starred: + if self.starred_assignment: + error(arg.pos, "more than 1 starred expression in assignment") + self.starred_assignment = True + arg = arg.target + arg.is_starred = True + args.append(arg) + self.args = args + + def analyse_target_declaration(self, env): + self.replace_starred_target_node() + for arg in self.args: + arg.analyse_target_declaration(env) + + def analyse_types(self, env, skip_children=False): + for i, arg in enumerate(self.args): + if not skip_children: + arg = arg.analyse_types(env) + self.args[i] = arg.coerce_to_pyobject(env) + if self.mult_factor: + self.mult_factor = self.mult_factor.analyse_types(env) + if not self.mult_factor.type.is_int: + self.mult_factor = self.mult_factor.coerce_to_pyobject(env) + self.is_temp = 1 + # not setting self.type here, subtypes do this + return self + + def coerce_to_ctuple(self, dst_type, env): + if self.type == dst_type: + return self + assert not self.mult_factor + if len(self.args) != dst_type.size: + error(self.pos, "trying to coerce sequence to ctuple of wrong length, expected %d, got %d" % ( + dst_type.size, len(self.args))) + coerced_args = [arg.coerce_to(type, env) for arg, type in zip(self.args, dst_type.components)] + return TupleNode(self.pos, args=coerced_args, type=dst_type, is_temp=True) + + def _create_merge_node_if_necessary(self, env): + self._flatten_starred_args() + if not any(arg.is_starred for arg in self.args): + return self + # convert into MergedSequenceNode by building partial sequences + args = [] + values = [] + for arg in self.args: + if arg.is_starred: + if values: + args.append(TupleNode(values[0].pos, args=values).analyse_types(env, skip_children=True)) + values = [] + args.append(arg.target) + else: + values.append(arg) + if values: + args.append(TupleNode(values[0].pos, args=values).analyse_types(env, skip_children=True)) + node = MergedSequenceNode(self.pos, args, self.type) + if self.mult_factor: + node = binop_node( + self.pos, '*', node, self.mult_factor.coerce_to_pyobject(env), + inplace=True, type=self.type, is_temp=True) + return node + + def _flatten_starred_args(self): + args = [] + for arg in self.args: + if arg.is_starred and arg.target.is_sequence_constructor and not arg.target.mult_factor: + args.extend(arg.target.args) + else: + args.append(arg) + self.args[:] = args + + def may_be_none(self): + return False + + def analyse_target_types(self, env): + if self.mult_factor: + error(self.pos, "can't assign to multiplied sequence") + self.unpacked_items = [] + self.coerced_unpacked_items = [] + self.any_coerced_items = False + for i, arg in enumerate(self.args): + arg = self.args[i] = arg.analyse_target_types(env) + if arg.is_starred: + if not arg.type.assignable_from(list_type): + error(arg.pos, + "starred target must have Python object (list) type") + if arg.type is py_object_type: + arg.type = list_type + unpacked_item = PyTempNode(self.pos, env) + coerced_unpacked_item = unpacked_item.coerce_to(arg.type, env) + if unpacked_item is not coerced_unpacked_item: + self.any_coerced_items = True + self.unpacked_items.append(unpacked_item) + self.coerced_unpacked_items.append(coerced_unpacked_item) + self.type = py_object_type + return self + + def generate_result_code(self, 
code): + self.generate_operation_code(code) + + def generate_sequence_packing_code(self, code, target=None, plain=False): + if target is None: + target = self.result() + size_factor = c_mult = '' + mult_factor = None + + if self.mult_factor and not plain: + mult_factor = self.mult_factor + if mult_factor.type.is_int: + c_mult = mult_factor.result() + if (isinstance(mult_factor.constant_result, _py_int_types) and + mult_factor.constant_result > 0): + size_factor = ' * %s' % mult_factor.constant_result + elif mult_factor.type.signed: + size_factor = ' * ((%s<0) ? 0:%s)' % (c_mult, c_mult) + else: + size_factor = ' * (%s)' % (c_mult,) + + if self.type is tuple_type and (self.is_literal or self.slow) and not c_mult: + # use PyTuple_Pack() to avoid generating huge amounts of one-time code + code.putln('%s = PyTuple_Pack(%d, %s); %s' % ( + target, + len(self.args), + ', '.join(arg.py_result() for arg in self.args), + code.error_goto_if_null(target, self.pos))) + code.put_gotref(target) + elif self.type.is_ctuple: + for i, arg in enumerate(self.args): + code.putln("%s.f%s = %s;" % ( + target, i, arg.result())) + else: + # build the tuple/list step by step, potentially multiplying it as we go + if self.type is list_type: + create_func, set_item_func = 'PyList_New', 'PyList_SET_ITEM' + elif self.type is tuple_type: + create_func, set_item_func = 'PyTuple_New', 'PyTuple_SET_ITEM' + else: + raise InternalError("sequence packing for unexpected type %s" % self.type) + arg_count = len(self.args) + code.putln("%s = %s(%s%s); %s" % ( + target, create_func, arg_count, size_factor, + code.error_goto_if_null(target, self.pos))) + code.put_gotref(target) + + if c_mult: + # FIXME: can't use a temp variable here as the code may + # end up in the constant building function. Temps + # currently don't work there. + + #counter = code.funcstate.allocate_temp(mult_factor.type, manage_ref=False) + counter = Naming.quick_temp_cname + code.putln('{ Py_ssize_t %s;' % counter) + if arg_count == 1: + offset = counter + else: + offset = '%s * %s' % (counter, arg_count) + code.putln('for (%s=0; %s < %s; %s++) {' % ( + counter, counter, c_mult, counter + )) + else: + offset = '' + + for i in range(arg_count): + arg = self.args[i] + if c_mult or not arg.result_in_temp(): + code.put_incref(arg.result(), arg.ctype()) + code.put_giveref(arg.py_result()) + code.putln("%s(%s, %s, %s);" % ( + set_item_func, + target, + (offset and i) and ('%s + %s' % (offset, i)) or (offset or i), + arg.py_result())) + + if c_mult: + code.putln('}') + #code.funcstate.release_temp(counter) + code.putln('}') + + if mult_factor is not None and mult_factor.type.is_pyobject: + code.putln('{ PyObject* %s = PyNumber_InPlaceMultiply(%s, %s); %s' % ( + Naming.quick_temp_cname, target, mult_factor.py_result(), + code.error_goto_if_null(Naming.quick_temp_cname, self.pos) + )) + code.put_gotref(Naming.quick_temp_cname) + code.put_decref(target, py_object_type) + code.putln('%s = %s;' % (target, Naming.quick_temp_cname)) + code.putln('}') + + def generate_subexpr_disposal_code(self, code): + if self.mult_factor and self.mult_factor.type.is_int: + super(SequenceNode, self).generate_subexpr_disposal_code(code) + elif self.type is tuple_type and (self.is_literal or self.slow): + super(SequenceNode, self).generate_subexpr_disposal_code(code) + else: + # We call generate_post_assignment_code here instead + # of generate_disposal_code, because values were stored + # in the tuple using a reference-stealing operation. 
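+            # (PyTuple_SET_ITEM/PyList_SET_ITEM steal the reference, so the
+            # normal disposal would decref a reference we no longer own)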
+ for arg in self.args: + arg.generate_post_assignment_code(code) + # Should NOT call free_temps -- this is invoked by the default + # generate_evaluation_code which will do that. + if self.mult_factor: + self.mult_factor.generate_disposal_code(code) + + def generate_assignment_code(self, rhs, code, overloaded_assignment=False, + exception_check=None, exception_value=None): + if self.starred_assignment: + self.generate_starred_assignment_code(rhs, code) + else: + self.generate_parallel_assignment_code(rhs, code) + + for item in self.unpacked_items: + item.release(code) + rhs.free_temps(code) + + _func_iternext_type = PyrexTypes.CPtrType(PyrexTypes.CFuncType( + PyrexTypes.py_object_type, [ + PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None), + ])) + + def generate_parallel_assignment_code(self, rhs, code): + # Need to work around the fact that generate_evaluation_code + # allocates the temps in a rather hacky way -- the assignment + # is evaluated twice, within each if-block. + for item in self.unpacked_items: + item.allocate(code) + special_unpack = (rhs.type is py_object_type + or rhs.type in (tuple_type, list_type) + or not rhs.type.is_builtin_type) + long_enough_for_a_loop = len(self.unpacked_items) > 3 + + if special_unpack: + self.generate_special_parallel_unpacking_code( + code, rhs, use_loop=long_enough_for_a_loop) + else: + code.putln("{") + self.generate_generic_parallel_unpacking_code( + code, rhs, self.unpacked_items, use_loop=long_enough_for_a_loop) + code.putln("}") + + for value_node in self.coerced_unpacked_items: + value_node.generate_evaluation_code(code) + for i in range(len(self.args)): + self.args[i].generate_assignment_code( + self.coerced_unpacked_items[i], code) + + def generate_special_parallel_unpacking_code(self, code, rhs, use_loop): + sequence_type_test = '1' + none_check = "likely(%s != Py_None)" % rhs.py_result() + if rhs.type is list_type: + sequence_types = ['List'] + if rhs.may_be_none(): + sequence_type_test = none_check + elif rhs.type is tuple_type: + sequence_types = ['Tuple'] + if rhs.may_be_none(): + sequence_type_test = none_check + else: + sequence_types = ['Tuple', 'List'] + tuple_check = 'likely(PyTuple_CheckExact(%s))' % rhs.py_result() + list_check = 'PyList_CheckExact(%s)' % rhs.py_result() + sequence_type_test = "(%s) || (%s)" % (tuple_check, list_check) + + code.putln("if (%s) {" % sequence_type_test) + code.putln("PyObject* sequence = %s;" % rhs.py_result()) + + # list/tuple => check size + code.putln("Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);") + code.putln("if (unlikely(size != %d)) {" % len(self.args)) + code.globalstate.use_utility_code(raise_too_many_values_to_unpack) + code.putln("if (size > %d) __Pyx_RaiseTooManyValuesError(%d);" % ( + len(self.args), len(self.args))) + code.globalstate.use_utility_code(raise_need_more_values_to_unpack) + code.putln("else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);") + # < 0 => exception + code.putln(code.error_goto(self.pos)) + code.putln("}") + + code.putln("#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS") + # unpack items from list/tuple in unrolled loop (can't fail) + if len(sequence_types) == 2: + code.putln("if (likely(Py%s_CheckExact(sequence))) {" % sequence_types[0]) + for i, item in enumerate(self.unpacked_items): + code.putln("%s = Py%s_GET_ITEM(sequence, %d); " % ( + item.result(), sequence_types[0], i)) + if len(sequence_types) == 2: + code.putln("} else {") + for i, item in enumerate(self.unpacked_items): + code.putln("%s = 
Py%s_GET_ITEM(sequence, %d); " % ( + item.result(), sequence_types[1], i)) + code.putln("}") + for item in self.unpacked_items: + code.put_incref(item.result(), item.ctype()) + + code.putln("#else") + # in non-CPython, use the PySequence protocol (which can fail) + if not use_loop: + for i, item in enumerate(self.unpacked_items): + code.putln("%s = PySequence_ITEM(sequence, %d); %s" % ( + item.result(), i, + code.error_goto_if_null(item.result(), self.pos))) + code.put_gotref(item.result()) + else: + code.putln("{") + code.putln("Py_ssize_t i;") + code.putln("PyObject** temps[%s] = {%s};" % ( + len(self.unpacked_items), + ','.join(['&%s' % item.result() for item in self.unpacked_items]))) + code.putln("for (i=0; i < %s; i++) {" % len(self.unpacked_items)) + code.putln("PyObject* item = PySequence_ITEM(sequence, i); %s" % ( + code.error_goto_if_null('item', self.pos))) + code.put_gotref('item') + code.putln("*(temps[i]) = item;") + code.putln("}") + code.putln("}") + + code.putln("#endif") + rhs.generate_disposal_code(code) + + if sequence_type_test == '1': + code.putln("}") # all done + elif sequence_type_test == none_check: + # either tuple/list or None => save some code by generating the error directly + code.putln("} else {") + code.globalstate.use_utility_code( + UtilityCode.load_cached("RaiseNoneIterError", "ObjectHandling.c")) + code.putln("__Pyx_RaiseNoneNotIterableError(); %s" % code.error_goto(self.pos)) + code.putln("}") # all done + else: + code.putln("} else {") # needs iteration fallback code + self.generate_generic_parallel_unpacking_code( + code, rhs, self.unpacked_items, use_loop=use_loop) + code.putln("}") + + def generate_generic_parallel_unpacking_code(self, code, rhs, unpacked_items, use_loop, terminate=True): + code.globalstate.use_utility_code(raise_need_more_values_to_unpack) + code.globalstate.use_utility_code(UtilityCode.load_cached("IterFinish", "ObjectHandling.c")) + code.putln("Py_ssize_t index = -1;") # must be at the start of a C block! 
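+        # (the generated C is kept C89-compatible, and C89 does not allow
+        # declarations after statements within a block)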
+ + if use_loop: + code.putln("PyObject** temps[%s] = {%s};" % ( + len(self.unpacked_items), + ','.join(['&%s' % item.result() for item in unpacked_items]))) + + iterator_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True) + code.putln( + "%s = PyObject_GetIter(%s); %s" % ( + iterator_temp, + rhs.py_result(), + code.error_goto_if_null(iterator_temp, self.pos))) + code.put_gotref(iterator_temp) + rhs.generate_disposal_code(code) + + iternext_func = code.funcstate.allocate_temp(self._func_iternext_type, manage_ref=False) + code.putln("%s = Py_TYPE(%s)->tp_iternext;" % ( + iternext_func, iterator_temp)) + + unpacking_error_label = code.new_label('unpacking_failed') + unpack_code = "%s(%s)" % (iternext_func, iterator_temp) + if use_loop: + code.putln("for (index=0; index < %s; index++) {" % len(unpacked_items)) + code.put("PyObject* item = %s; if (unlikely(!item)) " % unpack_code) + code.put_goto(unpacking_error_label) + code.put_gotref("item") + code.putln("*(temps[index]) = item;") + code.putln("}") + else: + for i, item in enumerate(unpacked_items): + code.put( + "index = %d; %s = %s; if (unlikely(!%s)) " % ( + i, + item.result(), + unpack_code, + item.result())) + code.put_goto(unpacking_error_label) + code.put_gotref(item.py_result()) + + if terminate: + code.globalstate.use_utility_code( + UtilityCode.load_cached("UnpackItemEndCheck", "ObjectHandling.c")) + code.put_error_if_neg(self.pos, "__Pyx_IternextUnpackEndCheck(%s, %d)" % ( + unpack_code, + len(unpacked_items))) + code.putln("%s = NULL;" % iternext_func) + code.put_decref_clear(iterator_temp, py_object_type) + + unpacking_done_label = code.new_label('unpacking_done') + code.put_goto(unpacking_done_label) + + code.put_label(unpacking_error_label) + code.put_decref_clear(iterator_temp, py_object_type) + code.putln("%s = NULL;" % iternext_func) + code.putln("if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);") + code.putln(code.error_goto(self.pos)) + code.put_label(unpacking_done_label) + + code.funcstate.release_temp(iternext_func) + if terminate: + code.funcstate.release_temp(iterator_temp) + iterator_temp = None + + return iterator_temp + + def generate_starred_assignment_code(self, rhs, code): + for i, arg in enumerate(self.args): + if arg.is_starred: + starred_target = self.unpacked_items[i] + unpacked_fixed_items_left = self.unpacked_items[:i] + unpacked_fixed_items_right = self.unpacked_items[i+1:] + break + else: + assert False + + iterator_temp = None + if unpacked_fixed_items_left: + for item in unpacked_fixed_items_left: + item.allocate(code) + code.putln('{') + iterator_temp = self.generate_generic_parallel_unpacking_code( + code, rhs, unpacked_fixed_items_left, + use_loop=True, terminate=False) + for i, item in enumerate(unpacked_fixed_items_left): + value_node = self.coerced_unpacked_items[i] + value_node.generate_evaluation_code(code) + code.putln('}') + + starred_target.allocate(code) + target_list = starred_target.result() + code.putln("%s = PySequence_List(%s); %s" % ( + target_list, + iterator_temp or rhs.py_result(), + code.error_goto_if_null(target_list, self.pos))) + code.put_gotref(target_list) + + if iterator_temp: + code.put_decref_clear(iterator_temp, py_object_type) + code.funcstate.release_temp(iterator_temp) + else: + rhs.generate_disposal_code(code) + + if unpacked_fixed_items_right: + code.globalstate.use_utility_code(raise_need_more_values_to_unpack) + length_temp = code.funcstate.allocate_temp(PyrexTypes.c_py_ssize_t_type, manage_ref=False) + code.putln('%s = 
PyList_GET_SIZE(%s);' % (length_temp, target_list)) + code.putln("if (unlikely(%s < %d)) {" % (length_temp, len(unpacked_fixed_items_right))) + code.putln("__Pyx_RaiseNeedMoreValuesError(%d+%s); %s" % ( + len(unpacked_fixed_items_left), length_temp, + code.error_goto(self.pos))) + code.putln('}') + + for item in unpacked_fixed_items_right[::-1]: + item.allocate(code) + for i, (item, coerced_arg) in enumerate(zip(unpacked_fixed_items_right[::-1], + self.coerced_unpacked_items[::-1])): + code.putln('#if CYTHON_COMPILING_IN_CPYTHON') + code.putln("%s = PyList_GET_ITEM(%s, %s-%d); " % ( + item.py_result(), target_list, length_temp, i+1)) + # resize the list the hard way + code.putln("((PyVarObject*)%s)->ob_size--;" % target_list) + code.putln('#else') + code.putln("%s = PySequence_ITEM(%s, %s-%d); " % ( + item.py_result(), target_list, length_temp, i+1)) + code.putln('#endif') + code.put_gotref(item.py_result()) + coerced_arg.generate_evaluation_code(code) + + code.putln('#if !CYTHON_COMPILING_IN_CPYTHON') + sublist_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True) + code.putln('%s = PySequence_GetSlice(%s, 0, %s-%d); %s' % ( + sublist_temp, target_list, length_temp, len(unpacked_fixed_items_right), + code.error_goto_if_null(sublist_temp, self.pos))) + code.put_gotref(sublist_temp) + code.funcstate.release_temp(length_temp) + code.put_decref(target_list, py_object_type) + code.putln('%s = %s; %s = NULL;' % (target_list, sublist_temp, sublist_temp)) + code.putln('#else') + code.putln('(void)%s;' % sublist_temp) # avoid warning about unused variable + code.funcstate.release_temp(sublist_temp) + code.putln('#endif') + + for i, arg in enumerate(self.args): + arg.generate_assignment_code(self.coerced_unpacked_items[i], code) + + def annotate(self, code): + for arg in self.args: + arg.annotate(code) + if self.unpacked_items: + for arg in self.unpacked_items: + arg.annotate(code) + for arg in self.coerced_unpacked_items: + arg.annotate(code) + + +class TupleNode(SequenceNode): + # Tuple constructor. 
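+    #
+    # Tuples whose items are all compile-time constants are built once and
+    # cached as a module-level constant (is_literal); for a constant tuple
+    # multiplied by a non-constant factor, only the constant part is cached
+    # (is_partly_literal).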
+ + type = tuple_type + is_partly_literal = False + + gil_message = "Constructing Python tuple" + + def infer_type(self, env): + if self.mult_factor or not self.args: + return tuple_type + arg_types = [arg.infer_type(env) for arg in self.args] + if any(type.is_pyobject or type.is_memoryviewslice or type.is_unspecified or type.is_fused + for type in arg_types): + return tuple_type + return env.declare_tuple_type(self.pos, arg_types).type + + def analyse_types(self, env, skip_children=False): + if len(self.args) == 0: + self.is_temp = False + self.is_literal = True + return self + + if not skip_children: + for i, arg in enumerate(self.args): + if arg.is_starred: + arg.starred_expr_allowed_here = True + self.args[i] = arg.analyse_types(env) + if (not self.mult_factor and + not any((arg.is_starred or arg.type.is_pyobject or arg.type.is_memoryviewslice or arg.type.is_fused) + for arg in self.args)): + self.type = env.declare_tuple_type(self.pos, (arg.type for arg in self.args)).type + self.is_temp = 1 + return self + + node = SequenceNode.analyse_types(self, env, skip_children=True) + node = node._create_merge_node_if_necessary(env) + if not node.is_sequence_constructor: + return node + + if not all(child.is_literal for child in node.args): + return node + if not node.mult_factor or ( + node.mult_factor.is_literal and + isinstance(node.mult_factor.constant_result, _py_int_types)): + node.is_temp = False + node.is_literal = True + else: + if not node.mult_factor.type.is_pyobject: + node.mult_factor = node.mult_factor.coerce_to_pyobject(env) + node.is_temp = True + node.is_partly_literal = True + return node + + def analyse_as_type(self, env): + # ctuple type + if not self.args: + return None + item_types = [arg.analyse_as_type(env) for arg in self.args] + if any(t is None for t in item_types): + return None + entry = env.declare_tuple_type(self.pos, item_types) + return entry.type + + def coerce_to(self, dst_type, env): + if self.type.is_ctuple: + if dst_type.is_ctuple and self.type.size == dst_type.size: + return self.coerce_to_ctuple(dst_type, env) + elif dst_type is tuple_type or dst_type is py_object_type: + coerced_args = [arg.coerce_to_pyobject(env) for arg in self.args] + return TupleNode(self.pos, args=coerced_args, type=tuple_type, is_temp=1).analyse_types(env, skip_children=True) + else: + return self.coerce_to_pyobject(env).coerce_to(dst_type, env) + elif dst_type.is_ctuple and not self.mult_factor: + return self.coerce_to_ctuple(dst_type, env) + else: + return SequenceNode.coerce_to(self, dst_type, env) + + def as_list(self): + t = ListNode(self.pos, args=self.args, mult_factor=self.mult_factor) + if isinstance(self.constant_result, tuple): + t.constant_result = list(self.constant_result) + return t + + def is_simple(self): + # either temp or constant => always simple + return True + + def nonlocally_immutable(self): + # either temp or constant => always safe + return True + + def calculate_result_code(self): + if len(self.args) > 0: + return self.result_code + else: + return Naming.empty_tuple + + def calculate_constant_result(self): + self.constant_result = tuple([ + arg.constant_result for arg in self.args]) + + def compile_time_value(self, denv): + values = self.compile_time_value_list(denv) + try: + return tuple(values) + except Exception as e: + self.compile_time_value_error(e) + + def generate_operation_code(self, code): + if len(self.args) == 0: + # result_code is Naming.empty_tuple + return + + if self.is_literal or self.is_partly_literal: + # The "mult_factor" is part of 
the deduplication if it is also constant, i.e. when
+            # we deduplicate the multiplied result. Otherwise, only deduplicate the constant part.
+            dedup_key = make_dedup_key(self.type, [self.mult_factor if self.is_literal else None] + self.args)
+            tuple_target = code.get_py_const(py_object_type, 'tuple', cleanup_level=2, dedup_key=dedup_key)
+            const_code = code.get_cached_constants_writer(tuple_target)
+            if const_code is not None:
+                # constant is not yet initialised
+                const_code.mark_pos(self.pos)
+                self.generate_sequence_packing_code(const_code, tuple_target, plain=not self.is_literal)
+                const_code.put_giveref(tuple_target)
+            if self.is_literal:
+                self.result_code = tuple_target
+            else:
+                code.putln('%s = PyNumber_Multiply(%s, %s); %s' % (
+                    self.result(), tuple_target, self.mult_factor.py_result(),
+                    code.error_goto_if_null(self.result(), self.pos)
+                ))
+                code.put_gotref(self.py_result())
+        else:
+            self.type.entry.used = True
+            self.generate_sequence_packing_code(code)
+
+
+class ListNode(SequenceNode):
+    # List constructor.
+
+    # obj_conversion_errors    [PyrexError]   used internally
+    # original_args            [ExprNode]     used internally
+
+    obj_conversion_errors = []
+    type = list_type
+    in_module_scope = False
+
+    gil_message = "Constructing Python list"
+
+    def type_dependencies(self, env):
+        return ()
+
+    def infer_type(self, env):
+        # TODO: Infer non-object list arrays.
+        return list_type
+
+    def analyse_expressions(self, env):
+        for arg in self.args:
+            if arg.is_starred:
+                arg.starred_expr_allowed_here = True
+        node = SequenceNode.analyse_expressions(self, env)
+        return node.coerce_to_pyobject(env)
+
+    def analyse_types(self, env):
+        with local_errors(ignore=True) as errors:
+            self.original_args = list(self.args)
+            node = SequenceNode.analyse_types(self, env)
+        node.obj_conversion_errors = errors
+        if env.is_module_scope:
+            self.in_module_scope = True
+        node = node._create_merge_node_if_necessary(env)
+        return node
+
+    def coerce_to(self, dst_type, env):
+        if dst_type.is_pyobject:
+            for err in self.obj_conversion_errors:
+                report_error(err)
+            self.obj_conversion_errors = []
+            if not self.type.subtype_of(dst_type):
+                error(self.pos, "Cannot coerce list to type '%s'" % dst_type)
+        elif (dst_type.is_array or dst_type.is_ptr) and dst_type.base_type is not PyrexTypes.c_void_type:
+            array_length = len(self.args)
+            if self.mult_factor:
+                if isinstance(self.mult_factor.constant_result, _py_int_types):
+                    if self.mult_factor.constant_result <= 0:
+                        error(self.pos, "Cannot coerce non-positively multiplied list to '%s'" % dst_type)
+                    else:
+                        array_length *= self.mult_factor.constant_result
+                else:
+                    error(self.pos, "Cannot coerce dynamically multiplied list to '%s'" % dst_type)
+            base_type = dst_type.base_type
+            self.type = PyrexTypes.CArrayType(base_type, array_length)
+            for i in range(len(self.original_args)):
+                arg = self.args[i]
+                if isinstance(arg, CoerceToPyTypeNode):
+                    arg = arg.arg
+                self.args[i] = arg.coerce_to(base_type, env)
+        elif dst_type.is_cpp_class:
+            # TODO(robertwb): Avoid object conversion for vector/list/set.
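+            # Illustrative example (assuming ``from libcpp.vector cimport vector``):
+            # an assignment such as
+            #
+            #     cdef vector[int] v = [1, 2, 3]
+            #
+            # first builds the list as a Python object and then converts it
+            # to the C++ container via the cast below.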
+ return TypecastNode(self.pos, operand=self, type=PyrexTypes.py_object_type).coerce_to(dst_type, env) + elif self.mult_factor: + error(self.pos, "Cannot coerce multiplied list to '%s'" % dst_type) + elif dst_type.is_struct: + if len(self.args) > len(dst_type.scope.var_entries): + error(self.pos, "Too many members for '%s'" % dst_type) + else: + if len(self.args) < len(dst_type.scope.var_entries): + warning(self.pos, "Too few members for '%s'" % dst_type, 1) + for i, (arg, member) in enumerate(zip(self.original_args, dst_type.scope.var_entries)): + if isinstance(arg, CoerceToPyTypeNode): + arg = arg.arg + self.args[i] = arg.coerce_to(member.type, env) + self.type = dst_type + elif dst_type.is_ctuple: + return self.coerce_to_ctuple(dst_type, env) + else: + self.type = error_type + error(self.pos, "Cannot coerce list to type '%s'" % dst_type) + return self + + def as_list(self): # dummy for compatibility with TupleNode + return self + + def as_tuple(self): + t = TupleNode(self.pos, args=self.args, mult_factor=self.mult_factor) + if isinstance(self.constant_result, list): + t.constant_result = tuple(self.constant_result) + return t + + def allocate_temp_result(self, code): + if self.type.is_array and self.in_module_scope: + self.temp_code = code.funcstate.allocate_temp( + self.type, manage_ref=False, static=True) + else: + SequenceNode.allocate_temp_result(self, code) + + def release_temp_result(self, env): + if self.type.is_array: + # To be valid C++, we must allocate the memory on the stack + # manually and be sure not to reuse it for something else. + # Yes, this means that we leak a temp array variable. + pass + else: + SequenceNode.release_temp_result(self, env) + + def calculate_constant_result(self): + if self.mult_factor: + raise ValueError() # may exceed the compile time memory + self.constant_result = [ + arg.constant_result for arg in self.args] + + def compile_time_value(self, denv): + l = self.compile_time_value_list(denv) + if self.mult_factor: + l *= self.mult_factor.compile_time_value(denv) + return l + + def generate_operation_code(self, code): + if self.type.is_pyobject: + for err in self.obj_conversion_errors: + report_error(err) + self.generate_sequence_packing_code(code) + elif self.type.is_array: + if self.mult_factor: + code.putln("{") + code.putln("Py_ssize_t %s;" % Naming.quick_temp_cname) + code.putln("for ({i} = 0; {i} < {count}; {i}++) {{".format( + i=Naming.quick_temp_cname, count=self.mult_factor.result())) + offset = '+ (%d * %s)' % (len(self.args), Naming.quick_temp_cname) + else: + offset = '' + for i, arg in enumerate(self.args): + if arg.type.is_array: + code.globalstate.use_utility_code(UtilityCode.load_cached("IncludeStringH", "StringTools.c")) + code.putln("memcpy(&(%s[%s%s]), %s, sizeof(%s[0]));" % ( + self.result(), i, offset, + arg.result(), self.result() + )) + else: + code.putln("%s[%s%s] = %s;" % ( + self.result(), + i, + offset, + arg.result())) + if self.mult_factor: + code.putln("}") + code.putln("}") + elif self.type.is_struct: + for arg, member in zip(self.args, self.type.scope.var_entries): + code.putln("%s.%s = %s;" % ( + self.result(), + member.cname, + arg.result())) + else: + raise InternalError("List type never specified") + + +class ScopedExprNode(ExprNode): + # Abstract base class for ExprNodes that have their own local + # scope, such as generator expressions. + # + # expr_scope Scope the inner scope of the expression + + subexprs = [] + expr_scope = None + + # does this node really have a local scope, e.g. 
does it leak loop + # variables or not? non-leaking Py3 behaviour is default, except + # for list comprehensions where the behaviour differs in Py2 and + # Py3 (set in Parsing.py based on parser context) + has_local_scope = True + + def init_scope(self, outer_scope, expr_scope=None): + if expr_scope is not None: + self.expr_scope = expr_scope + elif self.has_local_scope: + self.expr_scope = Symtab.GeneratorExpressionScope(outer_scope) + else: + self.expr_scope = None + + def analyse_declarations(self, env): + self.init_scope(env) + + def analyse_scoped_declarations(self, env): + # this is called with the expr_scope as env + pass + + def analyse_types(self, env): + # no recursion here, the children will be analysed separately below + return self + + def analyse_scoped_expressions(self, env): + # this is called with the expr_scope as env + return self + + def generate_evaluation_code(self, code): + # set up local variables and free their references on exit + generate_inner_evaluation_code = super(ScopedExprNode, self).generate_evaluation_code + if not self.has_local_scope or not self.expr_scope.var_entries: + # no local variables => delegate, done + generate_inner_evaluation_code(code) + return + + code.putln('{ /* enter inner scope */') + py_entries = [] + for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]): + if not entry.in_closure: + if entry.type.is_pyobject and entry.used: + py_entries.append(entry) + if not py_entries: + # no local Python references => no cleanup required + generate_inner_evaluation_code(code) + code.putln('} /* exit inner scope */') + return + + # must free all local Python references at each exit point + old_loop_labels = code.new_loop_labels() + old_error_label = code.new_error_label() + + generate_inner_evaluation_code(code) + + # normal (non-error) exit + self._generate_vars_cleanup(code, py_entries) + + # error/loop body exit points + exit_scope = code.new_label('exit_scope') + code.put_goto(exit_scope) + for label, old_label in ([(code.error_label, old_error_label)] + + list(zip(code.get_loop_labels(), old_loop_labels))): + if code.label_used(label): + code.put_label(label) + self._generate_vars_cleanup(code, py_entries) + code.put_goto(old_label) + code.put_label(exit_scope) + code.putln('} /* exit inner scope */') + + code.set_loop_labels(old_loop_labels) + code.error_label = old_error_label + + def _generate_vars_cleanup(self, code, py_entries): + for entry in py_entries: + if entry.is_cglobal: + code.put_var_gotref(entry) + code.put_decref_set(entry.cname, "Py_None") + else: + code.put_var_xdecref_clear(entry) + + +class ComprehensionNode(ScopedExprNode): + # A list/set/dict comprehension + + child_attrs = ["loop"] + + is_temp = True + constant_result = not_a_constant + + def infer_type(self, env): + return self.type + + def analyse_declarations(self, env): + self.append.target = self # this is used in the PyList_Append of the inner loop + self.init_scope(env) + + def analyse_scoped_declarations(self, env): + self.loop.analyse_declarations(env) + + def analyse_types(self, env): + if not self.has_local_scope: + self.loop = self.loop.analyse_expressions(env) + return self + + def analyse_scoped_expressions(self, env): + if self.has_local_scope: + self.loop = self.loop.analyse_expressions(env) + return self + + def may_be_none(self): + return False + + def generate_result_code(self, code): + self.generate_operation_code(code) + + def generate_operation_code(self, code): + if self.type is Builtin.list_type: + create_code = 
'PyList_New(0)' + elif self.type is Builtin.set_type: + create_code = 'PySet_New(NULL)' + elif self.type is Builtin.dict_type: + create_code = 'PyDict_New()' + else: + raise InternalError("illegal type for comprehension: %s" % self.type) + code.putln('%s = %s; %s' % ( + self.result(), create_code, + code.error_goto_if_null(self.result(), self.pos))) + + code.put_gotref(self.result()) + self.loop.generate_execution_code(code) + + def annotate(self, code): + self.loop.annotate(code) + + +class ComprehensionAppendNode(Node): + # Need to be careful to avoid infinite recursion: + # target must not be in child_attrs/subexprs + + child_attrs = ['expr'] + target = None + + type = PyrexTypes.c_int_type + + def analyse_expressions(self, env): + self.expr = self.expr.analyse_expressions(env) + if not self.expr.type.is_pyobject: + self.expr = self.expr.coerce_to_pyobject(env) + return self + + def generate_execution_code(self, code): + if self.target.type is list_type: + code.globalstate.use_utility_code( + UtilityCode.load_cached("ListCompAppend", "Optimize.c")) + function = "__Pyx_ListComp_Append" + elif self.target.type is set_type: + function = "PySet_Add" + else: + raise InternalError( + "Invalid type for comprehension node: %s" % self.target.type) + + self.expr.generate_evaluation_code(code) + code.putln(code.error_goto_if("%s(%s, (PyObject*)%s)" % ( + function, + self.target.result(), + self.expr.result() + ), self.pos)) + self.expr.generate_disposal_code(code) + self.expr.free_temps(code) + + def generate_function_definitions(self, env, code): + self.expr.generate_function_definitions(env, code) + + def annotate(self, code): + self.expr.annotate(code) + +class DictComprehensionAppendNode(ComprehensionAppendNode): + child_attrs = ['key_expr', 'value_expr'] + + def analyse_expressions(self, env): + self.key_expr = self.key_expr.analyse_expressions(env) + if not self.key_expr.type.is_pyobject: + self.key_expr = self.key_expr.coerce_to_pyobject(env) + self.value_expr = self.value_expr.analyse_expressions(env) + if not self.value_expr.type.is_pyobject: + self.value_expr = self.value_expr.coerce_to_pyobject(env) + return self + + def generate_execution_code(self, code): + self.key_expr.generate_evaluation_code(code) + self.value_expr.generate_evaluation_code(code) + code.putln(code.error_goto_if("PyDict_SetItem(%s, (PyObject*)%s, (PyObject*)%s)" % ( + self.target.result(), + self.key_expr.result(), + self.value_expr.result() + ), self.pos)) + self.key_expr.generate_disposal_code(code) + self.key_expr.free_temps(code) + self.value_expr.generate_disposal_code(code) + self.value_expr.free_temps(code) + + def generate_function_definitions(self, env, code): + self.key_expr.generate_function_definitions(env, code) + self.value_expr.generate_function_definitions(env, code) + + def annotate(self, code): + self.key_expr.annotate(code) + self.value_expr.annotate(code) + + +class InlinedGeneratorExpressionNode(ExprNode): + # An inlined generator expression for which the result is calculated + # inside of the loop and returned as a single, first and only Generator + # return value. + # This will only be created by transforms when replacing safe builtin + # calls on generator expressions. 
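+    #
+    # Illustrative examples of calls that may be replaced this way:
+    #
+    #     any(x > 0 for x in seq)
+    #     sorted(x*x for x in seq)
+    #
+    # so that the result is accumulated directly inside the generated loop
+    # rather than through the yield protocol.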
+ # + # gen GeneratorExpressionNode the generator, not containing any YieldExprNodes + # orig_func String the name of the builtin function this node replaces + # target ExprNode or None a 'target' for a ComprehensionAppend node + + subexprs = ["gen"] + orig_func = None + target = None + is_temp = True + type = py_object_type + + def __init__(self, pos, gen, comprehension_type=None, **kwargs): + gbody = gen.def_node.gbody + gbody.is_inlined = True + if comprehension_type is not None: + assert comprehension_type in (list_type, set_type, dict_type), comprehension_type + gbody.inlined_comprehension_type = comprehension_type + kwargs.update( + target=RawCNameExprNode(pos, comprehension_type, Naming.retval_cname), + type=comprehension_type, + ) + super(InlinedGeneratorExpressionNode, self).__init__(pos, gen=gen, **kwargs) + + def may_be_none(self): + return self.orig_func not in ('any', 'all', 'sorted') + + def infer_type(self, env): + return self.type + + def analyse_types(self, env): + self.gen = self.gen.analyse_expressions(env) + return self + + def generate_result_code(self, code): + code.putln("%s = __Pyx_Generator_Next(%s); %s" % ( + self.result(), self.gen.result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.result()) + + +class MergedSequenceNode(ExprNode): + """ + Merge a sequence of iterables into a set/list/tuple. + + The target collection is determined by self.type, which must be set externally. + + args [ExprNode] + """ + subexprs = ['args'] + is_temp = True + gil_message = "Constructing Python collection" + + def __init__(self, pos, args, type): + if type in (list_type, tuple_type) and args and args[0].is_sequence_constructor: + # construct a list directly from the first argument that we can then extend + if args[0].type is not list_type: + args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True) + ExprNode.__init__(self, pos, args=args, type=type) + + def calculate_constant_result(self): + result = [] + for item in self.args: + if item.is_sequence_constructor and item.mult_factor: + if item.mult_factor.constant_result <= 0: + continue + # otherwise, adding each item once should be enough + if item.is_set_literal or item.is_sequence_constructor: + # process items in order + items = (arg.constant_result for arg in item.args) + else: + items = item.constant_result + result.extend(items) + if self.type is set_type: + result = set(result) + elif self.type is tuple_type: + result = tuple(result) + else: + assert self.type is list_type + self.constant_result = result + + def compile_time_value(self, denv): + result = [] + for item in self.args: + if item.is_sequence_constructor and item.mult_factor: + if item.mult_factor.compile_time_value(denv) <= 0: + continue + if item.is_set_literal or item.is_sequence_constructor: + # process items in order + items = (arg.compile_time_value(denv) for arg in item.args) + else: + items = item.compile_time_value(denv) + result.extend(items) + if self.type is set_type: + try: + result = set(result) + except Exception as e: + self.compile_time_value_error(e) + elif self.type is tuple_type: + result = tuple(result) + else: + assert self.type is list_type + return result + + def type_dependencies(self, env): + return () + + def infer_type(self, env): + return self.type + + def analyse_types(self, env): + args = [ + arg.analyse_types(env).coerce_to_pyobject(env).as_none_safe_node( + # FIXME: CPython's error message starts with the runtime function name + 'argument after * must be an iterable, not NoneType') + for 
arg in self.args + ] + + if len(args) == 1 and args[0].type is self.type: + # strip this intermediate node and use the bare collection + return args[0] + + assert self.type in (set_type, list_type, tuple_type) + + self.args = args + return self + + def may_be_none(self): + return False + + def generate_evaluation_code(self, code): + code.mark_pos(self.pos) + self.allocate_temp_result(code) + + is_set = self.type is set_type + + args = iter(self.args) + item = next(args) + item.generate_evaluation_code(code) + if (is_set and item.is_set_literal or + not is_set and item.is_sequence_constructor and item.type is list_type): + code.putln("%s = %s;" % (self.result(), item.py_result())) + item.generate_post_assignment_code(code) + else: + code.putln("%s = %s(%s); %s" % ( + self.result(), + 'PySet_New' if is_set else 'PySequence_List', + item.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + item.generate_disposal_code(code) + item.free_temps(code) + + helpers = set() + if is_set: + add_func = "PySet_Add" + extend_func = "__Pyx_PySet_Update" + else: + add_func = "__Pyx_ListComp_Append" + extend_func = "__Pyx_PyList_Extend" + + for item in args: + if (is_set and (item.is_set_literal or item.is_sequence_constructor) or + (item.is_sequence_constructor and not item.mult_factor)): + if not is_set and item.args: + helpers.add(("ListCompAppend", "Optimize.c")) + for arg in item.args: + arg.generate_evaluation_code(code) + code.put_error_if_neg(arg.pos, "%s(%s, %s)" % ( + add_func, + self.result(), + arg.py_result())) + arg.generate_disposal_code(code) + arg.free_temps(code) + continue + + if is_set: + helpers.add(("PySet_Update", "Builtins.c")) + else: + helpers.add(("ListExtend", "Optimize.c")) + + item.generate_evaluation_code(code) + code.put_error_if_neg(item.pos, "%s(%s, %s)" % ( + extend_func, + self.result(), + item.py_result())) + item.generate_disposal_code(code) + item.free_temps(code) + + if self.type is tuple_type: + code.putln("{") + code.putln("PyObject *%s = PyList_AsTuple(%s);" % ( + Naming.quick_temp_cname, + self.result())) + code.put_decref(self.result(), py_object_type) + code.putln("%s = %s; %s" % ( + self.result(), + Naming.quick_temp_cname, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.result()) + code.putln("}") + + for helper in sorted(helpers): + code.globalstate.use_utility_code(UtilityCode.load_cached(*helper)) + + def annotate(self, code): + for item in self.args: + item.annotate(code) + + +class SetNode(ExprNode): + """ + Set constructor. 
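+
+    For example (illustrative), a set literal such as ``{a, b, c}`` is
+    evaluated by creating an empty set and adding each item in order,
+    roughly equivalent to ``s = set(); s.add(a); s.add(b); s.add(c)``.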
+ """ + subexprs = ['args'] + type = set_type + is_set_literal = True + gil_message = "Constructing Python set" + + def analyse_types(self, env): + for i in range(len(self.args)): + arg = self.args[i] + arg = arg.analyse_types(env) + self.args[i] = arg.coerce_to_pyobject(env) + self.type = set_type + self.is_temp = 1 + return self + + def may_be_none(self): + return False + + def calculate_constant_result(self): + self.constant_result = set([arg.constant_result for arg in self.args]) + + def compile_time_value(self, denv): + values = [arg.compile_time_value(denv) for arg in self.args] + try: + return set(values) + except Exception as e: + self.compile_time_value_error(e) + + def generate_evaluation_code(self, code): + for arg in self.args: + arg.generate_evaluation_code(code) + self.allocate_temp_result(code) + code.putln( + "%s = PySet_New(0); %s" % ( + self.result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + for arg in self.args: + code.put_error_if_neg( + self.pos, + "PySet_Add(%s, %s)" % (self.result(), arg.py_result())) + arg.generate_disposal_code(code) + arg.free_temps(code) + + +class DictNode(ExprNode): + # Dictionary constructor. + # + # key_value_pairs [DictItemNode] + # exclude_null_values [boolean] Do not add NULL values to dict + # + # obj_conversion_errors [PyrexError] used internally + + subexprs = ['key_value_pairs'] + is_temp = 1 + exclude_null_values = False + type = dict_type + is_dict_literal = True + reject_duplicates = False + + obj_conversion_errors = [] + + @classmethod + def from_pairs(cls, pos, pairs): + return cls(pos, key_value_pairs=[ + DictItemNode(pos, key=k, value=v) for k, v in pairs]) + + def calculate_constant_result(self): + self.constant_result = dict([ + item.constant_result for item in self.key_value_pairs]) + + def compile_time_value(self, denv): + pairs = [(item.key.compile_time_value(denv), item.value.compile_time_value(denv)) + for item in self.key_value_pairs] + try: + return dict(pairs) + except Exception as e: + self.compile_time_value_error(e) + + def type_dependencies(self, env): + return () + + def infer_type(self, env): + # TODO: Infer struct constructors. 
+ return dict_type + + def analyse_types(self, env): + with local_errors(ignore=True) as errors: + self.key_value_pairs = [ + item.analyse_types(env) + for item in self.key_value_pairs + ] + self.obj_conversion_errors = errors + return self + + def may_be_none(self): + return False + + def coerce_to(self, dst_type, env): + if dst_type.is_pyobject: + self.release_errors() + if self.type.is_struct_or_union: + if not dict_type.subtype_of(dst_type): + error(self.pos, "Cannot interpret struct as non-dict type '%s'" % dst_type) + return DictNode(self.pos, key_value_pairs=[ + DictItemNode(item.pos, key=item.key.coerce_to_pyobject(env), + value=item.value.coerce_to_pyobject(env)) + for item in self.key_value_pairs]) + if not self.type.subtype_of(dst_type): + error(self.pos, "Cannot interpret dict as type '%s'" % dst_type) + elif dst_type.is_struct_or_union: + self.type = dst_type + if not dst_type.is_struct and len(self.key_value_pairs) != 1: + error(self.pos, "Exactly one field must be specified to convert to union '%s'" % dst_type) + elif dst_type.is_struct and len(self.key_value_pairs) < len(dst_type.scope.var_entries): + warning(self.pos, "Not all members given for struct '%s'" % dst_type, 1) + for item in self.key_value_pairs: + if isinstance(item.key, CoerceToPyTypeNode): + item.key = item.key.arg + if not item.key.is_string_literal: + error(item.key.pos, "Invalid struct field identifier") + item.key = StringNode(item.key.pos, value="") + else: + key = str(item.key.value) # converts string literals to unicode in Py3 + member = dst_type.scope.lookup_here(key) + if not member: + error(item.key.pos, "struct '%s' has no field '%s'" % (dst_type, key)) + else: + value = item.value + if isinstance(value, CoerceToPyTypeNode): + value = value.arg + item.value = value.coerce_to(member.type, env) + else: + self.type = error_type + error(self.pos, "Cannot interpret dict as type '%s'" % dst_type) + return self + + def release_errors(self): + for err in self.obj_conversion_errors: + report_error(err) + self.obj_conversion_errors = [] + + gil_message = "Constructing Python dict" + + def generate_evaluation_code(self, code): + # Custom method used here because key-value + # pairs are evaluated and used one at a time. 
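+        # Illustrative example: when reject_duplicates is set (per the code
+        # below, currently for keyword arguments in function calls), a
+        # duplicated key as in
+        #
+        #     f(a=1, **{'a': 2})
+        #
+        # ends up raising the usual "multiple values for keyword argument"
+        # TypeError via __Pyx_RaiseDoubleKeywordsError.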
+ code.mark_pos(self.pos) + self.allocate_temp_result(code) + + is_dict = self.type.is_pyobject + if is_dict: + self.release_errors() + code.putln( + "%s = __Pyx_PyDict_NewPresized(%d); %s" % ( + self.result(), + len(self.key_value_pairs), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + keys_seen = set() + key_type = None + needs_error_helper = False + + for item in self.key_value_pairs: + item.generate_evaluation_code(code) + if is_dict: + if self.exclude_null_values: + code.putln('if (%s) {' % item.value.py_result()) + key = item.key + if self.reject_duplicates: + if keys_seen is not None: + # avoid runtime 'in' checks for literals that we can do at compile time + if not key.is_string_literal: + keys_seen = None + elif key.value in keys_seen: + # FIXME: this could be a compile time error, at least in Cython code + keys_seen = None + elif key_type is not type(key.value): + if key_type is None: + key_type = type(key.value) + keys_seen.add(key.value) + else: + # different types => may not be able to compare at compile time + keys_seen = None + else: + keys_seen.add(key.value) + + if keys_seen is None: + code.putln('if (unlikely(PyDict_Contains(%s, %s))) {' % ( + self.result(), key.py_result())) + # currently only used in function calls + needs_error_helper = True + code.putln('__Pyx_RaiseDoubleKeywordsError("function", %s); %s' % ( + key.py_result(), + code.error_goto(item.pos))) + code.putln("} else {") + + code.put_error_if_neg(self.pos, "PyDict_SetItem(%s, %s, %s)" % ( + self.result(), + item.key.py_result(), + item.value.py_result())) + if self.reject_duplicates and keys_seen is None: + code.putln('}') + if self.exclude_null_values: + code.putln('}') + else: + code.putln("%s.%s = %s;" % ( + self.result(), + item.key.value, + item.value.result())) + item.generate_disposal_code(code) + item.free_temps(code) + + if needs_error_helper: + code.globalstate.use_utility_code( + UtilityCode.load_cached("RaiseDoubleKeywords", "FunctionArguments.c")) + + def annotate(self, code): + for item in self.key_value_pairs: + item.annotate(code) + + +class DictItemNode(ExprNode): + # Represents a single item in a DictNode + # + # key ExprNode + # value ExprNode + subexprs = ['key', 'value'] + + nogil_check = None # Parent DictNode takes care of it + + def calculate_constant_result(self): + self.constant_result = ( + self.key.constant_result, self.value.constant_result) + + def analyse_types(self, env): + self.key = self.key.analyse_types(env) + self.value = self.value.analyse_types(env) + self.key = self.key.coerce_to_pyobject(env) + self.value = self.value.coerce_to_pyobject(env) + return self + + def generate_evaluation_code(self, code): + self.key.generate_evaluation_code(code) + self.value.generate_evaluation_code(code) + + def generate_disposal_code(self, code): + self.key.generate_disposal_code(code) + self.value.generate_disposal_code(code) + + def free_temps(self, code): + self.key.free_temps(code) + self.value.free_temps(code) + + def __iter__(self): + return iter([self.key, self.value]) + + +class SortedDictKeysNode(ExprNode): + # build sorted list of dict keys, e.g. 
for dir() + subexprs = ['arg'] + + is_temp = True + + def __init__(self, arg): + ExprNode.__init__(self, arg.pos, arg=arg) + self.type = Builtin.list_type + + def analyse_types(self, env): + arg = self.arg.analyse_types(env) + if arg.type is Builtin.dict_type: + arg = arg.as_none_safe_node( + "'NoneType' object is not iterable") + self.arg = arg + return self + + def may_be_none(self): + return False + + def generate_result_code(self, code): + dict_result = self.arg.py_result() + if self.arg.type is Builtin.dict_type: + code.putln('%s = PyDict_Keys(%s); %s' % ( + self.result(), dict_result, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + else: + # originally used PyMapping_Keys() here, but that may return a tuple + code.globalstate.use_utility_code(UtilityCode.load_cached( + 'PyObjectCallMethod0', 'ObjectHandling.c')) + keys_cname = code.intern_identifier(StringEncoding.EncodedString("keys")) + code.putln('%s = __Pyx_PyObject_CallMethod0(%s, %s); %s' % ( + self.result(), dict_result, keys_cname, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + code.putln("if (unlikely(!PyList_Check(%s))) {" % self.result()) + code.put_decref_set(self.result(), "PySequence_List(%s)" % self.result()) + code.putln(code.error_goto_if_null(self.result(), self.pos)) + code.put_gotref(self.py_result()) + code.putln("}") + code.put_error_if_neg( + self.pos, 'PyList_Sort(%s)' % self.py_result()) + + +class ModuleNameMixin(object): + def get_py_mod_name(self, code): + return code.get_py_string_const( + self.module_name, identifier=True) + + def get_py_qualified_name(self, code): + return code.get_py_string_const( + self.qualname, identifier=True) + + +class ClassNode(ExprNode, ModuleNameMixin): + # Helper class used in the implementation of Python + # class definitions. Constructs a class object given + # a name, tuple of bases and class dictionary. + # + # name EncodedString Name of the class + # class_def_node PyClassDefNode PyClassDefNode defining this class + # doc ExprNode or None Doc string + # module_name EncodedString Name of defining module + + subexprs = ['doc'] + type = py_object_type + is_temp = True + + def infer_type(self, env): + # TODO: could return 'type' in some cases + return py_object_type + + def analyse_types(self, env): + if self.doc: + self.doc = self.doc.analyse_types(env) + self.doc = self.doc.coerce_to_pyobject(env) + env.use_utility_code(UtilityCode.load_cached("CreateClass", "ObjectHandling.c")) + return self + + def may_be_none(self): + return True + + gil_message = "Constructing Python class" + + def generate_result_code(self, code): + class_def_node = self.class_def_node + cname = code.intern_identifier(self.name) + + if self.doc: + code.put_error_if_neg(self.pos, + 'PyDict_SetItem(%s, %s, %s)' % ( + class_def_node.dict.py_result(), + code.intern_identifier( + StringEncoding.EncodedString("__doc__")), + self.doc.py_result())) + py_mod_name = self.get_py_mod_name(code) + qualname = self.get_py_qualified_name(code) + code.putln( + '%s = __Pyx_CreateClass(%s, %s, %s, %s, %s); %s' % ( + self.result(), + class_def_node.bases.py_result(), + class_def_node.dict.py_result(), + cname, + qualname, + py_mod_name, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +class Py3ClassNode(ExprNode): + # Helper class used in the implementation of Python3+ + # class definitions. Constructs a class object given + # a name, tuple of bases and class dictionary. 
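+    #
+    # Illustrative example: a Python 3 class statement such as
+    #
+    #     class C(Base, metaclass=Meta, extra=1):
+    #         ...
+    #
+    # becomes a __Pyx_Py3ClassCreate() call receiving the metaclass, name,
+    # bases tuple, class dict and keyword dict.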
+    #
+    # name                 EncodedString   Name of the class
+    # module_name          EncodedString   Name of defining module
+    # class_def_node       PyClassDefNode  PyClassDefNode defining this class
+    # calculate_metaclass  bool            should call CalculateMetaclass()
+    # allow_py2_metaclass  bool            should look for Py2 metaclass
+
+    subexprs = []
+    type = py_object_type
+    is_temp = True
+
+    def infer_type(self, env):
+        # TODO: could return 'type' in some cases
+        return py_object_type
+
+    def analyse_types(self, env):
+        return self
+
+    def may_be_none(self):
+        return True
+
+    gil_message = "Constructing Python class"
+
+    def generate_result_code(self, code):
+        code.globalstate.use_utility_code(UtilityCode.load_cached("Py3ClassCreate", "ObjectHandling.c"))
+        cname = code.intern_identifier(self.name)
+        class_def_node = self.class_def_node
+        mkw = class_def_node.mkw.py_result() if class_def_node.mkw else 'NULL'
+        if class_def_node.metaclass:
+            metaclass = class_def_node.metaclass.py_result()
+        else:
+            metaclass = "((PyObject*)&__Pyx_DefaultClassType)"
+        code.putln(
+            '%s = __Pyx_Py3ClassCreate(%s, %s, %s, %s, %s, %d, %d); %s' % (
+                self.result(),
+                metaclass,
+                cname,
+                class_def_node.bases.py_result(),
+                class_def_node.dict.py_result(),
+                mkw,
+                self.calculate_metaclass,
+                self.allow_py2_metaclass,
+                code.error_goto_if_null(self.result(), self.pos)))
+        code.put_gotref(self.py_result())
+
+
+class PyClassMetaclassNode(ExprNode):
+    # Helper class that holds the Python3 metaclass object
+    #
+    # class_def_node  PyClassDefNode  PyClassDefNode defining this class
+
+    subexprs = []
+
+    def analyse_types(self, env):
+        self.type = py_object_type
+        self.is_temp = True
+        return self
+
+    def may_be_none(self):
+        return True
+
+    def generate_result_code(self, code):
+        bases = self.class_def_node.bases
+        mkw = self.class_def_node.mkw
+        if mkw:
+            code.globalstate.use_utility_code(
+                UtilityCode.load_cached("Py3MetaclassGet", "ObjectHandling.c"))
+            call = "__Pyx_Py3MetaclassGet(%s, %s)" % (
+                bases.result(),
+                mkw.result())
+        else:
+            code.globalstate.use_utility_code(
+                UtilityCode.load_cached("CalculateMetaclass", "ObjectHandling.c"))
+            call = "__Pyx_CalculateMetaclass(NULL, %s)" % (
+                bases.result())
+        code.putln(
+            "%s = %s; %s" % (
+                self.result(), call,
+                code.error_goto_if_null(self.result(), self.pos)))
+        code.put_gotref(self.py_result())
+
+
+class PyClassNamespaceNode(ExprNode, ModuleNameMixin):
+    # Helper class that holds the Python3 namespace object
+    #
+    # These are not owned by this node:
+    #  class_def_node  PyClassDefNode    PyClassDefNode defining this class
+    #  doc             ExprNode or None  Doc string (owned)
+
+    subexprs = ['doc']
+
+    def analyse_types(self, env):
+        if self.doc:
+            self.doc = self.doc.analyse_types(env).coerce_to_pyobject(env)
+        self.type = py_object_type
+        self.is_temp = 1
+        return self
+
+    def may_be_none(self):
+        return True
+
+    def generate_result_code(self, code):
+        cname = code.intern_identifier(self.name)
+        py_mod_name = self.get_py_mod_name(code)
+        qualname = self.get_py_qualified_name(code)
+        class_def_node = self.class_def_node
+        null = "(PyObject *) NULL"
+        doc_code = self.doc.result() if self.doc else null
+        mkw = class_def_node.mkw.py_result() if class_def_node.mkw else null
+        metaclass = class_def_node.metaclass.py_result() if class_def_node.metaclass else null
+        code.putln(
+            "%s = __Pyx_Py3MetaclassPrepare(%s, %s, %s, %s, %s, %s, %s); %s" % (
+                self.result(),
+                metaclass,
+                class_def_node.bases.result(),
+                cname,
+                qualname,
+                mkw,
+                py_mod_name,
+                doc_code,
+                code.error_goto_if_null(self.result(), self.pos)))
+
code.put_gotref(self.py_result()) + + +class ClassCellInjectorNode(ExprNode): + # Initialize CyFunction.func_classobj + is_temp = True + type = py_object_type + subexprs = [] + is_active = False + + def analyse_expressions(self, env): + return self + + def generate_result_code(self, code): + assert self.is_active + code.putln( + '%s = PyList_New(0); %s' % ( + self.result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.result()) + + def generate_injection_code(self, code, classobj_cname): + assert self.is_active + code.globalstate.use_utility_code( + UtilityCode.load_cached("CyFunctionClassCell", "CythonFunction.c")) + code.put_error_if_neg(self.pos, '__Pyx_CyFunction_InitClassCell(%s, %s)' % ( + self.result(), classobj_cname)) + + +class ClassCellNode(ExprNode): + # Class Cell for noargs super() + subexprs = [] + is_temp = True + is_generator = False + type = py_object_type + + def analyse_types(self, env): + return self + + def generate_result_code(self, code): + if not self.is_generator: + code.putln('%s = __Pyx_CyFunction_GetClassObj(%s);' % ( + self.result(), + Naming.self_cname)) + else: + code.putln('%s = %s->classobj;' % ( + self.result(), Naming.generator_cname)) + code.putln( + 'if (!%s) { PyErr_SetString(PyExc_SystemError, ' + '"super(): empty __class__ cell"); %s }' % ( + self.result(), + code.error_goto(self.pos))) + code.put_incref(self.result(), py_object_type) + + +class PyCFunctionNode(ExprNode, ModuleNameMixin): + # Helper class used in the implementation of Python + # functions. Constructs a PyCFunction object + # from a PyMethodDef struct. + # + # pymethdef_cname string PyMethodDef structure + # self_object ExprNode or None + # binding bool + # def_node DefNode the Python function node + # module_name EncodedString Name of defining module + # code_object CodeObjectNode the PyCodeObject creator node + + subexprs = ['code_object', 'defaults_tuple', 'defaults_kwdict', + 'annotations_dict'] + + self_object = None + code_object = None + binding = False + def_node = None + defaults = None + defaults_struct = None + defaults_pyobjects = 0 + defaults_tuple = None + defaults_kwdict = None + annotations_dict = None + + type = py_object_type + is_temp = 1 + + specialized_cpdefs = None + is_specialization = False + + @classmethod + def from_defnode(cls, node, binding): + return cls(node.pos, + def_node=node, + pymethdef_cname=node.entry.pymethdef_cname, + binding=binding or node.specialized_cpdefs, + specialized_cpdefs=node.specialized_cpdefs, + code_object=CodeObjectNode(node)) + + def analyse_types(self, env): + if self.binding: + self.analyse_default_args(env) + return self + + def analyse_default_args(self, env): + """ + Handle non-literal function's default arguments. + """ + nonliteral_objects = [] + nonliteral_other = [] + default_args = [] + default_kwargs = [] + annotations = [] + + # For global cpdef functions and def/cpdef methods in cdef classes, we must use global constants + # for default arguments to avoid the dependency on the CyFunction object as 'self' argument + # in the underlying C function. Basically, cpdef functions/methods are static C functions, + # so their optional arguments must be static, too. 
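+        # Illustrative example: for
+        #
+        #     cdef class A:
+        #         cpdef f(self, x=SOME_DEFAULT):   # SOME_DEFAULT: any non-literal
+        #             ...
+        #
+        # the default value cannot be looked up on a CyFunction instance,
+        # so it has to live in a global constant instead.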
+ # TODO: change CyFunction implementation to pass both function object and owning object for method calls + must_use_constants = env.is_c_class_scope or (self.def_node.is_wrapper and env.is_module_scope) + + for arg in self.def_node.args: + if arg.default and not must_use_constants: + if not arg.default.is_literal: + arg.is_dynamic = True + if arg.type.is_pyobject: + nonliteral_objects.append(arg) + else: + nonliteral_other.append(arg) + else: + arg.default = DefaultLiteralArgNode(arg.pos, arg.default) + if arg.kw_only: + default_kwargs.append(arg) + else: + default_args.append(arg) + if arg.annotation: + arg.annotation = self.analyse_annotation(env, arg.annotation) + annotations.append((arg.pos, arg.name, arg.annotation)) + + for arg in (self.def_node.star_arg, self.def_node.starstar_arg): + if arg and arg.annotation: + arg.annotation = self.analyse_annotation(env, arg.annotation) + annotations.append((arg.pos, arg.name, arg.annotation)) + + annotation = self.def_node.return_type_annotation + if annotation: + annotation = self.analyse_annotation(env, annotation) + self.def_node.return_type_annotation = annotation + annotations.append((annotation.pos, StringEncoding.EncodedString("return"), annotation)) + + if nonliteral_objects or nonliteral_other: + module_scope = env.global_scope() + cname = module_scope.next_id(Naming.defaults_struct_prefix) + scope = Symtab.StructOrUnionScope(cname) + self.defaults = [] + for arg in nonliteral_objects: + entry = scope.declare_var(arg.name, arg.type, None, + Naming.arg_prefix + arg.name, + allow_pyobject=True) + self.defaults.append((arg, entry)) + for arg in nonliteral_other: + entry = scope.declare_var(arg.name, arg.type, None, + Naming.arg_prefix + arg.name, + allow_pyobject=False, allow_memoryview=True) + self.defaults.append((arg, entry)) + entry = module_scope.declare_struct_or_union( + None, 'struct', scope, 1, None, cname=cname) + self.defaults_struct = scope + self.defaults_pyobjects = len(nonliteral_objects) + for arg, entry in self.defaults: + arg.default_value = '%s->%s' % ( + Naming.dynamic_args_cname, entry.cname) + self.def_node.defaults_struct = self.defaults_struct.name + + if default_args or default_kwargs: + if self.defaults_struct is None: + if default_args: + defaults_tuple = TupleNode(self.pos, args=[ + arg.default for arg in default_args]) + self.defaults_tuple = defaults_tuple.analyse_types(env).coerce_to_pyobject(env) + if default_kwargs: + defaults_kwdict = DictNode(self.pos, key_value_pairs=[ + DictItemNode( + arg.pos, + key=IdentifierStringNode(arg.pos, value=arg.name), + value=arg.default) + for arg in default_kwargs]) + self.defaults_kwdict = defaults_kwdict.analyse_types(env) + else: + if default_args: + defaults_tuple = DefaultsTupleNode( + self.pos, default_args, self.defaults_struct) + else: + defaults_tuple = NoneNode(self.pos) + if default_kwargs: + defaults_kwdict = DefaultsKwDictNode( + self.pos, default_kwargs, self.defaults_struct) + else: + defaults_kwdict = NoneNode(self.pos) + + defaults_getter = Nodes.DefNode( + self.pos, args=[], star_arg=None, starstar_arg=None, + body=Nodes.ReturnStatNode( + self.pos, return_type=py_object_type, + value=TupleNode( + self.pos, args=[defaults_tuple, defaults_kwdict])), + decorators=None, + name=StringEncoding.EncodedString("__defaults__")) + # defaults getter must never live in class scopes, it's always a module function + module_scope = env.global_scope() + defaults_getter.analyse_declarations(module_scope) + defaults_getter = 
defaults_getter.analyse_expressions(module_scope) + defaults_getter.body = defaults_getter.body.analyse_expressions( + defaults_getter.local_scope) + defaults_getter.py_wrapper_required = False + defaults_getter.pymethdef_required = False + self.def_node.defaults_getter = defaults_getter + if annotations: + annotations_dict = DictNode(self.pos, key_value_pairs=[ + DictItemNode( + pos, key=IdentifierStringNode(pos, value=name), + value=value) + for pos, name, value in annotations]) + self.annotations_dict = annotations_dict.analyse_types(env) + + def analyse_annotation(self, env, annotation): + if annotation is None: + return None + atype = annotation.analyse_as_type(env) + if atype is not None: + # Keep parsed types as strings as they might not be Python representable. + annotation = UnicodeNode( + annotation.pos, + value=StringEncoding.EncodedString(atype.declaration_code('', for_display=True))) + annotation = annotation.analyse_types(env) + if not annotation.type.is_pyobject: + annotation = annotation.coerce_to_pyobject(env) + return annotation + + def may_be_none(self): + return False + + gil_message = "Constructing Python function" + + def self_result_code(self): + if self.self_object is None: + self_result = "NULL" + else: + self_result = self.self_object.py_result() + return self_result + + def generate_result_code(self, code): + if self.binding: + self.generate_cyfunction_code(code) + else: + self.generate_pycfunction_code(code) + + def generate_pycfunction_code(self, code): + py_mod_name = self.get_py_mod_name(code) + code.putln( + '%s = PyCFunction_NewEx(&%s, %s, %s); %s' % ( + self.result(), + self.pymethdef_cname, + self.self_result_code(), + py_mod_name, + code.error_goto_if_null(self.result(), self.pos))) + + code.put_gotref(self.py_result()) + + def generate_cyfunction_code(self, code): + if self.specialized_cpdefs: + def_node = self.specialized_cpdefs[0] + else: + def_node = self.def_node + + if self.specialized_cpdefs or self.is_specialization: + code.globalstate.use_utility_code( + UtilityCode.load_cached("FusedFunction", "CythonFunction.c")) + constructor = "__pyx_FusedFunction_New" + else: + code.globalstate.use_utility_code( + UtilityCode.load_cached("CythonFunction", "CythonFunction.c")) + constructor = "__Pyx_CyFunction_New" + + if self.code_object: + code_object_result = self.code_object.py_result() + else: + code_object_result = 'NULL' + + flags = [] + if def_node.is_staticmethod: + flags.append('__Pyx_CYFUNCTION_STATICMETHOD') + elif def_node.is_classmethod: + flags.append('__Pyx_CYFUNCTION_CLASSMETHOD') + + if def_node.local_scope.parent_scope.is_c_class_scope and not def_node.entry.is_anonymous: + flags.append('__Pyx_CYFUNCTION_CCLASS') + + if flags: + flags = ' | '.join(flags) + else: + flags = '0' + + code.putln( + '%s = %s(&%s, %s, %s, %s, %s, %s, %s); %s' % ( + self.result(), + constructor, + self.pymethdef_cname, + flags, + self.get_py_qualified_name(code), + self.self_result_code(), + self.get_py_mod_name(code), + Naming.moddict_cname, + code_object_result, + code.error_goto_if_null(self.result(), self.pos))) + + code.put_gotref(self.py_result()) + + if def_node.requires_classobj: + assert code.pyclass_stack, "pyclass_stack is empty" + class_node = code.pyclass_stack[-1] + code.put_incref(self.py_result(), py_object_type) + code.putln( + 'PyList_Append(%s, %s);' % ( + class_node.class_cell.result(), + self.result())) + code.put_giveref(self.py_result()) + + if self.defaults: + code.putln( + 'if (!__Pyx_CyFunction_InitDefaults(%s, sizeof(%s), %d)) %s' % ( + 
self.result(), self.defaults_struct.name, + self.defaults_pyobjects, code.error_goto(self.pos))) + defaults = '__Pyx_CyFunction_Defaults(%s, %s)' % ( + self.defaults_struct.name, self.result()) + for arg, entry in self.defaults: + arg.generate_assignment_code(code, target='%s->%s' % ( + defaults, entry.cname)) + + if self.defaults_tuple: + code.putln('__Pyx_CyFunction_SetDefaultsTuple(%s, %s);' % ( + self.result(), self.defaults_tuple.py_result())) + if self.defaults_kwdict: + code.putln('__Pyx_CyFunction_SetDefaultsKwDict(%s, %s);' % ( + self.result(), self.defaults_kwdict.py_result())) + if def_node.defaults_getter and not self.specialized_cpdefs: + # Fused functions do not support dynamic defaults, only their specialisations can have them for now. + code.putln('__Pyx_CyFunction_SetDefaultsGetter(%s, %s);' % ( + self.result(), def_node.defaults_getter.entry.pyfunc_cname)) + if self.annotations_dict: + code.putln('__Pyx_CyFunction_SetAnnotationsDict(%s, %s);' % ( + self.result(), self.annotations_dict.py_result())) + + +class InnerFunctionNode(PyCFunctionNode): + # Special PyCFunctionNode that depends on a closure class + # + + binding = True + needs_self_code = True + + def self_result_code(self): + if self.needs_self_code: + return "((PyObject*)%s)" % Naming.cur_scope_cname + return "NULL" + + +class CodeObjectNode(ExprNode): + # Create a PyCodeObject for a CyFunction instance. + # + # def_node DefNode the Python function node + # varnames TupleNode a tuple with all local variable names + + subexprs = ['varnames'] + is_temp = False + result_code = None + + def __init__(self, def_node): + ExprNode.__init__(self, def_node.pos, def_node=def_node) + args = list(def_node.args) + # if we have args/kwargs, then the first two in var_entries are those + local_vars = [arg for arg in def_node.local_scope.var_entries if arg.name] + self.varnames = TupleNode( + def_node.pos, + args=[IdentifierStringNode(arg.pos, value=arg.name) + for arg in args + local_vars], + is_temp=0, + is_literal=1) + + def may_be_none(self): + return False + + def calculate_result_code(self, code=None): + if self.result_code is None: + self.result_code = code.get_py_const(py_object_type, 'codeobj', cleanup_level=2) + return self.result_code + + def generate_result_code(self, code): + if self.result_code is None: + self.result_code = code.get_py_const(py_object_type, 'codeobj', cleanup_level=2) + + code = code.get_cached_constants_writer(self.result_code) + if code is None: + return # already initialised + code.mark_pos(self.pos) + func = self.def_node + func_name = code.get_py_string_const( + func.name, identifier=True, is_str=False, unicode_value=func.name) + # FIXME: better way to get the module file path at module init time? Encoding to use? + file_path = StringEncoding.bytes_literal(func.pos[0].get_filenametable_entry().encode('utf8'), 'utf8') + file_path_const = code.get_py_string_const(file_path, identifier=False, is_str=True) + + # This combination makes CPython create a new dict for "frame.f_locals" (see GH #1836). 
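+        # Illustrative example: the same flags can be observed on CPython
+        # code objects:
+        #
+        #     import inspect
+        #     def f(*args, **kwargs): pass
+        #     assert f.__code__.co_flags & inspect.CO_VARARGS
+        #     assert f.__code__.co_flags & inspect.CO_VARKEYWORDS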
+ flags = ['CO_OPTIMIZED', 'CO_NEWLOCALS'] + + if self.def_node.star_arg: + flags.append('CO_VARARGS') + if self.def_node.starstar_arg: + flags.append('CO_VARKEYWORDS') + + code.putln("%s = (PyObject*)__Pyx_PyCode_New(%d, %d, %d, 0, %s, %s, %s, %s, %s, %s, %s, %s, %s, %d, %s); %s" % ( + self.result_code, + len(func.args) - func.num_kwonly_args, # argcount + func.num_kwonly_args, # kwonlyargcount (Py3 only) + len(self.varnames.args), # nlocals + '|'.join(flags) or '0', # flags + Naming.empty_bytes, # code + Naming.empty_tuple, # consts + Naming.empty_tuple, # names (FIXME) + self.varnames.result(), # varnames + Naming.empty_tuple, # freevars (FIXME) + Naming.empty_tuple, # cellvars (FIXME) + file_path_const, # filename + func_name, # name + self.pos[1], # firstlineno + Naming.empty_bytes, # lnotab + code.error_goto_if_null(self.result_code, self.pos), + )) + + +class DefaultLiteralArgNode(ExprNode): + # CyFunction's literal argument default value + # + # Evaluate literal only once. + + subexprs = [] + is_literal = True + is_temp = False + + def __init__(self, pos, arg): + super(DefaultLiteralArgNode, self).__init__(pos) + self.arg = arg + self.type = self.arg.type + self.evaluated = False + + def analyse_types(self, env): + return self + + def generate_result_code(self, code): + pass + + def generate_evaluation_code(self, code): + if not self.evaluated: + self.arg.generate_evaluation_code(code) + self.evaluated = True + + def result(self): + return self.type.cast_code(self.arg.result()) + + +class DefaultNonLiteralArgNode(ExprNode): + # CyFunction's non-literal argument default value + + subexprs = [] + + def __init__(self, pos, arg, defaults_struct): + super(DefaultNonLiteralArgNode, self).__init__(pos) + self.arg = arg + self.defaults_struct = defaults_struct + + def analyse_types(self, env): + self.type = self.arg.type + self.is_temp = False + return self + + def generate_result_code(self, code): + pass + + def result(self): + return '__Pyx_CyFunction_Defaults(%s, %s)->%s' % ( + self.defaults_struct.name, Naming.self_cname, + self.defaults_struct.lookup(self.arg.name).cname) + + +class DefaultsTupleNode(TupleNode): + # CyFunction's __defaults__ tuple + + def __init__(self, pos, defaults, defaults_struct): + args = [] + for arg in defaults: + if not arg.default.is_literal: + arg = DefaultNonLiteralArgNode(pos, arg, defaults_struct) + else: + arg = arg.default + args.append(arg) + super(DefaultsTupleNode, self).__init__(pos, args=args) + + def analyse_types(self, env, skip_children=False): + return super(DefaultsTupleNode, self).analyse_types(env, skip_children).coerce_to_pyobject(env) + + +class DefaultsKwDictNode(DictNode): + # CyFunction's __kwdefaults__ dict + + def __init__(self, pos, defaults, defaults_struct): + items = [] + for arg in defaults: + name = IdentifierStringNode(arg.pos, value=arg.name) + if not arg.default.is_literal: + arg = DefaultNonLiteralArgNode(pos, arg, defaults_struct) + else: + arg = arg.default + items.append(DictItemNode(arg.pos, key=name, value=arg)) + super(DefaultsKwDictNode, self).__init__(pos, key_value_pairs=items) + + +class LambdaNode(InnerFunctionNode): + # Lambda expression node (only used as a function reference) + # + # args [CArgDeclNode] formal arguments + # star_arg PyArgDeclNode or None * argument + # starstar_arg PyArgDeclNode or None ** argument + # lambda_name string a module-globally unique lambda name + # result_expr ExprNode + # def_node DefNode the underlying function 'def' node + + child_attrs = ['def_node'] + + name = 
StringEncoding.EncodedString('') + + def analyse_declarations(self, env): + self.lambda_name = self.def_node.lambda_name = env.next_id('lambda') + self.def_node.no_assignment_synthesis = True + self.def_node.pymethdef_required = True + self.def_node.analyse_declarations(env) + self.def_node.is_cyfunction = True + self.pymethdef_cname = self.def_node.entry.pymethdef_cname + env.add_lambda_def(self.def_node) + + def analyse_types(self, env): + self.def_node = self.def_node.analyse_expressions(env) + return super(LambdaNode, self).analyse_types(env) + + def generate_result_code(self, code): + self.def_node.generate_execution_code(code) + super(LambdaNode, self).generate_result_code(code) + + +class GeneratorExpressionNode(LambdaNode): + # A generator expression, e.g. (i for i in range(10)) + # + # Result is a generator. + # + # loop ForStatNode the for-loop, containing a YieldExprNode + # def_node DefNode the underlying generator 'def' node + + name = StringEncoding.EncodedString('genexpr') + binding = False + + def analyse_declarations(self, env): + self.genexpr_name = env.next_id('genexpr') + super(GeneratorExpressionNode, self).analyse_declarations(env) + # No pymethdef required + self.def_node.pymethdef_required = False + self.def_node.py_wrapper_required = False + self.def_node.is_cyfunction = False + # Force genexpr signature + self.def_node.entry.signature = TypeSlots.pyfunction_noargs + + def generate_result_code(self, code): + code.putln( + '%s = %s(%s); %s' % ( + self.result(), + self.def_node.entry.pyfunc_cname, + self.self_result_code(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + +class YieldExprNode(ExprNode): + # Yield expression node + # + # arg ExprNode the value to return from the generator + # label_num integer yield label number + # is_yield_from boolean is a YieldFromExprNode to delegate to another generator + + subexprs = ['arg'] + type = py_object_type + label_num = 0 + is_yield_from = False + is_await = False + in_async_gen = False + expr_keyword = 'yield' + + def analyse_types(self, env): + if not self.label_num or (self.is_yield_from and self.in_async_gen): + error(self.pos, "'%s' not supported here" % self.expr_keyword) + self.is_temp = 1 + if self.arg is not None: + self.arg = self.arg.analyse_types(env) + if not self.arg.type.is_pyobject: + self.coerce_yield_argument(env) + return self + + def coerce_yield_argument(self, env): + self.arg = self.arg.coerce_to_pyobject(env) + + def generate_evaluation_code(self, code): + if self.arg: + self.arg.generate_evaluation_code(code) + self.arg.make_owned_reference(code) + code.putln( + "%s = %s;" % ( + Naming.retval_cname, + self.arg.result_as(py_object_type))) + self.arg.generate_post_assignment_code(code) + self.arg.free_temps(code) + else: + code.put_init_to_py_none(Naming.retval_cname, py_object_type) + self.generate_yield_code(code) + + def generate_yield_code(self, code): + """ + Generate the code to return the argument in 'Naming.retval_cname' + and to continue at the yield label. 
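+
+        Illustratively, the Python-level behaviour being implemented is:
+
+            def gen():
+                received = yield 1   # suspend here; resume on send()/next()
+
+            g = gen()
+            next(g)       # advances to the yield and returns 1
+            g.send(42)    # resumes with received = 42 (then StopIteration)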
+ """ + label_num, label_name = code.new_yield_label( + self.expr_keyword.replace(' ', '_')) + code.use_label(label_name) + + saved = [] + code.funcstate.closure_temps.reset() + for cname, type, manage_ref in code.funcstate.temps_in_use(): + save_cname = code.funcstate.closure_temps.allocate_temp(type) + saved.append((cname, save_cname, type)) + if type.is_pyobject: + code.put_xgiveref(cname) + code.putln('%s->%s = %s;' % (Naming.cur_scope_cname, save_cname, cname)) + + code.put_xgiveref(Naming.retval_cname) + profile = code.globalstate.directives['profile'] + linetrace = code.globalstate.directives['linetrace'] + if profile or linetrace: + code.put_trace_return(Naming.retval_cname, + nogil=not code.funcstate.gil_owned) + code.put_finish_refcount_context() + + if code.funcstate.current_except is not None: + # inside of an except block => save away currently handled exception + code.putln("__Pyx_Coroutine_SwapException(%s);" % Naming.generator_cname) + else: + # no exceptions being handled => restore exception state of caller + code.putln("__Pyx_Coroutine_ResetAndClearException(%s);" % Naming.generator_cname) + + code.putln("/* return from %sgenerator, %sing value */" % ( + 'async ' if self.in_async_gen else '', + 'await' if self.is_await else 'yield')) + code.putln("%s->resume_label = %d;" % ( + Naming.generator_cname, label_num)) + if self.in_async_gen and not self.is_await: + # __Pyx__PyAsyncGenValueWrapperNew() steals a reference to the return value + code.putln("return __Pyx__PyAsyncGenValueWrapperNew(%s);" % Naming.retval_cname) + else: + code.putln("return %s;" % Naming.retval_cname) + + code.put_label(label_name) + for cname, save_cname, type in saved: + code.putln('%s = %s->%s;' % (cname, Naming.cur_scope_cname, save_cname)) + if type.is_pyobject: + code.putln('%s->%s = 0;' % (Naming.cur_scope_cname, save_cname)) + code.put_xgotref(cname) + self.generate_sent_value_handling_code(code, Naming.sent_value_cname) + if self.result_is_used: + self.allocate_temp_result(code) + code.put('%s = %s; ' % (self.result(), Naming.sent_value_cname)) + code.put_incref(self.result(), py_object_type) + + def generate_sent_value_handling_code(self, code, value_cname): + code.putln(code.error_goto_if_null(value_cname, self.pos)) + + +class _YieldDelegationExprNode(YieldExprNode): + def yield_from_func(self, code): + raise NotImplementedError() + + def generate_evaluation_code(self, code, source_cname=None, decref_source=False): + if source_cname is None: + self.arg.generate_evaluation_code(code) + code.putln("%s = %s(%s, %s);" % ( + Naming.retval_cname, + self.yield_from_func(code), + Naming.generator_cname, + self.arg.py_result() if source_cname is None else source_cname)) + if source_cname is None: + self.arg.generate_disposal_code(code) + self.arg.free_temps(code) + elif decref_source: + code.put_decref_clear(source_cname, py_object_type) + code.put_xgotref(Naming.retval_cname) + + code.putln("if (likely(%s)) {" % Naming.retval_cname) + self.generate_yield_code(code) + code.putln("} else {") + # either error or sub-generator has normally terminated: return value => node result + if self.result_is_used: + self.fetch_iteration_result(code) + else: + self.handle_iteration_exception(code) + code.putln("}") + + def fetch_iteration_result(self, code): + # YieldExprNode has allocated the result temp for us + code.putln("%s = NULL;" % self.result()) + code.put_error_if_neg(self.pos, "__Pyx_PyGen_FetchStopIterationValue(&%s)" % self.result()) + code.put_gotref(self.result()) + + def 
handle_iteration_exception(self, code): + code.putln("PyObject* exc_type = __Pyx_PyErr_Occurred();") + code.putln("if (exc_type) {") + code.putln("if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit &&" + " __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear();") + code.putln("else %s" % code.error_goto(self.pos)) + code.putln("}") + + +class YieldFromExprNode(_YieldDelegationExprNode): + # "yield from GEN" expression + is_yield_from = True + expr_keyword = 'yield from' + + def coerce_yield_argument(self, env): + if not self.arg.type.is_string: + # FIXME: support C arrays and C++ iterators? + error(self.pos, "yielding from non-Python object not supported") + self.arg = self.arg.coerce_to_pyobject(env) + + def yield_from_func(self, code): + code.globalstate.use_utility_code(UtilityCode.load_cached("GeneratorYieldFrom", "Coroutine.c")) + return "__Pyx_Generator_Yield_From" + + +class AwaitExprNode(_YieldDelegationExprNode): + # 'await' expression node + # + # arg ExprNode the Awaitable value to await + # label_num integer yield label number + + is_await = True + expr_keyword = 'await' + + def coerce_yield_argument(self, env): + if self.arg is not None: + # FIXME: use same check as in YieldFromExprNode.coerce_yield_argument() ? + self.arg = self.arg.coerce_to_pyobject(env) + + def yield_from_func(self, code): + code.globalstate.use_utility_code(UtilityCode.load_cached("CoroutineYieldFrom", "Coroutine.c")) + return "__Pyx_Coroutine_Yield_From" + + +class AwaitIterNextExprNode(AwaitExprNode): + # 'await' expression node as part of 'async for' iteration + # + # Breaks out of loop on StopAsyncIteration exception. + + def _generate_break(self, code): + code.globalstate.use_utility_code(UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c")) + code.putln("PyObject* exc_type = __Pyx_PyErr_Occurred();") + code.putln("if (unlikely(exc_type && (exc_type == __Pyx_PyExc_StopAsyncIteration || (" + " exc_type != PyExc_StopIteration && exc_type != PyExc_GeneratorExit &&" + " __Pyx_PyErr_GivenExceptionMatches(exc_type, __Pyx_PyExc_StopAsyncIteration))))) {") + code.putln("PyErr_Clear();") + code.putln("break;") + code.putln("}") + + def fetch_iteration_result(self, code): + assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop" + self._generate_break(code) + super(AwaitIterNextExprNode, self).fetch_iteration_result(code) + + def generate_sent_value_handling_code(self, code, value_cname): + assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop" + code.putln("if (unlikely(!%s)) {" % value_cname) + self._generate_break(code) + # all non-break exceptions are errors, as in parent class + code.putln(code.error_goto(self.pos)) + code.putln("}") + + +class GlobalsExprNode(AtomicExprNode): + type = dict_type + is_temp = 1 + + def analyse_types(self, env): + env.use_utility_code(Builtin.globals_utility_code) + return self + + gil_message = "Constructing globals dict" + + def may_be_none(self): + return False + + def generate_result_code(self, code): + code.putln('%s = __Pyx_Globals(); %s' % ( + self.result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.result()) + + +class LocalsDictItemNode(DictItemNode): + def analyse_types(self, env): + self.key = self.key.analyse_types(env) + self.value = self.value.analyse_types(env) + self.key = self.key.coerce_to_pyobject(env) + if self.value.type.can_coerce_to_pyobject(env): + self.value = self.value.coerce_to_pyobject(env) + else: + 
self.value = None + return self + + +class FuncLocalsExprNode(DictNode): + def __init__(self, pos, env): + local_vars = sorted([ + entry.name for entry in env.entries.values() if entry.name]) + items = [LocalsDictItemNode( + pos, key=IdentifierStringNode(pos, value=var), + value=NameNode(pos, name=var, allow_null=True)) + for var in local_vars] + DictNode.__init__(self, pos, key_value_pairs=items, + exclude_null_values=True) + + def analyse_types(self, env): + node = super(FuncLocalsExprNode, self).analyse_types(env) + node.key_value_pairs = [ i for i in node.key_value_pairs + if i.value is not None ] + return node + + +class PyClassLocalsExprNode(AtomicExprNode): + def __init__(self, pos, pyclass_dict): + AtomicExprNode.__init__(self, pos) + self.pyclass_dict = pyclass_dict + + def analyse_types(self, env): + self.type = self.pyclass_dict.type + self.is_temp = False + return self + + def may_be_none(self): + return False + + def result(self): + return self.pyclass_dict.result() + + def generate_result_code(self, code): + pass + + +def LocalsExprNode(pos, scope_node, env): + if env.is_module_scope: + return GlobalsExprNode(pos) + if env.is_py_class_scope: + return PyClassLocalsExprNode(pos, scope_node.dict) + return FuncLocalsExprNode(pos, env) + + +#------------------------------------------------------------------- +# +# Unary operator nodes +# +#------------------------------------------------------------------- + +compile_time_unary_operators = { + 'not': operator.not_, + '~': operator.inv, + '-': operator.neg, + '+': operator.pos, +} + +class UnopNode(ExprNode): + # operator string + # operand ExprNode + # + # Processing during analyse_expressions phase: + # + # analyse_c_operation + # Called when the operand is not a pyobject. + # - Check operand type and coerce if needed. + # - Determine result type and result code fragment. + # - Allocate temporary for result if needed. 
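+    #
+    # For example, with ``cdef int i`` the expression ``-i`` stays a
+    # pure C operation and is emitted inline as ``(-i)``, whereas for a
+    # Python object ``x`` the same operator is dispatched through
+    # ``PyNumber_Negative(x)`` (see UnaryMinusNode below).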
+ + subexprs = ['operand'] + infix = True + + def calculate_constant_result(self): + func = compile_time_unary_operators[self.operator] + self.constant_result = func(self.operand.constant_result) + + def compile_time_value(self, denv): + func = compile_time_unary_operators.get(self.operator) + if not func: + error(self.pos, + "Unary '%s' not supported in compile-time expression" + % self.operator) + operand = self.operand.compile_time_value(denv) + try: + return func(operand) + except Exception as e: + self.compile_time_value_error(e) + + def infer_type(self, env): + operand_type = self.operand.infer_type(env) + if operand_type.is_cpp_class or operand_type.is_ptr: + cpp_type = operand_type.find_cpp_operation_type(self.operator) + if cpp_type is not None: + return cpp_type + return self.infer_unop_type(env, operand_type) + + def infer_unop_type(self, env, operand_type): + if operand_type.is_pyobject: + return py_object_type + else: + return operand_type + + def may_be_none(self): + if self.operand.type and self.operand.type.is_builtin_type: + if self.operand.type is not type_type: + return False + return ExprNode.may_be_none(self) + + def analyse_types(self, env): + self.operand = self.operand.analyse_types(env) + if self.is_pythran_operation(env): + self.type = PythranExpr(pythran_unaryop_type(self.operator, self.operand.type)) + self.is_temp = 1 + elif self.is_py_operation(): + self.coerce_operand_to_pyobject(env) + self.type = py_object_type + self.is_temp = 1 + elif self.is_cpp_operation(): + self.analyse_cpp_operation(env) + else: + self.analyse_c_operation(env) + return self + + def check_const(self): + return self.operand.check_const() + + def is_py_operation(self): + return self.operand.type.is_pyobject or self.operand.type.is_ctuple + + def is_pythran_operation(self, env): + np_pythran = has_np_pythran(env) + op_type = self.operand.type + return np_pythran and (op_type.is_buffer or op_type.is_pythran_expr) + + def nogil_check(self, env): + if self.is_py_operation(): + self.gil_error() + + def is_cpp_operation(self): + type = self.operand.type + return type.is_cpp_class + + def coerce_operand_to_pyobject(self, env): + self.operand = self.operand.coerce_to_pyobject(env) + + def generate_result_code(self, code): + if self.type.is_pythran_expr: + code.putln("// Pythran unaryop") + code.putln("__Pyx_call_destructor(%s);" % self.result()) + code.putln("new (&%s) decltype(%s){%s%s};" % ( + self.result(), + self.result(), + self.operator, + self.operand.pythran_result())) + elif self.operand.type.is_pyobject: + self.generate_py_operation_code(code) + elif self.is_temp: + if self.is_cpp_operation() and self.exception_check == '+': + translate_cpp_exception(code, self.pos, + "%s = %s %s;" % (self.result(), self.operator, self.operand.result()), + self.result() if self.type.is_pyobject else None, + self.exception_value, self.in_nogil_context) + else: + code.putln("%s = %s %s;" % (self.result(), self.operator, self.operand.result())) + + def generate_py_operation_code(self, code): + function = self.py_operation_function(code) + code.putln( + "%s = %s(%s); %s" % ( + self.result(), + function, + self.operand.py_result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + + def type_error(self): + if not self.operand.type.is_error: + error(self.pos, "Invalid operand type for '%s' (%s)" % + (self.operator, self.operand.type)) + self.type = PyrexTypes.error_type + + def analyse_cpp_operation(self, env, overload_check=True): + entry = 
env.lookup_operator(self.operator, [self.operand]) + if overload_check and not entry: + self.type_error() + return + if entry: + self.exception_check = entry.type.exception_check + self.exception_value = entry.type.exception_value + if self.exception_check == '+': + self.is_temp = True + if self.exception_value is None: + env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) + else: + self.exception_check = '' + self.exception_value = '' + cpp_type = self.operand.type.find_cpp_operation_type(self.operator) + if overload_check and cpp_type is None: + error(self.pos, "'%s' operator not defined for %s" % ( + self.operator, self.operand.type)) + self.type_error() + return + self.type = cpp_type + + + class NotNode(UnopNode): + # 'not' operator + # + # operand ExprNode + operator = '!' + + type = PyrexTypes.c_bint_type + + def calculate_constant_result(self): + self.constant_result = not self.operand.constant_result + + def compile_time_value(self, denv): + operand = self.operand.compile_time_value(denv) + try: + return not operand + except Exception as e: + self.compile_time_value_error(e) + + def infer_unop_type(self, env, operand_type): + return PyrexTypes.c_bint_type + + def analyse_types(self, env): + self.operand = self.operand.analyse_types(env) + operand_type = self.operand.type + if operand_type.is_cpp_class: + self.analyse_cpp_operation(env) + else: + self.operand = self.operand.coerce_to_boolean(env) + return self + + def calculate_result_code(self): + return "(!%s)" % self.operand.result() + + + class UnaryPlusNode(UnopNode): + # unary '+' operator + + operator = '+' + + def analyse_c_operation(self, env): + self.type = PyrexTypes.widest_numeric_type( + self.operand.type, PyrexTypes.c_int_type) + + def py_operation_function(self, code): + return "PyNumber_Positive" + + def calculate_result_code(self): + if self.is_cpp_operation(): + return "(+%s)" % self.operand.result() + else: + return self.operand.result() + + + class UnaryMinusNode(UnopNode): + # unary '-' operator + + operator = '-' + + def analyse_c_operation(self, env): + if self.operand.type.is_numeric: + self.type = PyrexTypes.widest_numeric_type( + self.operand.type, PyrexTypes.c_int_type) + elif self.operand.type.is_enum: + self.type = PyrexTypes.c_int_type + else: + self.type_error() + if self.type.is_complex: + self.infix = False + + def py_operation_function(self, code): + return "PyNumber_Negative" + + def calculate_result_code(self): + if self.infix: + return "(-%s)" % self.operand.result() + else: + return "%s(%s)" % (self.operand.type.unary_op('-'), self.operand.result()) + + def get_constant_c_result_code(self): + value = self.operand.get_constant_c_result_code() + if value: + return "(-%s)" % value + + class TildeNode(UnopNode): + # unary '~' operator + + def analyse_c_operation(self, env): + if self.operand.type.is_int: + self.type = PyrexTypes.widest_numeric_type( + self.operand.type, PyrexTypes.c_int_type) + elif self.operand.type.is_enum: + self.type = PyrexTypes.c_int_type + else: + self.type_error() + + def py_operation_function(self, code): + return "PyNumber_Invert" + + def calculate_result_code(self): + return "(~%s)" % self.operand.result() + + + class CUnopNode(UnopNode): + + def is_py_operation(self): + return False + + class DereferenceNode(CUnopNode): + # unary * operator + + operator = '*' + + def infer_unop_type(self, env, operand_type): + if operand_type.is_ptr: + return operand_type.base_type + else: + return PyrexTypes.error_type + + def analyse_c_operation(self, env): + if self.operand.type.is_ptr: + self.type = self.operand.type.base_type + else: + self.type_error() + + def calculate_result_code(self): + return "(*%s)" % self.operand.result() + + + class DecrementIncrementNode(CUnopNode): + # unary ++/-- operator + + def analyse_c_operation(self, env): + if self.operand.type.is_numeric: + self.type = PyrexTypes.widest_numeric_type( + self.operand.type, PyrexTypes.c_int_type) + elif self.operand.type.is_ptr: + self.type = self.operand.type + else: + self.type_error() + + def calculate_result_code(self): + if self.is_prefix: + return "(%s%s)" % (self.operator, self.operand.result()) + else: + return "(%s%s)" % (self.operand.result(), self.operator) + + def inc_dec_constructor(is_prefix, operator): + return lambda pos, **kwds: DecrementIncrementNode(pos, is_prefix=is_prefix, operator=operator, **kwds) + + + class AmpersandNode(CUnopNode): + # The C address-of operator. + # + # operand ExprNode + operator = '&' + + def infer_unop_type(self, env, operand_type): + return PyrexTypes.c_ptr_type(operand_type) + + def analyse_types(self, env): + self.operand = self.operand.analyse_types(env) + argtype = self.operand.type + if argtype.is_cpp_class: + self.analyse_cpp_operation(env, overload_check=False) + if not (argtype.is_cfunction or argtype.is_reference or self.operand.is_addressable()): + if argtype.is_memoryviewslice: + self.error("Cannot take address of memoryview slice") + else: + self.error("Taking address of non-lvalue (type %s)" % argtype) + return self + if argtype.is_pyobject: + self.error("Cannot take address of Python %s" % ( + "variable '%s'" % self.operand.name if self.operand.is_name else + "object attribute '%s'" % self.operand.attribute if self.operand.is_attribute else + "object")) + return self + if not argtype.is_cpp_class or not self.type: + self.type = PyrexTypes.c_ptr_type(argtype) + return self + + def check_const(self): + return self.operand.check_const_addr() + + def error(self, mess): + error(self.pos, mess) + self.type = PyrexTypes.error_type + self.result_code = "" + + def calculate_result_code(self): + return "(&%s)" % self.operand.result() + + def generate_result_code(self, code): + if (self.operand.type.is_cpp_class and self.exception_check == '+'): + translate_cpp_exception(code, self.pos, + "%s = %s %s;" % (self.result(), self.operator, self.operand.result()), + self.result() if self.type.is_pyobject else None, + self.exception_value, self.in_nogil_context) + + + unop_node_classes = { + "+": UnaryPlusNode, + "-": UnaryMinusNode, + "~": TildeNode, + } + + def unop_node(pos, operator, operand): + # Construct unop node of appropriate class for + # given operator.
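+    # For example, ``unop_node(pos, '-', IntNode(value='3'))`` is folded
+    # directly into ``IntNode(value='-3')``, while a doubled sign such as
+    # ``--x`` or ``++x`` only triggers the warning below and is then
+    # compiled as two nested unary operations.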
+ if isinstance(operand, IntNode) and operator == '-': + return IntNode(pos = operand.pos, value = str(-Utils.str_to_number(operand.value)), + longness=operand.longness, unsigned=operand.unsigned) + elif isinstance(operand, UnopNode) and operand.operator == operator in '+-': + warning(pos, "Python has no increment/decrement operator: %s%sx == %s(%sx) == x" % ((operator,)*4), 5) + return unop_node_classes[operator](pos, + operator = operator, + operand = operand) + + +class TypecastNode(ExprNode): + # C type cast + # + # operand ExprNode + # base_type CBaseTypeNode + # declarator CDeclaratorNode + # typecheck boolean + # + # If used from a transform, one can if wanted specify the attribute + # "type" directly and leave base_type and declarator to None + + subexprs = ['operand'] + base_type = declarator = type = None + + def type_dependencies(self, env): + return () + + def infer_type(self, env): + if self.type is None: + base_type = self.base_type.analyse(env) + _, self.type = self.declarator.analyse(base_type, env) + return self.type + + def analyse_types(self, env): + if self.type is None: + base_type = self.base_type.analyse(env) + _, self.type = self.declarator.analyse(base_type, env) + if self.operand.has_constant_result(): + # Must be done after self.type is resolved. + self.calculate_constant_result() + if self.type.is_cfunction: + error(self.pos, + "Cannot cast to a function type") + self.type = PyrexTypes.error_type + self.operand = self.operand.analyse_types(env) + if self.type is PyrexTypes.c_bint_type: + # short circuit this to a coercion + return self.operand.coerce_to_boolean(env) + to_py = self.type.is_pyobject + from_py = self.operand.type.is_pyobject + if from_py and not to_py and self.operand.is_ephemeral(): + if not self.type.is_numeric and not self.type.is_cpp_class: + error(self.pos, "Casting temporary Python object to non-numeric non-Python type") + if to_py and not from_py: + if self.type is bytes_type and self.operand.type.is_int: + return CoerceIntToBytesNode(self.operand, env) + elif self.operand.type.can_coerce_to_pyobject(env): + self.result_ctype = py_object_type + self.operand = self.operand.coerce_to(self.type, env) + else: + if self.operand.type.is_ptr: + if not (self.operand.type.base_type.is_void or self.operand.type.base_type.is_struct): + error(self.pos, "Python objects cannot be cast from pointers of primitive types") + else: + # Should this be an error? + warning(self.pos, "No conversion from %s to %s, python object pointer used." % ( + self.operand.type, self.type)) + self.operand = self.operand.coerce_to_simple(env) + elif from_py and not to_py: + if self.type.create_from_py_utility_code(env): + self.operand = self.operand.coerce_to(self.type, env) + elif self.type.is_ptr: + if not (self.type.base_type.is_void or self.type.base_type.is_struct): + error(self.pos, "Python objects cannot be cast to pointers of primitive types") + else: + warning(self.pos, "No conversion from %s to %s, python object pointer used." % ( + self.type, self.operand.type)) + elif from_py and to_py: + if self.typecheck: + self.operand = PyTypeTestNode(self.operand, self.type, env, notnone=True) + elif isinstance(self.operand, SliceIndexNode): + # This cast can influence the created type of string slices. 
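+                # (e.g. in a cast like ``<bytes>c_string[2:5]`` the
+                # target type tells the slice which string type to
+                # build).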
+ self.operand = self.operand.coerce_to(self.type, env) + elif self.type.is_complex and self.operand.type.is_complex: + self.operand = self.operand.coerce_to_simple(env) + elif self.operand.type.is_fused: + self.operand = self.operand.coerce_to(self.type, env) + #self.type = self.operand.type + if self.type.is_ptr and self.type.base_type.is_cfunction and self.type.base_type.nogil: + op_type = self.operand.type + if op_type.is_ptr: + op_type = op_type.base_type + if op_type.is_cfunction and not op_type.nogil: + warning(self.pos, + "Casting a GIL-requiring function into a nogil function circumvents GIL validation", 1) + return self + + def is_simple(self): + # either temp or a C cast => no side effects other than the operand's + return self.operand.is_simple() + + def is_ephemeral(self): + # either temp or a C cast => no side effects other than the operand's + return self.operand.is_ephemeral() + + def nonlocally_immutable(self): + return self.is_temp or self.operand.nonlocally_immutable() + + def nogil_check(self, env): + if self.type and self.type.is_pyobject and self.is_temp: + self.gil_error() + + def check_const(self): + return self.operand.check_const() + + def calculate_constant_result(self): + self.constant_result = self.calculate_result_code(self.operand.constant_result) + + def calculate_result_code(self, operand_result = None): + if operand_result is None: + operand_result = self.operand.result() + if self.type.is_complex: + operand_result = self.operand.result() + if self.operand.type.is_complex: + real_part = self.type.real_type.cast_code("__Pyx_CREAL(%s)" % operand_result) + imag_part = self.type.real_type.cast_code("__Pyx_CIMAG(%s)" % operand_result) + else: + real_part = self.type.real_type.cast_code(operand_result) + imag_part = "0" + return "%s(%s, %s)" % ( + self.type.from_parts, + real_part, + imag_part) + else: + return self.type.cast_code(operand_result) + + def get_constant_c_result_code(self): + operand_result = self.operand.get_constant_c_result_code() + if operand_result: + return self.type.cast_code(operand_result) + + def result_as(self, type): + if self.type.is_pyobject and not self.is_temp: + # Optimise away some unnecessary casting + return self.operand.result_as(type) + else: + return ExprNode.result_as(self, type) + + def generate_result_code(self, code): + if self.is_temp: + code.putln( + "%s = (PyObject *)%s;" % ( + self.result(), + self.operand.result())) + code.put_incref(self.result(), self.ctype()) + + + ERR_START = "Start may not be given" + ERR_NOT_STOP = "Stop must be provided to indicate shape" + ERR_STEPS = ("Strides may only be given to indicate contiguity. " + "Consider slicing it after conversion") + ERR_NOT_POINTER = "Can only create cython.array from pointer or array" + ERR_BASE_TYPE = "Pointer base type does not match cython.array base type" + + + class CythonArrayNode(ExprNode): + """ + Used when a pointer of base_type is cast to a memoryviewslice with that + base type. i.e. + + <int[:M:1, :N]> p + + creates a fortran-contiguous cython.array. + + We leave the type set to object so coercions to object are more efficient + and less work. Acquiring a memoryviewslice from this will be just as + efficient.
ExprNode.coerce_to() will do the additional typecheck on + self.compile_time_type + + This also handles my_c_array + + + operand ExprNode the thing we're casting + base_type_node MemoryViewSliceTypeNode the cast expression node + """ + + subexprs = ['operand', 'shapes'] + + shapes = None + is_temp = True + mode = "c" + array_dtype = None + + shape_type = PyrexTypes.c_py_ssize_t_type + + def analyse_types(self, env): + from . import MemoryView + + self.operand = self.operand.analyse_types(env) + if self.array_dtype: + array_dtype = self.array_dtype + else: + array_dtype = self.base_type_node.base_type_node.analyse(env) + axes = self.base_type_node.axes + + self.type = error_type + self.shapes = [] + ndim = len(axes) + + # Base type of the pointer or C array we are converting + base_type = self.operand.type + + if not self.operand.type.is_ptr and not self.operand.type.is_array: + error(self.operand.pos, ERR_NOT_POINTER) + return self + + # Dimension sizes of C array + array_dimension_sizes = [] + if base_type.is_array: + while base_type.is_array: + array_dimension_sizes.append(base_type.size) + base_type = base_type.base_type + elif base_type.is_ptr: + base_type = base_type.base_type + else: + error(self.pos, "unexpected base type %s found" % base_type) + return self + + if not (base_type.same_as(array_dtype) or base_type.is_void): + error(self.operand.pos, ERR_BASE_TYPE) + return self + elif self.operand.type.is_array and len(array_dimension_sizes) != ndim: + error(self.operand.pos, + "Expected %d dimensions, array has %d dimensions" % + (ndim, len(array_dimension_sizes))) + return self + + # Verify the start, stop and step values + # In case of a C array, use the size of C array in each dimension to + # get an automatic cast + for axis_no, axis in enumerate(axes): + if not axis.start.is_none: + error(axis.start.pos, ERR_START) + return self + + if axis.stop.is_none: + if array_dimension_sizes: + dimsize = array_dimension_sizes[axis_no] + axis.stop = IntNode(self.pos, value=str(dimsize), + constant_result=dimsize, + type=PyrexTypes.c_int_type) + else: + error(axis.pos, ERR_NOT_STOP) + return self + + axis.stop = axis.stop.analyse_types(env) + shape = axis.stop.coerce_to(self.shape_type, env) + if not shape.is_literal: + shape.coerce_to_temp(env) + + self.shapes.append(shape) + + first_or_last = axis_no in (0, ndim - 1) + if not axis.step.is_none and first_or_last: + # '1' in the first or last dimension denotes F or C contiguity + axis.step = axis.step.analyse_types(env) + if (not axis.step.type.is_int and axis.step.is_literal and not + axis.step.type.is_error): + error(axis.step.pos, "Expected an integer literal") + return self + + if axis.step.compile_time_value(env) != 1: + error(axis.step.pos, ERR_STEPS) + return self + + if axis_no == 0: + self.mode = "fortran" + + elif not axis.step.is_none and not first_or_last: + # step provided in some other dimension + error(axis.step.pos, ERR_STEPS) + return self + + if not self.operand.is_name: + self.operand = self.operand.coerce_to_temp(env) + + axes = [('direct', 'follow')] * len(axes) + if self.mode == "fortran": + axes[0] = ('direct', 'contig') + else: + axes[-1] = ('direct', 'contig') + + self.coercion_type = PyrexTypes.MemoryViewSliceType(array_dtype, axes) + self.coercion_type.validate_memslice_dtype(self.pos) + self.type = self.get_cython_array_type(env) + MemoryView.use_cython_array_utility_code(env) + env.use_utility_code(MemoryView.typeinfo_to_format_code) + return self + + def allocate_temp_result(self, code): + if self.temp_code: + 
raise RuntimeError("temp allocated multiple times") + + self.temp_code = code.funcstate.allocate_temp(self.type, True) + + def infer_type(self, env): + return self.get_cython_array_type(env) + + def get_cython_array_type(self, env): + cython_scope = env.global_scope().context.cython_scope + cython_scope.load_cythonscope() + return cython_scope.viewscope.lookup("array").type + + def generate_result_code(self, code): + from . import Buffer + + shapes = [self.shape_type.cast_code(shape.result()) + for shape in self.shapes] + dtype = self.coercion_type.dtype + + shapes_temp = code.funcstate.allocate_temp(py_object_type, True) + format_temp = code.funcstate.allocate_temp(py_object_type, True) + + itemsize = "sizeof(%s)" % dtype.empty_declaration_code() + type_info = Buffer.get_type_information_cname(code, dtype) + + if self.operand.type.is_ptr: + code.putln("if (!%s) {" % self.operand.result()) + code.putln( 'PyErr_SetString(PyExc_ValueError,' + '"Cannot create cython.array from NULL pointer");') + code.putln(code.error_goto(self.operand.pos)) + code.putln("}") + + code.putln("%s = __pyx_format_from_typeinfo(&%s); %s" % ( + format_temp, + type_info, + code.error_goto_if_null(format_temp, self.pos), + )) + code.put_gotref(format_temp) + + buildvalue_fmt = " __PYX_BUILD_PY_SSIZE_T " * len(shapes) + code.putln('%s = Py_BuildValue((char*) "(" %s ")", %s); %s' % ( + shapes_temp, + buildvalue_fmt, + ", ".join(shapes), + code.error_goto_if_null(shapes_temp, self.pos), + )) + code.put_gotref(shapes_temp) + + tup = (self.result(), shapes_temp, itemsize, format_temp, + self.mode, self.operand.result()) + code.putln('%s = __pyx_array_new(' + '%s, %s, PyBytes_AS_STRING(%s), ' + '(char *) "%s", (char *) %s);' % tup) + code.putln(code.error_goto_if_null(self.result(), self.pos)) + code.put_gotref(self.result()) + + def dispose(temp): + code.put_decref_clear(temp, py_object_type) + code.funcstate.release_temp(temp) + + dispose(shapes_temp) + dispose(format_temp) + + @classmethod + def from_carray(cls, src_node, env): + """ + Given a C array type, return a CythonArrayNode + """ + pos = src_node.pos + base_type = src_node.type + + none_node = NoneNode(pos) + axes = [] + + while base_type.is_array: + axes.append(SliceNode(pos, start=none_node, stop=none_node, + step=none_node)) + base_type = base_type.base_type + axes[-1].step = IntNode(pos, value="1", is_c_literal=True) + + memslicenode = Nodes.MemoryViewSliceTypeNode(pos, axes=axes, + base_type_node=base_type) + result = CythonArrayNode(pos, base_type_node=memslicenode, + operand=src_node, array_dtype=base_type) + result = result.analyse_types(env) + return result + +class SizeofNode(ExprNode): + # Abstract base class for sizeof(x) expression nodes. 
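+    #
+    # The two concrete forms are SizeofTypeNode (``sizeof(int)``) and
+    # SizeofVarNode (``sizeof(some_var)``). Both reduce to a C
+    # ``sizeof(...)`` expression when the result code is calculated, so
+    # no runtime code is generated.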
+ + type = PyrexTypes.c_size_t_type + + def check_const(self): + return True + + def generate_result_code(self, code): + pass + + +class SizeofTypeNode(SizeofNode): + # C sizeof function applied to a type + # + # base_type CBaseTypeNode + # declarator CDeclaratorNode + + subexprs = [] + arg_type = None + + def analyse_types(self, env): + # we may have incorrectly interpreted a dotted name as a type rather than an attribute + # this could be better handled by more uniformly treating types as runtime-available objects + if 0 and self.base_type.module_path: + path = self.base_type.module_path + obj = env.lookup(path[0]) + if obj.as_module is None: + operand = NameNode(pos=self.pos, name=path[0]) + for attr in path[1:]: + operand = AttributeNode(pos=self.pos, obj=operand, attribute=attr) + operand = AttributeNode(pos=self.pos, obj=operand, attribute=self.base_type.name) + node = SizeofVarNode(self.pos, operand=operand).analyse_types(env) + return node + if self.arg_type is None: + base_type = self.base_type.analyse(env) + _, arg_type = self.declarator.analyse(base_type, env) + self.arg_type = arg_type + self.check_type() + return self + + def check_type(self): + arg_type = self.arg_type + if not arg_type: + return + if arg_type.is_pyobject and not arg_type.is_extension_type: + error(self.pos, "Cannot take sizeof Python object") + elif arg_type.is_void: + error(self.pos, "Cannot take sizeof void") + elif not arg_type.is_complete(): + error(self.pos, "Cannot take sizeof incomplete type '%s'" % arg_type) + + def calculate_result_code(self): + if self.arg_type.is_extension_type: + # the size of the pointer is boring + # we want the size of the actual struct + arg_code = self.arg_type.declaration_code("", deref=1) + else: + arg_code = self.arg_type.empty_declaration_code() + return "(sizeof(%s))" % arg_code + + +class SizeofVarNode(SizeofNode): + # C sizeof function applied to a variable + # + # operand ExprNode + + subexprs = ['operand'] + + def analyse_types(self, env): + # We may actually be looking at a type rather than a variable... + # If we are, traditional analysis would fail... 
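+        # For example, in ``sizeof(mystruct_t)`` the operand parses as a
+        # plain name; analyse_as_type() resolves it to a type, and the
+        # node then switches its class to SizeofTypeNode below.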
+ operand_as_type = self.operand.analyse_as_type(env) + if operand_as_type: + self.arg_type = operand_as_type + if self.arg_type.is_fused: + self.arg_type = self.arg_type.specialize(env.fused_to_specific) + self.__class__ = SizeofTypeNode + self.check_type() + else: + self.operand = self.operand.analyse_types(env) + return self + + def calculate_result_code(self): + return "(sizeof(%s))" % self.operand.result() + + def generate_result_code(self, code): + pass + + +class TypeidNode(ExprNode): + # C++ typeid operator applied to a type or variable + # + # operand ExprNode + # arg_type ExprNode + # is_variable boolean + + type = PyrexTypes.error_type + + subexprs = ['operand'] + + arg_type = None + is_variable = None + is_temp = 1 + + def get_type_info_type(self, env): + env_module = env + while not env_module.is_module_scope: + env_module = env_module.outer_scope + typeinfo_module = env_module.find_module('libcpp.typeinfo', self.pos) + typeinfo_entry = typeinfo_module.lookup('type_info') + return PyrexTypes.CFakeReferenceType(PyrexTypes.c_const_type(typeinfo_entry.type)) + + def analyse_types(self, env): + type_info = self.get_type_info_type(env) + if not type_info: + self.error("The 'libcpp.typeinfo' module must be cimported to use the typeid() operator") + return self + self.type = type_info + as_type = self.operand.analyse_as_type(env) + if as_type: + self.arg_type = as_type + self.is_type = True + else: + self.arg_type = self.operand.analyse_types(env) + self.is_type = False + if self.arg_type.type.is_pyobject: + self.error("Cannot use typeid on a Python object") + return self + elif self.arg_type.type.is_void: + self.error("Cannot use typeid on void") + return self + elif not self.arg_type.type.is_complete(): + self.error("Cannot use typeid on incomplete type '%s'" % self.arg_type.type) + return self + env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) + return self + + def error(self, mess): + error(self.pos, mess) + self.type = PyrexTypes.error_type + self.result_code = "" + + def check_const(self): + return True + + def calculate_result_code(self): + return self.temp_code + + def generate_result_code(self, code): + if self.is_type: + arg_code = self.arg_type.empty_declaration_code() + else: + arg_code = self.arg_type.result() + translate_cpp_exception(code, self.pos, + "%s = typeid(%s);" % (self.temp_code, arg_code), + None, None, self.in_nogil_context) + +class TypeofNode(ExprNode): + # Compile-time type of an expression, as a string. + # + # operand ExprNode + # literal StringNode # internal + + literal = None + type = py_object_type + + subexprs = ['literal'] # 'operand' will be ignored after type analysis! 
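+    # For example, ``typeof(some_double)`` becomes the constant string
+    # ``"double"``; the operand is analysed only for its type and is
+    # never evaluated at runtime.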
+ + def analyse_types(self, env): + self.operand = self.operand.analyse_types(env) + value = StringEncoding.EncodedString(str(self.operand.type)) #self.operand.type.typeof_name()) + literal = StringNode(self.pos, value=value) + literal = literal.analyse_types(env) + self.literal = literal.coerce_to_pyobject(env) + return self + + def analyse_as_type(self, env): + self.operand = self.operand.analyse_types(env) + return self.operand.type + + def may_be_none(self): + return False + + def generate_evaluation_code(self, code): + self.literal.generate_evaluation_code(code) + + def calculate_result_code(self): + return self.literal.calculate_result_code() + +#------------------------------------------------------------------- +# +# Binary operator nodes +# +#------------------------------------------------------------------- + +try: + matmul_operator = operator.matmul +except AttributeError: + def matmul_operator(a, b): + try: + func = a.__matmul__ + except AttributeError: + func = b.__rmatmul__ + return func(a, b) + +compile_time_binary_operators = { + '<': operator.lt, + '<=': operator.le, + '==': operator.eq, + '!=': operator.ne, + '>=': operator.ge, + '>': operator.gt, + 'is': operator.is_, + 'is_not': operator.is_not, + '+': operator.add, + '&': operator.and_, + '/': operator.truediv, + '//': operator.floordiv, + '<<': operator.lshift, + '%': operator.mod, + '*': operator.mul, + '|': operator.or_, + '**': operator.pow, + '>>': operator.rshift, + '-': operator.sub, + '^': operator.xor, + '@': matmul_operator, + 'in': lambda x, seq: x in seq, + 'not_in': lambda x, seq: x not in seq, +} + +def get_compile_time_binop(node): + func = compile_time_binary_operators.get(node.operator) + if not func: + error(node.pos, + "Binary '%s' not supported in compile-time expression" + % node.operator) + return func + + +class BinopNode(ExprNode): + # operator string + # operand1 ExprNode + # operand2 ExprNode + # + # Processing during analyse_expressions phase: + # + # analyse_c_operation + # Called when neither operand is a pyobject. + # - Check operand types and coerce if needed. + # - Determine result type and result code fragment. + # - Allocate temporary for result if needed. 
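+    #
+    # For example, ``a + b`` on two C ints is emitted inline as
+    # ``(a + b)``, while the same expression on Python objects goes
+    # through ``PyNumber_Add(a, b)`` (see NumBinopNode.py_functions).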
+ + subexprs = ['operand1', 'operand2'] + inplace = False + + def calculate_constant_result(self): + func = compile_time_binary_operators[self.operator] + self.constant_result = func( + self.operand1.constant_result, + self.operand2.constant_result) + + def compile_time_value(self, denv): + func = get_compile_time_binop(self) + operand1 = self.operand1.compile_time_value(denv) + operand2 = self.operand2.compile_time_value(denv) + try: + return func(operand1, operand2) + except Exception as e: + self.compile_time_value_error(e) + + def infer_type(self, env): + return self.result_type(self.operand1.infer_type(env), + self.operand2.infer_type(env), env) + + def analyse_types(self, env): + self.operand1 = self.operand1.analyse_types(env) + self.operand2 = self.operand2.analyse_types(env) + self.analyse_operation(env) + return self + + def analyse_operation(self, env): + if self.is_pythran_operation(env): + self.type = self.result_type(self.operand1.type, + self.operand2.type, env) + assert self.type.is_pythran_expr + self.is_temp = 1 + elif self.is_py_operation(): + self.coerce_operands_to_pyobjects(env) + self.type = self.result_type(self.operand1.type, + self.operand2.type, env) + assert self.type.is_pyobject + self.is_temp = 1 + elif self.is_cpp_operation(): + self.analyse_cpp_operation(env) + else: + self.analyse_c_operation(env) + + def is_py_operation(self): + return self.is_py_operation_types(self.operand1.type, self.operand2.type) + + def is_py_operation_types(self, type1, type2): + return type1.is_pyobject or type2.is_pyobject or type1.is_ctuple or type2.is_ctuple + + def is_pythran_operation(self, env): + return self.is_pythran_operation_types(self.operand1.type, self.operand2.type, env) + + def is_pythran_operation_types(self, type1, type2, env): + # Support only expr op supported_type, or supported_type op expr + return has_np_pythran(env) and \ + (is_pythran_supported_operation_type(type1) and is_pythran_supported_operation_type(type2)) and \ + (is_pythran_expr(type1) or is_pythran_expr(type2)) + + def is_cpp_operation(self): + return (self.operand1.type.is_cpp_class + or self.operand2.type.is_cpp_class) + + def analyse_cpp_operation(self, env): + entry = env.lookup_operator(self.operator, [self.operand1, self.operand2]) + if not entry: + self.type_error() + return + func_type = entry.type + self.exception_check = func_type.exception_check + self.exception_value = func_type.exception_value + if self.exception_check == '+': + # Used by NumBinopNodes to break up expressions involving multiple + # operators so that exceptions can be handled properly. 
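+            # (e.g. a C++ ``operator+`` declared ``except +`` must land
+            # in a temporary so that translate_cpp_exception() can wrap
+            # the call in a try/catch in the generated C++.)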
+ self.is_temp = 1 + if self.exception_value is None: + env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) + if func_type.is_ptr: + func_type = func_type.base_type + if len(func_type.args) == 1: + self.operand2 = self.operand2.coerce_to(func_type.args[0].type, env) + else: + self.operand1 = self.operand1.coerce_to(func_type.args[0].type, env) + self.operand2 = self.operand2.coerce_to(func_type.args[1].type, env) + self.type = func_type.return_type + + def result_type(self, type1, type2, env): + if self.is_pythran_operation_types(type1, type2, env): + return PythranExpr(pythran_binop_type(self.operator, type1, type2)) + if self.is_py_operation_types(type1, type2): + if type2.is_string: + type2 = Builtin.bytes_type + elif type2.is_pyunicode_ptr: + type2 = Builtin.unicode_type + if type1.is_string: + type1 = Builtin.bytes_type + elif type1.is_pyunicode_ptr: + type1 = Builtin.unicode_type + if type1.is_builtin_type or type2.is_builtin_type: + if type1 is type2 and self.operator in '**%+|&^': + # FIXME: at least these operators should be safe - others? + return type1 + result_type = self.infer_builtin_types_operation(type1, type2) + if result_type is not None: + return result_type + return py_object_type + elif type1.is_error or type2.is_error: + return PyrexTypes.error_type + else: + return self.compute_c_result_type(type1, type2) + + def infer_builtin_types_operation(self, type1, type2): + return None + + def nogil_check(self, env): + if self.is_py_operation(): + self.gil_error() + + def coerce_operands_to_pyobjects(self, env): + self.operand1 = self.operand1.coerce_to_pyobject(env) + self.operand2 = self.operand2.coerce_to_pyobject(env) + + def check_const(self): + return self.operand1.check_const() and self.operand2.check_const() + + def is_ephemeral(self): + return (super(BinopNode, self).is_ephemeral() or + self.operand1.is_ephemeral() or self.operand2.is_ephemeral()) + + def generate_result_code(self, code): + if self.type.is_pythran_expr: + code.putln("// Pythran binop") + code.putln("__Pyx_call_destructor(%s);" % self.result()) + if self.operator == '**': + code.putln("new (&%s) decltype(%s){pythonic::numpy::functor::power{}(%s, %s)};" % ( + self.result(), + self.result(), + self.operand1.pythran_result(), + self.operand2.pythran_result())) + else: + code.putln("new (&%s) decltype(%s){%s %s %s};" % ( + self.result(), + self.result(), + self.operand1.pythran_result(), + self.operator, + self.operand2.pythran_result())) + elif self.operand1.type.is_pyobject: + function = self.py_operation_function(code) + if self.operator == '**': + extra_args = ", Py_None" + else: + extra_args = "" + code.putln( + "%s = %s(%s, %s%s); %s" % ( + self.result(), + function, + self.operand1.py_result(), + self.operand2.py_result(), + extra_args, + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.py_result()) + elif self.is_temp: + # C++ overloaded operators with exception values are currently all + # handled through temporaries. 
+ if self.is_cpp_operation() and self.exception_check == '+': + translate_cpp_exception(code, self.pos, + "%s = %s;" % (self.result(), self.calculate_result_code()), + self.result() if self.type.is_pyobject else None, + self.exception_value, self.in_nogil_context) + else: + code.putln("%s = %s;" % (self.result(), self.calculate_result_code())) + + def type_error(self): + if not (self.operand1.type.is_error + or self.operand2.type.is_error): + error(self.pos, "Invalid operand types for '%s' (%s; %s)" % + (self.operator, self.operand1.type, + self.operand2.type)) + self.type = PyrexTypes.error_type + + +class CBinopNode(BinopNode): + + def analyse_types(self, env): + node = BinopNode.analyse_types(self, env) + if node.is_py_operation(): + node.type = PyrexTypes.error_type + return node + + def py_operation_function(self, code): + return "" + + def calculate_result_code(self): + return "(%s %s %s)" % ( + self.operand1.result(), + self.operator, + self.operand2.result()) + + def compute_c_result_type(self, type1, type2): + cpp_type = None + if type1.is_cpp_class or type1.is_ptr: + cpp_type = type1.find_cpp_operation_type(self.operator, type2) + if cpp_type is None and (type2.is_cpp_class or type2.is_ptr): + cpp_type = type2.find_cpp_operation_type(self.operator, type1) + # FIXME: do we need to handle other cases here? + return cpp_type + + +def c_binop_constructor(operator): + def make_binop_node(pos, **operands): + return CBinopNode(pos, operator=operator, **operands) + return make_binop_node + +class NumBinopNode(BinopNode): + # Binary operation taking numeric arguments. + + infix = True + overflow_check = False + overflow_bit_node = None + + def analyse_c_operation(self, env): + type1 = self.operand1.type + type2 = self.operand2.type + self.type = self.compute_c_result_type(type1, type2) + if not self.type: + self.type_error() + return + if self.type.is_complex: + self.infix = False + if (self.type.is_int + and env.directives['overflowcheck'] + and self.operator in self.overflow_op_names): + if (self.operator in ('+', '*') + and self.operand1.has_constant_result() + and not self.operand2.has_constant_result()): + self.operand1, self.operand2 = self.operand2, self.operand1 + self.overflow_check = True + self.overflow_fold = env.directives['overflowcheck.fold'] + self.func = self.type.overflow_check_binop( + self.overflow_op_names[self.operator], + env, + const_rhs = self.operand2.has_constant_result()) + self.is_temp = True + if not self.infix or (type1.is_numeric and type2.is_numeric): + self.operand1 = self.operand1.coerce_to(self.type, env) + self.operand2 = self.operand2.coerce_to(self.type, env) + + def compute_c_result_type(self, type1, type2): + if self.c_types_okay(type1, type2): + widest_type = PyrexTypes.widest_numeric_type(type1, type2) + if widest_type is PyrexTypes.c_bint_type: + if self.operator not in '|^&': + # False + False == 0 # not False! + widest_type = PyrexTypes.c_int_type + else: + widest_type = PyrexTypes.widest_numeric_type( + widest_type, PyrexTypes.c_int_type) + return widest_type + else: + return None + + def may_be_none(self): + if self.type and self.type.is_builtin_type: + # if we know the result type, we know the operation, so it can't be None + return False + type1 = self.operand1.type + type2 = self.operand2.type + if type1 and type1.is_builtin_type and type2 and type2.is_builtin_type: + # XXX: I can't think of any case where a binary operation + # on builtin types evaluates to None - add a special case + # here if there is one. 
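+            # (e.g. adding two Python ints can raise MemoryError but
+            # never returns None.)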
+ return False + return super(NumBinopNode, self).may_be_none() + + def get_constant_c_result_code(self): + value1 = self.operand1.get_constant_c_result_code() + value2 = self.operand2.get_constant_c_result_code() + if value1 and value2: + return "(%s %s %s)" % (value1, self.operator, value2) + else: + return None + + def c_types_okay(self, type1, type2): + #print "NumBinopNode.c_types_okay:", type1, type2 ### + return (type1.is_numeric or type1.is_enum) \ + and (type2.is_numeric or type2.is_enum) + + def generate_evaluation_code(self, code): + if self.overflow_check: + self.overflow_bit_node = self + self.overflow_bit = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) + code.putln("%s = 0;" % self.overflow_bit) + super(NumBinopNode, self).generate_evaluation_code(code) + if self.overflow_check: + code.putln("if (unlikely(%s)) {" % self.overflow_bit) + code.putln('PyErr_SetString(PyExc_OverflowError, "value too large");') + code.putln(code.error_goto(self.pos)) + code.putln("}") + code.funcstate.release_temp(self.overflow_bit) + + def calculate_result_code(self): + if self.overflow_bit_node is not None: + return "%s(%s, %s, &%s)" % ( + self.func, + self.operand1.result(), + self.operand2.result(), + self.overflow_bit_node.overflow_bit) + elif self.type.is_cpp_class or self.infix: + if is_pythran_expr(self.type): + result1, result2 = self.operand1.pythran_result(), self.operand2.pythran_result() + else: + result1, result2 = self.operand1.result(), self.operand2.result() + return "(%s %s %s)" % (result1, self.operator, result2) + else: + func = self.type.binary_op(self.operator) + if func is None: + error(self.pos, "binary operator %s not supported for %s" % (self.operator, self.type)) + return "%s(%s, %s)" % ( + func, + self.operand1.result(), + self.operand2.result()) + + def is_py_operation_types(self, type1, type2): + return (type1.is_unicode_char or + type2.is_unicode_char or + BinopNode.is_py_operation_types(self, type1, type2)) + + def py_operation_function(self, code): + function_name = self.py_functions[self.operator] + if self.inplace: + function_name = function_name.replace('PyNumber_', 'PyNumber_InPlace') + return function_name + + py_functions = { + "|": "PyNumber_Or", + "^": "PyNumber_Xor", + "&": "PyNumber_And", + "<<": "PyNumber_Lshift", + ">>": "PyNumber_Rshift", + "+": "PyNumber_Add", + "-": "PyNumber_Subtract", + "*": "PyNumber_Multiply", + "@": "__Pyx_PyNumber_MatrixMultiply", + "/": "__Pyx_PyNumber_Divide", + "//": "PyNumber_FloorDivide", + "%": "PyNumber_Remainder", + "**": "PyNumber_Power", + } + + overflow_op_names = { + "+": "add", + "-": "sub", + "*": "mul", + "<<": "lshift", + } + + +class IntBinopNode(NumBinopNode): + # Binary operation taking integer arguments. + + def c_types_okay(self, type1, type2): + #print "IntBinopNode.c_types_okay:", type1, type2 ### + return (type1.is_int or type1.is_enum) \ + and (type2.is_int or type2.is_enum) + + +class AddNode(NumBinopNode): + # '+' operator. 
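+    #
+    # Covers C pointer arithmetic (``ptr + n`` keeps the pointer type,
+    # see compute_c_result_type) as well as Python-level addition and
+    # string/unicode concatenation.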
+ + def is_py_operation_types(self, type1, type2): + if type1.is_string and type2.is_string or type1.is_pyunicode_ptr and type2.is_pyunicode_ptr: + return 1 + else: + return NumBinopNode.is_py_operation_types(self, type1, type2) + + def infer_builtin_types_operation(self, type1, type2): + # b'abc' + 'abc' raises an exception in Py3, + # so we can safely infer the Py2 type for bytes here + string_types = (bytes_type, bytearray_type, str_type, basestring_type, unicode_type) + if type1 in string_types and type2 in string_types: + return string_types[max(string_types.index(type1), + string_types.index(type2))] + return None + + def compute_c_result_type(self, type1, type2): + #print "AddNode.compute_c_result_type:", type1, self.operator, type2 ### + if (type1.is_ptr or type1.is_array) and (type2.is_int or type2.is_enum): + return type1 + elif (type2.is_ptr or type2.is_array) and (type1.is_int or type1.is_enum): + return type2 + else: + return NumBinopNode.compute_c_result_type( + self, type1, type2) + + def py_operation_function(self, code): + type1, type2 = self.operand1.type, self.operand2.type + + if type1 is unicode_type or type2 is unicode_type: + if type1 in (unicode_type, str_type) and type2 in (unicode_type, str_type): + is_unicode_concat = True + elif isinstance(self.operand1, FormattedValueNode) or isinstance(self.operand2, FormattedValueNode): + # Assume that even if we don't know the second type, it's going to be a string. + is_unicode_concat = True + else: + # Operation depends on the second type. + is_unicode_concat = False + + if is_unicode_concat: + if self.operand1.may_be_none() or self.operand2.may_be_none(): + return '__Pyx_PyUnicode_ConcatSafe' + else: + return '__Pyx_PyUnicode_Concat' + + return super(AddNode, self).py_operation_function(code) + + +class SubNode(NumBinopNode): + # '-' operator. + + def compute_c_result_type(self, type1, type2): + if (type1.is_ptr or type1.is_array) and (type2.is_int or type2.is_enum): + return type1 + elif (type1.is_ptr or type1.is_array) and (type2.is_ptr or type2.is_array): + return PyrexTypes.c_ptrdiff_t_type + else: + return NumBinopNode.compute_c_result_type( + self, type1, type2) + + +class MulNode(NumBinopNode): + # '*' operator. + + def is_py_operation_types(self, type1, type2): + if ((type1.is_string and type2.is_int) or + (type2.is_string and type1.is_int)): + return 1 + else: + return NumBinopNode.is_py_operation_types(self, type1, type2) + + def infer_builtin_types_operation(self, type1, type2): + # let's assume that whatever builtin type you multiply a string with + # will either return a string of the same type or fail with an exception + string_types = (bytes_type, bytearray_type, str_type, basestring_type, unicode_type) + if type1 in string_types and type2.is_builtin_type: + return type1 + if type2 in string_types and type1.is_builtin_type: + return type2 + # multiplication of containers/numbers with an integer value + # always (?) returns the same type + if type1.is_int: + return type2 + if type2.is_int: + return type1 + return None + + +class MatMultNode(NumBinopNode): + # '@' operator. + + def is_py_operation_types(self, type1, type2): + return True + + def generate_evaluation_code(self, code): + code.globalstate.use_utility_code(UtilityCode.load_cached("MatrixMultiply", "ObjectHandling.c")) + super(MatMultNode, self).generate_evaluation_code(code) + + +class DivNode(NumBinopNode): + # '/' or '//' operator. 
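+    #
+    # With cdivision disabled, division follows Python semantics: e.g.
+    # ``-7 // 2`` must be -4 (floored), whereas C's ``-7 / 2`` truncates
+    # to -3, so a ``__Pyx_div_<type>`` helper and zero-division checks
+    # are generated instead of a bare C ``/``.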
+ + cdivision = None + truedivision = None # == "unknown" if operator == '/' + ctruedivision = False + cdivision_warnings = False + zerodivision_check = None + + def find_compile_time_binary_operator(self, op1, op2): + func = compile_time_binary_operators[self.operator] + if self.operator == '/' and self.truedivision is None: + # => true div for floats, floor div for integers + if isinstance(op1, _py_int_types) and isinstance(op2, _py_int_types): + func = compile_time_binary_operators['//'] + return func + + def calculate_constant_result(self): + op1 = self.operand1.constant_result + op2 = self.operand2.constant_result + func = self.find_compile_time_binary_operator(op1, op2) + self.constant_result = func( + self.operand1.constant_result, + self.operand2.constant_result) + + def compile_time_value(self, denv): + operand1 = self.operand1.compile_time_value(denv) + operand2 = self.operand2.compile_time_value(denv) + try: + func = self.find_compile_time_binary_operator( + operand1, operand2) + return func(operand1, operand2) + except Exception as e: + self.compile_time_value_error(e) + + def _check_truedivision(self, env): + if self.cdivision or env.directives['cdivision']: + self.ctruedivision = False + else: + self.ctruedivision = self.truedivision + + def infer_type(self, env): + self._check_truedivision(env) + return self.result_type( + self.operand1.infer_type(env), + self.operand2.infer_type(env), env) + + def analyse_operation(self, env): + self._check_truedivision(env) + NumBinopNode.analyse_operation(self, env) + if self.is_cpp_operation(): + self.cdivision = True + if not self.type.is_pyobject: + self.zerodivision_check = ( + self.cdivision is None and not env.directives['cdivision'] + and (not self.operand2.has_constant_result() or + self.operand2.constant_result == 0)) + if self.zerodivision_check or env.directives['cdivision_warnings']: + # Need to check ahead of time to warn or raise zero division error + self.operand1 = self.operand1.coerce_to_simple(env) + self.operand2 = self.operand2.coerce_to_simple(env) + + def compute_c_result_type(self, type1, type2): + if self.operator == '/' and self.ctruedivision and not type1.is_cpp_class and not type2.is_cpp_class: + if not type1.is_float and not type2.is_float: + widest_type = PyrexTypes.widest_numeric_type(type1, PyrexTypes.c_double_type) + widest_type = PyrexTypes.widest_numeric_type(type2, widest_type) + return widest_type + return NumBinopNode.compute_c_result_type(self, type1, type2) + + def zero_division_message(self): + if self.type.is_int: + return "integer division or modulo by zero" + else: + return "float division" + + def generate_evaluation_code(self, code): + if not self.type.is_pyobject and not self.type.is_complex: + if self.cdivision is None: + self.cdivision = ( + code.globalstate.directives['cdivision'] + or self.type.is_float + or ((self.type.is_numeric or self.type.is_enum) and not self.type.signed) + ) + if not self.cdivision: + code.globalstate.use_utility_code( + UtilityCode.load_cached("DivInt", "CMath.c").specialize(self.type)) + NumBinopNode.generate_evaluation_code(self, code) + self.generate_div_warning_code(code) + + def generate_div_warning_code(self, code): + in_nogil = self.in_nogil_context + if not self.type.is_pyobject: + if self.zerodivision_check: + if not self.infix: + zero_test = "%s(%s)" % (self.type.unary_op('zero'), self.operand2.result()) + else: + zero_test = "%s == 0" % self.operand2.result() + code.putln("if (unlikely(%s)) {" % zero_test) + if in_nogil: + code.put_ensure_gil() + 
code.putln('PyErr_SetString(PyExc_ZeroDivisionError, "%s");' % self.zero_division_message()) + if in_nogil: + code.put_release_ensured_gil() + code.putln(code.error_goto(self.pos)) + code.putln("}") + if self.type.is_int and self.type.signed and self.operator != '%': + code.globalstate.use_utility_code(UtilityCode.load_cached("UnaryNegOverflows", "Overflow.c")) + if self.operand2.type.signed == 2: + # explicitly signed, no runtime check needed + minus1_check = 'unlikely(%s == -1)' % self.operand2.result() + else: + type_of_op2 = self.operand2.type.empty_declaration_code() + minus1_check = '(!(((%s)-1) > 0)) && unlikely(%s == (%s)-1)' % ( + type_of_op2, self.operand2.result(), type_of_op2) + code.putln("else if (sizeof(%s) == sizeof(long) && %s " + " && unlikely(UNARY_NEG_WOULD_OVERFLOW(%s))) {" % ( + self.type.empty_declaration_code(), + minus1_check, + self.operand1.result())) + if in_nogil: + code.put_ensure_gil() + code.putln('PyErr_SetString(PyExc_OverflowError, "value too large to perform division");') + if in_nogil: + code.put_release_ensured_gil() + code.putln(code.error_goto(self.pos)) + code.putln("}") + if code.globalstate.directives['cdivision_warnings'] and self.operator != '/': + code.globalstate.use_utility_code( + UtilityCode.load_cached("CDivisionWarning", "CMath.c")) + code.putln("if (unlikely((%s < 0) ^ (%s < 0))) {" % ( + self.operand1.result(), + self.operand2.result())) + warning_code = "__Pyx_cdivision_warning(%(FILENAME)s, %(LINENO)s)" % { + 'FILENAME': Naming.filename_cname, + 'LINENO': Naming.lineno_cname, + } + + if in_nogil: + result_code = 'result' + code.putln("int %s;" % result_code) + code.put_ensure_gil() + code.putln(code.set_error_info(self.pos, used=True)) + code.putln("%s = %s;" % (result_code, warning_code)) + code.put_release_ensured_gil() + else: + result_code = warning_code + code.putln(code.set_error_info(self.pos, used=True)) + + code.put("if (unlikely(%s)) " % result_code) + code.put_goto(code.error_label) + code.putln("}") + + def calculate_result_code(self): + if self.type.is_complex or self.is_cpp_operation(): + return NumBinopNode.calculate_result_code(self) + elif self.type.is_float and self.operator == '//': + return "floor(%s / %s)" % ( + self.operand1.result(), + self.operand2.result()) + elif self.truedivision or self.cdivision: + op1 = self.operand1.result() + op2 = self.operand2.result() + if self.truedivision: + if self.type != self.operand1.type: + op1 = self.type.cast_code(op1) + if self.type != self.operand2.type: + op2 = self.type.cast_code(op2) + return "(%s / %s)" % (op1, op2) + else: + return "__Pyx_div_%s(%s, %s)" % ( + self.type.specialization_name(), + self.operand1.result(), + self.operand2.result()) + + +_find_formatting_types = re.compile( + br"%" + br"(?:%|" # %% + br"(?:\([^)]+\))?" # %(name) + br"[-+#,0-9 ]*([a-z])" # %.2f etc. + br")").findall + +# These format conversion types can never trigger a Unicode string conversion in Py2. +_safe_bytes_formats = set([ + # Excludes 's' and 'r', which can generate non-bytes strings. + b'd', b'i', b'o', b'u', b'x', b'X', b'e', b'E', b'f', b'F', b'g', b'G', b'c', b'b', b'a', +]) + + +class ModNode(DivNode): + # '%' operator. + + def is_py_operation_types(self, type1, type2): + return (type1.is_string + or type2.is_string + or NumBinopNode.is_py_operation_types(self, type1, type2)) + + def infer_builtin_types_operation(self, type1, type2): + # b'%s' % xyz raises an exception in Py3<3.5, so it's safe to infer the type for Py2 and later Py3's. 
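+        # For example, ``u'%s' % x`` can safely be inferred as unicode,
+        # and a bytes literal that only uses formats from
+        # _safe_bytes_formats, e.g. ``b'%d-%d' % (1, 2)``, stays bytes.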
+        if type1 is unicode_type:
+            # None + xyz  may be implemented by RHS
+            if type2.is_builtin_type or not self.operand1.may_be_none():
+                return type1
+        elif type1 in (bytes_type, str_type, basestring_type):
+            if type2 is unicode_type:
+                return type2
+            elif type2.is_numeric:
+                return type1
+            elif self.operand1.is_string_literal:
+                if type1 is str_type or type1 is bytes_type:
+                    if set(_find_formatting_types(self.operand1.value)) <= _safe_bytes_formats:
+                        return type1
+                return basestring_type
+            elif type1 is bytes_type and not type2.is_builtin_type:
+                return None   # RHS might implement '%' operator differently in Py3
+            else:
+                return basestring_type  # either str or unicode, can't tell
+        return None
+
+    def zero_division_message(self):
+        if self.type.is_int:
+            return "integer division or modulo by zero"
+        else:
+            return "float divmod()"
+
+    def analyse_operation(self, env):
+        DivNode.analyse_operation(self, env)
+        if not self.type.is_pyobject:
+            if self.cdivision is None:
+                self.cdivision = env.directives['cdivision'] or not self.type.signed
+            if not self.cdivision and not self.type.is_int and not self.type.is_float:
+                error(self.pos, "mod operator not supported for type '%s'" % self.type)
+
+    def generate_evaluation_code(self, code):
+        if not self.type.is_pyobject and not self.cdivision:
+            if self.type.is_int:
+                code.globalstate.use_utility_code(
+                    UtilityCode.load_cached("ModInt", "CMath.c").specialize(self.type))
+            else:  # float
+                code.globalstate.use_utility_code(
+                    UtilityCode.load_cached("ModFloat", "CMath.c").specialize(
+                        self.type, math_h_modifier=self.type.math_h_modifier))
+        # NOTE: skipping over DivNode here
+        NumBinopNode.generate_evaluation_code(self, code)
+        self.generate_div_warning_code(code)
+
+    def calculate_result_code(self):
+        if self.cdivision:
+            if self.type.is_float:
+                return "fmod%s(%s, %s)" % (
+                    self.type.math_h_modifier,
+                    self.operand1.result(),
+                    self.operand2.result())
+            else:
+                return "(%s %% %s)" % (
+                    self.operand1.result(),
+                    self.operand2.result())
+        else:
+            return "__Pyx_mod_%s(%s, %s)" % (
+                self.type.specialization_name(),
+                self.operand1.result(),
+                self.operand2.result())
+
+    def py_operation_function(self, code):
+        type1, type2 = self.operand1.type, self.operand2.type
+        # ("..." % x)  must call "x.__rmod__()" for string subtypes.
+        if type1 is unicode_type:
+            if self.operand1.may_be_none() or (
+                    type2.is_extension_type and type2.subtype_of(type1) or
+                    type2 is py_object_type and not isinstance(self.operand2, CoerceToPyTypeNode)):
+                return '__Pyx_PyUnicode_FormatSafe'
+            else:
+                return 'PyUnicode_Format'
+        elif type1 is str_type:
+            if self.operand1.may_be_none() or (
+                    type2.is_extension_type and type2.subtype_of(type1) or
+                    type2 is py_object_type and not isinstance(self.operand2, CoerceToPyTypeNode)):
+                return '__Pyx_PyString_FormatSafe'
+            else:
+                return '__Pyx_PyString_Format'
+        return super(ModNode, self).py_operation_function(code)
+
+
+class PowNode(NumBinopNode):
+    #  '**' operator.
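+    #
+    # Roughly: float ** float maps to C pow()/powf(), C int ** C int to a
+    # generated __Pyx_pow_<type> helper (see analyse_c_operation below), and
+    # Python-level "2 ** n" gets a special fast path in py_operation_function.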
+ + def analyse_c_operation(self, env): + NumBinopNode.analyse_c_operation(self, env) + if self.type.is_complex: + if self.type.real_type.is_float: + self.operand1 = self.operand1.coerce_to(self.type, env) + self.operand2 = self.operand2.coerce_to(self.type, env) + self.pow_func = self.type.binary_op('**') + else: + error(self.pos, "complex int powers not supported") + self.pow_func = "" + elif self.type.is_float: + self.pow_func = "pow" + self.type.math_h_modifier + elif self.type.is_int: + self.pow_func = "__Pyx_pow_%s" % self.type.empty_declaration_code().replace(' ', '_') + env.use_utility_code( + UtilityCode.load_cached("IntPow", "CMath.c").specialize( + func_name=self.pow_func, + type=self.type.empty_declaration_code(), + signed=self.type.signed and 1 or 0)) + elif not self.type.is_error: + error(self.pos, "got unexpected types for C power operator: %s, %s" % + (self.operand1.type, self.operand2.type)) + + def calculate_result_code(self): + # Work around MSVC overloading ambiguity. + def typecast(operand): + if self.type == operand.type: + return operand.result() + else: + return self.type.cast_code(operand.result()) + return "%s(%s, %s)" % ( + self.pow_func, + typecast(self.operand1), + typecast(self.operand2)) + + def py_operation_function(self, code): + if (self.type.is_pyobject and + self.operand1.constant_result == 2 and + isinstance(self.operand1.constant_result, _py_int_types) and + self.operand2.type is py_object_type): + code.globalstate.use_utility_code(UtilityCode.load_cached('PyNumberPow2', 'Optimize.c')) + if self.inplace: + return '__Pyx_PyNumber_InPlacePowerOf2' + else: + return '__Pyx_PyNumber_PowerOf2' + return super(PowNode, self).py_operation_function(code) + + +class BoolBinopNode(ExprNode): + """ + Short-circuiting boolean operation. + + Note that this node provides the same code generation method as + BoolBinopResultNode to simplify expression nesting. + + operator string "and"/"or" + operand1 BoolBinopNode/BoolBinopResultNode left operand + operand2 BoolBinopNode/BoolBinopResultNode right operand + """ + subexprs = ['operand1', 'operand2'] + is_temp = True + operator = None + operand1 = None + operand2 = None + + def infer_type(self, env): + type1 = self.operand1.infer_type(env) + type2 = self.operand2.infer_type(env) + return PyrexTypes.independent_spanning_type(type1, type2) + + def may_be_none(self): + if self.operator == 'or': + return self.operand2.may_be_none() + else: + return self.operand1.may_be_none() or self.operand2.may_be_none() + + def calculate_constant_result(self): + operand1 = self.operand1.constant_result + operand2 = self.operand2.constant_result + if self.operator == 'and': + self.constant_result = operand1 and operand2 + else: + self.constant_result = operand1 or operand2 + + def compile_time_value(self, denv): + operand1 = self.operand1.compile_time_value(denv) + operand2 = self.operand2.compile_time_value(denv) + if self.operator == 'and': + return operand1 and operand2 + else: + return operand1 or operand2 + + def is_ephemeral(self): + return self.operand1.is_ephemeral() or self.operand2.is_ephemeral() + + def analyse_types(self, env): + # Note: we do not do any coercion here as we most likely do not know the final type anyway. + # We even accept to set self.type to ErrorType if both operands do not have a spanning type. + # The coercion to the final type and to a "simple" value is left to coerce_to(). 
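+        # (Python semantics: "a and b" yields a if a is falsy, else b, and
+        # "a or b" yields a if a is truthy, else b -- hence both operands
+        # need a common spanning result type.)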
+ operand1 = self.operand1.analyse_types(env) + operand2 = self.operand2.analyse_types(env) + self.type = PyrexTypes.independent_spanning_type( + operand1.type, operand2.type) + self.operand1 = self._wrap_operand(operand1, env) + self.operand2 = self._wrap_operand(operand2, env) + return self + + def _wrap_operand(self, operand, env): + if not isinstance(operand, (BoolBinopNode, BoolBinopResultNode)): + operand = BoolBinopResultNode(operand, self.type, env) + return operand + + def wrap_operands(self, env): + """ + Must get called by transforms that want to create a correct BoolBinopNode + after the type analysis phase. + """ + self.operand1 = self._wrap_operand(self.operand1, env) + self.operand2 = self._wrap_operand(self.operand2, env) + + def coerce_to_boolean(self, env): + return self.coerce_to(PyrexTypes.c_bint_type, env) + + def coerce_to(self, dst_type, env): + operand1 = self.operand1.coerce_to(dst_type, env) + operand2 = self.operand2.coerce_to(dst_type, env) + return BoolBinopNode.from_node( + self, type=dst_type, + operator=self.operator, + operand1=operand1, operand2=operand2) + + def generate_bool_evaluation_code(self, code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through): + code.mark_pos(self.pos) + + outer_labels = (and_label, or_label) + if self.operator == 'and': + my_label = and_label = code.new_label('next_and') + else: + my_label = or_label = code.new_label('next_or') + self.operand1.generate_bool_evaluation_code( + code, final_result_temp, final_result_type, and_label, or_label, end_label, my_label) + + and_label, or_label = outer_labels + + code.put_label(my_label) + self.operand2.generate_bool_evaluation_code( + code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through) + + def generate_evaluation_code(self, code): + self.allocate_temp_result(code) + result_type = PyrexTypes.py_object_type if self.type.is_pyobject else self.type + or_label = and_label = None + end_label = code.new_label('bool_binop_done') + self.generate_bool_evaluation_code(code, self.result(), result_type, and_label, or_label, end_label, end_label) + code.put_label(end_label) + + gil_message = "Truth-testing Python object" + + def check_const(self): + return self.operand1.check_const() and self.operand2.check_const() + + def generate_subexpr_disposal_code(self, code): + pass # nothing to do here, all done in generate_evaluation_code() + + def free_subexpr_temps(self, code): + pass # nothing to do here, all done in generate_evaluation_code() + + def generate_operand1_test(self, code): + # Generate code to test the truth of the first operand. + if self.type.is_pyobject: + test_result = code.funcstate.allocate_temp( + PyrexTypes.c_bint_type, manage_ref=False) + code.putln( + "%s = __Pyx_PyObject_IsTrue(%s); %s" % ( + test_result, + self.operand1.py_result(), + code.error_goto_if_neg(test_result, self.pos))) + else: + test_result = self.operand1.result() + return (test_result, self.type.is_pyobject) + + +class BoolBinopResultNode(ExprNode): + """ + Intermediate result of a short-circuiting and/or expression. + Tests the result for 'truthiness' and takes care of coercing the final result + of the overall expression to the target type. + + Note that this node provides the same code generation method as + BoolBinopNode to simplify expression nesting. 
+ + arg ExprNode the argument to test + value ExprNode the coerced result value node + """ + + subexprs = ['arg', 'value'] + is_temp = True + arg = None + value = None + + def __init__(self, arg, result_type, env): + # using 'arg' multiple times, so it must be a simple/temp value + arg = arg.coerce_to_simple(env) + # wrap in ProxyNode, in case a transform wants to replace self.arg later + arg = ProxyNode(arg) + super(BoolBinopResultNode, self).__init__( + arg.pos, arg=arg, type=result_type, + value=CloneNode(arg).coerce_to(result_type, env)) + + def coerce_to_boolean(self, env): + return self.coerce_to(PyrexTypes.c_bint_type, env) + + def coerce_to(self, dst_type, env): + # unwrap, coerce, rewrap + arg = self.arg.arg + if dst_type is PyrexTypes.c_bint_type: + arg = arg.coerce_to_boolean(env) + # TODO: unwrap more coercion nodes? + return BoolBinopResultNode(arg, dst_type, env) + + def nogil_check(self, env): + # let's leave all errors to BoolBinopNode + pass + + def generate_operand_test(self, code): + # Generate code to test the truth of the first operand. + if self.arg.type.is_pyobject: + test_result = code.funcstate.allocate_temp( + PyrexTypes.c_bint_type, manage_ref=False) + code.putln( + "%s = __Pyx_PyObject_IsTrue(%s); %s" % ( + test_result, + self.arg.py_result(), + code.error_goto_if_neg(test_result, self.pos))) + else: + test_result = self.arg.result() + return (test_result, self.arg.type.is_pyobject) + + def generate_bool_evaluation_code(self, code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through): + code.mark_pos(self.pos) + + # x => x + # x and ... or ... => next 'and' / 'or' + # False ... or x => next 'or' + # True and x => next 'and' + # True or x => True (operand) + + self.arg.generate_evaluation_code(code) + if and_label or or_label: + test_result, uses_temp = self.generate_operand_test(code) + if uses_temp and (and_label and or_label): + # cannot become final result => free early + # disposal: uses_temp and (and_label and or_label) + self.arg.generate_disposal_code(code) + sense = '!' if or_label else '' + code.putln("if (%s%s) {" % (sense, test_result)) + if uses_temp: + code.funcstate.release_temp(test_result) + if not uses_temp or not (and_label and or_label): + # disposal: (not uses_temp) or {not (and_label and or_label) [if]} + self.arg.generate_disposal_code(code) + + if or_label and or_label != fall_through: + # value is false => short-circuit to next 'or' + code.put_goto(or_label) + if and_label: + # value is true => go to next 'and' + if or_label: + code.putln("} else {") + if not uses_temp: + # disposal: (not uses_temp) and {(and_label and or_label) [else]} + self.arg.generate_disposal_code(code) + if and_label != fall_through: + code.put_goto(and_label) + + if not and_label or not or_label: + # if no next 'and' or 'or', we provide the result + if and_label or or_label: + code.putln("} else {") + self.value.generate_evaluation_code(code) + self.value.make_owned_reference(code) + code.putln("%s = %s;" % (final_result_temp, self.value.result_as(final_result_type))) + self.value.generate_post_assignment_code(code) + # disposal: {not (and_label and or_label) [else]} + self.arg.generate_disposal_code(code) + self.value.free_temps(code) + if end_label != fall_through: + code.put_goto(end_label) + + if and_label or or_label: + code.putln("}") + self.arg.free_temps(code) + + +class CondExprNode(ExprNode): + # Short-circuiting conditional expression. 
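+    # e.g. "x if cond else y": exactly one of true_val/false_val is evaluated.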
+ # + # test ExprNode + # true_val ExprNode + # false_val ExprNode + + true_val = None + false_val = None + is_temp = True + + subexprs = ['test', 'true_val', 'false_val'] + + def type_dependencies(self, env): + return self.true_val.type_dependencies(env) + self.false_val.type_dependencies(env) + + def infer_type(self, env): + return PyrexTypes.independent_spanning_type( + self.true_val.infer_type(env), + self.false_val.infer_type(env)) + + def calculate_constant_result(self): + if self.test.constant_result: + self.constant_result = self.true_val.constant_result + else: + self.constant_result = self.false_val.constant_result + + def is_ephemeral(self): + return self.true_val.is_ephemeral() or self.false_val.is_ephemeral() + + def analyse_types(self, env): + self.test = self.test.analyse_types(env).coerce_to_boolean(env) + self.true_val = self.true_val.analyse_types(env) + self.false_val = self.false_val.analyse_types(env) + return self.analyse_result_type(env) + + def analyse_result_type(self, env): + self.type = PyrexTypes.independent_spanning_type( + self.true_val.type, self.false_val.type) + if self.type.is_reference: + self.type = PyrexTypes.CFakeReferenceType(self.type.ref_base_type) + if self.type.is_pyobject: + self.result_ctype = py_object_type + elif self.true_val.is_ephemeral() or self.false_val.is_ephemeral(): + error(self.pos, "Unsafe C derivative of temporary Python reference used in conditional expression") + if self.true_val.type.is_pyobject or self.false_val.type.is_pyobject: + self.true_val = self.true_val.coerce_to(self.type, env) + self.false_val = self.false_val.coerce_to(self.type, env) + if self.type.is_error: + self.type_error() + return self + + def coerce_to_integer(self, env): + self.true_val = self.true_val.coerce_to_integer(env) + self.false_val = self.false_val.coerce_to_integer(env) + self.result_ctype = None + return self.analyse_result_type(env) + + def coerce_to(self, dst_type, env): + self.true_val = self.true_val.coerce_to(dst_type, env) + self.false_val = self.false_val.coerce_to(dst_type, env) + self.result_ctype = None + return self.analyse_result_type(env) + + def type_error(self): + if not (self.true_val.type.is_error or self.false_val.type.is_error): + error(self.pos, "Incompatible types in conditional expression (%s; %s)" % + (self.true_val.type, self.false_val.type)) + self.type = PyrexTypes.error_type + + def check_const(self): + return (self.test.check_const() + and self.true_val.check_const() + and self.false_val.check_const()) + + def generate_evaluation_code(self, code): + # Because subexprs may not be evaluated we can use a more optimal + # subexpr allocation strategy than the default, so override evaluation_code. 
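+        # The emitted C is essentially:
+        #   if (test) { <evaluate true_val>; result = true_val; }
+        #   else      { <evaluate false_val>; result = false_val; }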
+ + code.mark_pos(self.pos) + self.allocate_temp_result(code) + self.test.generate_evaluation_code(code) + code.putln("if (%s) {" % self.test.result()) + self.eval_and_get(code, self.true_val) + code.putln("} else {") + self.eval_and_get(code, self.false_val) + code.putln("}") + self.test.generate_disposal_code(code) + self.test.free_temps(code) + + def eval_and_get(self, code, expr): + expr.generate_evaluation_code(code) + if self.type.is_memoryviewslice: + expr.make_owned_memoryviewslice(code) + else: + expr.make_owned_reference(code) + code.putln('%s = %s;' % (self.result(), expr.result_as(self.ctype()))) + expr.generate_post_assignment_code(code) + expr.free_temps(code) + + def generate_subexpr_disposal_code(self, code): + pass # done explicitly above (cleanup must separately happen within the if/else blocks) + + def free_subexpr_temps(self, code): + pass # done explicitly above (cleanup must separately happen within the if/else blocks) + + +richcmp_constants = { + "<" : "Py_LT", + "<=": "Py_LE", + "==": "Py_EQ", + "!=": "Py_NE", + "<>": "Py_NE", + ">" : "Py_GT", + ">=": "Py_GE", + # the following are faked by special compare functions + "in" : "Py_EQ", + "not_in": "Py_NE", +} + +class CmpNode(object): + # Mixin class containing code common to PrimaryCmpNodes + # and CascadedCmpNodes. + + special_bool_cmp_function = None + special_bool_cmp_utility_code = None + + def infer_type(self, env): + # TODO: Actually implement this (after merging with -unstable). + return py_object_type + + def calculate_cascaded_constant_result(self, operand1_result): + func = compile_time_binary_operators[self.operator] + operand2_result = self.operand2.constant_result + if (isinstance(operand1_result, any_string_type) and + isinstance(operand2_result, any_string_type) and + type(operand1_result) != type(operand2_result)): + # string comparison of different types isn't portable + return + + if self.operator in ('in', 'not_in'): + if isinstance(self.operand2, (ListNode, TupleNode, SetNode)): + if not self.operand2.args: + self.constant_result = self.operator == 'not_in' + return + elif isinstance(self.operand2, ListNode) and not self.cascade: + # tuples are more efficient to store than lists + self.operand2 = self.operand2.as_tuple() + elif isinstance(self.operand2, DictNode): + if not self.operand2.key_value_pairs: + self.constant_result = self.operator == 'not_in' + return + + self.constant_result = func(operand1_result, operand2_result) + + def cascaded_compile_time_value(self, operand1, denv): + func = get_compile_time_binop(self) + operand2 = self.operand2.compile_time_value(denv) + try: + result = func(operand1, operand2) + except Exception as e: + self.compile_time_value_error(e) + result = None + if result: + cascade = self.cascade + if cascade: + result = result and cascade.cascaded_compile_time_value(operand2, denv) + return result + + def is_cpp_comparison(self): + return self.operand1.type.is_cpp_class or self.operand2.type.is_cpp_class + + def find_common_int_type(self, env, op, operand1, operand2): + # type1 != type2 and at least one of the types is not a C int + type1 = operand1.type + type2 = operand2.type + type1_can_be_int = False + type2_can_be_int = False + + if operand1.is_string_literal and operand1.can_coerce_to_char_literal(): + type1_can_be_int = True + if operand2.is_string_literal and operand2.can_coerce_to_char_literal(): + type2_can_be_int = True + + if type1.is_int: + if type2_can_be_int: + return type1 + elif type2.is_int: + if type1_can_be_int: + return type2 + elif 
type1_can_be_int: + if type2_can_be_int: + if Builtin.unicode_type in (type1, type2): + return PyrexTypes.c_py_ucs4_type + else: + return PyrexTypes.c_uchar_type + + return None + + def find_common_type(self, env, op, operand1, common_type=None): + operand2 = self.operand2 + type1 = operand1.type + type2 = operand2.type + + new_common_type = None + + # catch general errors + if (type1 == str_type and (type2.is_string or type2 in (bytes_type, unicode_type)) or + type2 == str_type and (type1.is_string or type1 in (bytes_type, unicode_type))): + error(self.pos, "Comparisons between bytes/unicode and str are not portable to Python 3") + new_common_type = error_type + + # try to use numeric comparisons where possible + elif type1.is_complex or type2.is_complex: + if (op not in ('==', '!=') + and (type1.is_complex or type1.is_numeric) + and (type2.is_complex or type2.is_numeric)): + error(self.pos, "complex types are unordered") + new_common_type = error_type + elif type1.is_pyobject: + new_common_type = Builtin.complex_type if type1.subtype_of(Builtin.complex_type) else py_object_type + elif type2.is_pyobject: + new_common_type = Builtin.complex_type if type2.subtype_of(Builtin.complex_type) else py_object_type + else: + new_common_type = PyrexTypes.widest_numeric_type(type1, type2) + elif type1.is_numeric and type2.is_numeric: + new_common_type = PyrexTypes.widest_numeric_type(type1, type2) + elif common_type is None or not common_type.is_pyobject: + new_common_type = self.find_common_int_type(env, op, operand1, operand2) + + if new_common_type is None: + # fall back to generic type compatibility tests + if type1.is_ctuple or type2.is_ctuple: + new_common_type = py_object_type + elif type1 == type2: + new_common_type = type1 + elif type1.is_pyobject or type2.is_pyobject: + if type2.is_numeric or type2.is_string: + if operand2.check_for_coercion_error(type1, env): + new_common_type = error_type + else: + new_common_type = py_object_type + elif type1.is_numeric or type1.is_string: + if operand1.check_for_coercion_error(type2, env): + new_common_type = error_type + else: + new_common_type = py_object_type + elif py_object_type.assignable_from(type1) and py_object_type.assignable_from(type2): + new_common_type = py_object_type + else: + # one Python type and one non-Python type, not assignable + self.invalid_types_error(operand1, op, operand2) + new_common_type = error_type + elif type1.assignable_from(type2): + new_common_type = type1 + elif type2.assignable_from(type1): + new_common_type = type2 + else: + # C types that we couldn't handle up to here are an error + self.invalid_types_error(operand1, op, operand2) + new_common_type = error_type + + if new_common_type.is_string and (isinstance(operand1, BytesNode) or + isinstance(operand2, BytesNode)): + # special case when comparing char* to bytes literal: must + # compare string values! 
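+            # (i.e. compare the byte contents, not the raw char* pointers)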
+ new_common_type = bytes_type + + # recursively merge types + if common_type is None or new_common_type.is_error: + common_type = new_common_type + else: + # we could do a lot better by splitting the comparison + # into a non-Python part and a Python part, but this is + # safer for now + common_type = PyrexTypes.spanning_type(common_type, new_common_type) + + if self.cascade: + common_type = self.cascade.find_common_type(env, self.operator, operand2, common_type) + + return common_type + + def invalid_types_error(self, operand1, op, operand2): + error(self.pos, "Invalid types for '%s' (%s, %s)" % + (op, operand1.type, operand2.type)) + + def is_python_comparison(self): + return (not self.is_ptr_contains() + and not self.is_c_string_contains() + and (self.has_python_operands() + or (self.cascade and self.cascade.is_python_comparison()) + or self.operator in ('in', 'not_in'))) + + def coerce_operands_to(self, dst_type, env): + operand2 = self.operand2 + if operand2.type != dst_type: + self.operand2 = operand2.coerce_to(dst_type, env) + if self.cascade: + self.cascade.coerce_operands_to(dst_type, env) + + def is_python_result(self): + return ((self.has_python_operands() and + self.special_bool_cmp_function is None and + self.operator not in ('is', 'is_not', 'in', 'not_in') and + not self.is_c_string_contains() and + not self.is_ptr_contains()) + or (self.cascade and self.cascade.is_python_result())) + + def is_c_string_contains(self): + return self.operator in ('in', 'not_in') and \ + ((self.operand1.type.is_int + and (self.operand2.type.is_string or self.operand2.type is bytes_type)) or + (self.operand1.type.is_unicode_char + and self.operand2.type is unicode_type)) + + def is_ptr_contains(self): + if self.operator in ('in', 'not_in'): + container_type = self.operand2.type + return (container_type.is_ptr or container_type.is_array) \ + and not container_type.is_string + + def find_special_bool_compare_function(self, env, operand1, result_is_bool=False): + # note: currently operand1 must get coerced to a Python object if we succeed here! 
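+        # e.g. "u == v" on two unicode objects is dispatched to
+        # __Pyx_PyUnicode_Equals below rather than generic PyObject_RichCompare().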
+ if self.operator in ('==', '!='): + type1, type2 = operand1.type, self.operand2.type + if result_is_bool or (type1.is_builtin_type and type2.is_builtin_type): + if type1 is Builtin.unicode_type or type2 is Builtin.unicode_type: + self.special_bool_cmp_utility_code = UtilityCode.load_cached("UnicodeEquals", "StringTools.c") + self.special_bool_cmp_function = "__Pyx_PyUnicode_Equals" + return True + elif type1 is Builtin.bytes_type or type2 is Builtin.bytes_type: + self.special_bool_cmp_utility_code = UtilityCode.load_cached("BytesEquals", "StringTools.c") + self.special_bool_cmp_function = "__Pyx_PyBytes_Equals" + return True + elif type1 is Builtin.basestring_type or type2 is Builtin.basestring_type: + self.special_bool_cmp_utility_code = UtilityCode.load_cached("UnicodeEquals", "StringTools.c") + self.special_bool_cmp_function = "__Pyx_PyUnicode_Equals" + return True + elif type1 is Builtin.str_type or type2 is Builtin.str_type: + self.special_bool_cmp_utility_code = UtilityCode.load_cached("StrEquals", "StringTools.c") + self.special_bool_cmp_function = "__Pyx_PyString_Equals" + return True + elif self.operator in ('in', 'not_in'): + if self.operand2.type is Builtin.dict_type: + self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable") + self.special_bool_cmp_utility_code = UtilityCode.load_cached("PyDictContains", "ObjectHandling.c") + self.special_bool_cmp_function = "__Pyx_PyDict_ContainsTF" + return True + elif self.operand2.type is Builtin.set_type: + self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable") + self.special_bool_cmp_utility_code = UtilityCode.load_cached("PySetContains", "ObjectHandling.c") + self.special_bool_cmp_function = "__Pyx_PySet_ContainsTF" + return True + elif self.operand2.type is Builtin.unicode_type: + self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable") + self.special_bool_cmp_utility_code = UtilityCode.load_cached("PyUnicodeContains", "StringTools.c") + self.special_bool_cmp_function = "__Pyx_PyUnicode_ContainsTF" + return True + else: + if not self.operand2.type.is_pyobject: + self.operand2 = self.operand2.coerce_to_pyobject(env) + self.special_bool_cmp_utility_code = UtilityCode.load_cached("PySequenceContains", "ObjectHandling.c") + self.special_bool_cmp_function = "__Pyx_PySequence_ContainsTF" + return True + return False + + def generate_operation_code(self, code, result_code, + operand1, op , operand2): + if self.type.is_pyobject: + error_clause = code.error_goto_if_null + got_ref = "__Pyx_XGOTREF(%s); " % result_code + if self.special_bool_cmp_function: + code.globalstate.use_utility_code( + UtilityCode.load_cached("PyBoolOrNullFromLong", "ObjectHandling.c")) + coerce_result = "__Pyx_PyBoolOrNull_FromLong" + else: + coerce_result = "__Pyx_PyBool_FromLong" + else: + error_clause = code.error_goto_if_neg + got_ref = "" + coerce_result = "" + + if self.special_bool_cmp_function: + if operand1.type.is_pyobject: + result1 = operand1.py_result() + else: + result1 = operand1.result() + if operand2.type.is_pyobject: + result2 = operand2.py_result() + else: + result2 = operand2.result() + if self.special_bool_cmp_utility_code: + code.globalstate.use_utility_code(self.special_bool_cmp_utility_code) + code.putln( + "%s = %s(%s(%s, %s, %s)); %s%s" % ( + result_code, + coerce_result, + self.special_bool_cmp_function, + result1, result2, richcmp_constants[op], + got_ref, + error_clause(result_code, self.pos))) + + elif operand1.type.is_pyobject and op not in ('is', 
'is_not'): + assert op not in ('in', 'not_in'), op + code.putln("%s = PyObject_RichCompare(%s, %s, %s); %s%s" % ( + result_code, + operand1.py_result(), + operand2.py_result(), + richcmp_constants[op], + got_ref, + error_clause(result_code, self.pos))) + + elif operand1.type.is_complex: + code.putln("%s = %s(%s%s(%s, %s));" % ( + result_code, + coerce_result, + op == "!=" and "!" or "", + operand1.type.unary_op('eq'), + operand1.result(), + operand2.result())) + + else: + type1 = operand1.type + type2 = operand2.type + if (type1.is_extension_type or type2.is_extension_type) \ + and not type1.same_as(type2): + common_type = py_object_type + elif type1.is_numeric: + common_type = PyrexTypes.widest_numeric_type(type1, type2) + else: + common_type = type1 + code1 = operand1.result_as(common_type) + code2 = operand2.result_as(common_type) + statement = "%s = %s(%s %s %s);" % ( + result_code, + coerce_result, + code1, + self.c_operator(op), + code2) + if self.is_cpp_comparison() and self.exception_check == '+': + translate_cpp_exception( + code, + self.pos, + statement, + result_code if self.type.is_pyobject else None, + self.exception_value, + self.in_nogil_context) + else: + code.putln(statement) + + def c_operator(self, op): + if op == 'is': + return "==" + elif op == 'is_not': + return "!=" + else: + return op + +class PrimaryCmpNode(ExprNode, CmpNode): + # Non-cascaded comparison or first comparison of + # a cascaded sequence. + # + # operator string + # operand1 ExprNode + # operand2 ExprNode + # cascade CascadedCmpNode + + # We don't use the subexprs mechanism, because + # things here are too complicated for it to handle. + # Instead, we override all the framework methods + # which use it. + + child_attrs = ['operand1', 'operand2', 'coerced_operand2', 'cascade'] + + cascade = None + coerced_operand2 = None + is_memslice_nonecheck = False + + def infer_type(self, env): + type1 = self.operand1.infer_type(env) + type2 = self.operand2.infer_type(env) + + if is_pythran_expr(type1) or is_pythran_expr(type2): + if is_pythran_supported_type(type1) and is_pythran_supported_type(type2): + return PythranExpr(pythran_binop_type(self.operator, type1, type2)) + + # TODO: implement this for other types. 
+ return py_object_type + + def type_dependencies(self, env): + return () + + def calculate_constant_result(self): + assert not self.cascade + self.calculate_cascaded_constant_result(self.operand1.constant_result) + + def compile_time_value(self, denv): + operand1 = self.operand1.compile_time_value(denv) + return self.cascaded_compile_time_value(operand1, denv) + + def analyse_types(self, env): + self.operand1 = self.operand1.analyse_types(env) + self.operand2 = self.operand2.analyse_types(env) + if self.is_cpp_comparison(): + self.analyse_cpp_comparison(env) + if self.cascade: + error(self.pos, "Cascading comparison not yet supported for cpp types.") + return self + + type1 = self.operand1.type + type2 = self.operand2.type + if is_pythran_expr(type1) or is_pythran_expr(type2): + if is_pythran_supported_type(type1) and is_pythran_supported_type(type2): + self.type = PythranExpr(pythran_binop_type(self.operator, type1, type2)) + self.is_pycmp = False + return self + + if self.analyse_memoryviewslice_comparison(env): + return self + + if self.cascade: + self.cascade = self.cascade.analyse_types(env) + + if self.operator in ('in', 'not_in'): + if self.is_c_string_contains(): + self.is_pycmp = False + common_type = None + if self.cascade: + error(self.pos, "Cascading comparison not yet supported for 'int_val in string'.") + return self + if self.operand2.type is unicode_type: + env.use_utility_code(UtilityCode.load_cached("PyUCS4InUnicode", "StringTools.c")) + else: + if self.operand1.type is PyrexTypes.c_uchar_type: + self.operand1 = self.operand1.coerce_to(PyrexTypes.c_char_type, env) + if self.operand2.type is not bytes_type: + self.operand2 = self.operand2.coerce_to(bytes_type, env) + env.use_utility_code(UtilityCode.load_cached("BytesContains", "StringTools.c")) + self.operand2 = self.operand2.as_none_safe_node( + "argument of type 'NoneType' is not iterable") + elif self.is_ptr_contains(): + if self.cascade: + error(self.pos, "Cascading comparison not supported for 'val in sliced pointer'.") + self.type = PyrexTypes.c_bint_type + # Will be transformed by IterationTransform + return self + elif self.find_special_bool_compare_function(env, self.operand1): + if not self.operand1.type.is_pyobject: + self.operand1 = self.operand1.coerce_to_pyobject(env) + common_type = None # if coercion needed, the method call above has already done it + self.is_pycmp = False # result is bint + else: + common_type = py_object_type + self.is_pycmp = True + elif self.find_special_bool_compare_function(env, self.operand1): + if not self.operand1.type.is_pyobject: + self.operand1 = self.operand1.coerce_to_pyobject(env) + common_type = None # if coercion needed, the method call above has already done it + self.is_pycmp = False # result is bint + else: + common_type = self.find_common_type(env, self.operator, self.operand1) + self.is_pycmp = common_type.is_pyobject + + if common_type is not None and not common_type.is_error: + if self.operand1.type != common_type: + self.operand1 = self.operand1.coerce_to(common_type, env) + self.coerce_operands_to(common_type, env) + + if self.cascade: + self.operand2 = self.operand2.coerce_to_simple(env) + self.cascade.coerce_cascaded_operands_to_temp(env) + operand2 = self.cascade.optimise_comparison(self.operand2, env) + if operand2 is not self.operand2: + self.coerced_operand2 = operand2 + if self.is_python_result(): + self.type = PyrexTypes.py_object_type + else: + self.type = PyrexTypes.c_bint_type + cdr = self.cascade + while cdr: + cdr.type = self.type + cdr = cdr.cascade 
+ if self.is_pycmp or self.cascade or self.special_bool_cmp_function: + # 1) owned reference, 2) reused value, 3) potential function error return value + self.is_temp = 1 + return self + + def analyse_cpp_comparison(self, env): + type1 = self.operand1.type + type2 = self.operand2.type + self.is_pycmp = False + entry = env.lookup_operator(self.operator, [self.operand1, self.operand2]) + if entry is None: + error(self.pos, "Invalid types for '%s' (%s, %s)" % + (self.operator, type1, type2)) + self.type = PyrexTypes.error_type + self.result_code = "" + return + func_type = entry.type + if func_type.is_ptr: + func_type = func_type.base_type + self.exception_check = func_type.exception_check + self.exception_value = func_type.exception_value + if self.exception_check == '+': + self.is_temp = True + if self.exception_value is None: + env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp")) + if len(func_type.args) == 1: + self.operand2 = self.operand2.coerce_to(func_type.args[0].type, env) + else: + self.operand1 = self.operand1.coerce_to(func_type.args[0].type, env) + self.operand2 = self.operand2.coerce_to(func_type.args[1].type, env) + self.type = func_type.return_type + + def analyse_memoryviewslice_comparison(self, env): + have_none = self.operand1.is_none or self.operand2.is_none + have_slice = (self.operand1.type.is_memoryviewslice or + self.operand2.type.is_memoryviewslice) + ops = ('==', '!=', 'is', 'is_not') + if have_slice and have_none and self.operator in ops: + self.is_pycmp = False + self.type = PyrexTypes.c_bint_type + self.is_memslice_nonecheck = True + return True + + return False + + def coerce_to_boolean(self, env): + if self.is_pycmp: + # coercing to bool => may allow for more efficient comparison code + if self.find_special_bool_compare_function( + env, self.operand1, result_is_bool=True): + self.is_pycmp = False + self.type = PyrexTypes.c_bint_type + self.is_temp = 1 + if self.cascade: + operand2 = self.cascade.optimise_comparison( + self.operand2, env, result_is_bool=True) + if operand2 is not self.operand2: + self.coerced_operand2 = operand2 + return self + # TODO: check if we can optimise parts of the cascade here + return ExprNode.coerce_to_boolean(self, env) + + def has_python_operands(self): + return (self.operand1.type.is_pyobject + or self.operand2.type.is_pyobject) + + def check_const(self): + if self.cascade: + self.not_const() + return False + else: + return self.operand1.check_const() and self.operand2.check_const() + + def calculate_result_code(self): + operand1, operand2 = self.operand1, self.operand2 + if operand1.type.is_complex: + if self.operator == "!=": + negation = "!" + else: + negation = "" + return "(%s%s(%s, %s))" % ( + negation, + operand1.type.binary_op('=='), + operand1.result(), + operand2.result()) + elif self.is_c_string_contains(): + if operand2.type is unicode_type: + method = "__Pyx_UnicodeContainsUCS4" + else: + method = "__Pyx_BytesContains" + if self.operator == "not_in": + negation = "!" 
+ else: + negation = "" + return "(%s%s(%s, %s))" % ( + negation, + method, + operand2.result(), + operand1.result()) + else: + if is_pythran_expr(self.type): + result1, result2 = operand1.pythran_result(), operand2.pythran_result() + else: + result1, result2 = operand1.result(), operand2.result() + if self.is_memslice_nonecheck: + if operand1.type.is_memoryviewslice: + result1 = "((PyObject *) %s.memview)" % result1 + else: + result2 = "((PyObject *) %s.memview)" % result2 + + return "(%s %s %s)" % ( + result1, + self.c_operator(self.operator), + result2) + + def generate_evaluation_code(self, code): + self.operand1.generate_evaluation_code(code) + self.operand2.generate_evaluation_code(code) + if self.is_temp: + self.allocate_temp_result(code) + self.generate_operation_code(code, self.result(), + self.operand1, self.operator, self.operand2) + if self.cascade: + self.cascade.generate_evaluation_code( + code, self.result(), self.coerced_operand2 or self.operand2, + needs_evaluation=self.coerced_operand2 is not None) + self.operand1.generate_disposal_code(code) + self.operand1.free_temps(code) + self.operand2.generate_disposal_code(code) + self.operand2.free_temps(code) + + def generate_subexpr_disposal_code(self, code): + # If this is called, it is a non-cascaded cmp, + # so only need to dispose of the two main operands. + self.operand1.generate_disposal_code(code) + self.operand2.generate_disposal_code(code) + + def free_subexpr_temps(self, code): + # If this is called, it is a non-cascaded cmp, + # so only need to dispose of the two main operands. + self.operand1.free_temps(code) + self.operand2.free_temps(code) + + def annotate(self, code): + self.operand1.annotate(code) + self.operand2.annotate(code) + if self.cascade: + self.cascade.annotate(code) + + +class CascadedCmpNode(Node, CmpNode): + # A CascadedCmpNode is not a complete expression node. It + # hangs off the side of another comparison node, shares + # its left operand with that node, and shares its result + # with the PrimaryCmpNode at the head of the chain. + # + # operator string + # operand2 ExprNode + # cascade CascadedCmpNode + + child_attrs = ['operand2', 'coerced_operand2', 'cascade'] + + cascade = None + coerced_operand2 = None + constant_result = constant_value_not_set # FIXME: where to calculate this? + + def infer_type(self, env): + # TODO: Actually implement this (after merging with -unstable). + return py_object_type + + def type_dependencies(self, env): + return () + + def has_constant_result(self): + return self.constant_result is not constant_value_not_set and \ + self.constant_result is not not_a_constant + + def analyse_types(self, env): + self.operand2 = self.operand2.analyse_types(env) + if self.cascade: + self.cascade = self.cascade.analyse_types(env) + return self + + def has_python_operands(self): + return self.operand2.type.is_pyobject + + def is_cpp_comparison(self): + # cascaded comparisons aren't currently implemented for c++ classes. 
+ return False + + def optimise_comparison(self, operand1, env, result_is_bool=False): + if self.find_special_bool_compare_function(env, operand1, result_is_bool): + self.is_pycmp = False + self.type = PyrexTypes.c_bint_type + if not operand1.type.is_pyobject: + operand1 = operand1.coerce_to_pyobject(env) + if self.cascade: + operand2 = self.cascade.optimise_comparison(self.operand2, env, result_is_bool) + if operand2 is not self.operand2: + self.coerced_operand2 = operand2 + return operand1 + + def coerce_operands_to_pyobjects(self, env): + self.operand2 = self.operand2.coerce_to_pyobject(env) + if self.operand2.type is dict_type and self.operator in ('in', 'not_in'): + self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable") + if self.cascade: + self.cascade.coerce_operands_to_pyobjects(env) + + def coerce_cascaded_operands_to_temp(self, env): + if self.cascade: + #self.operand2 = self.operand2.coerce_to_temp(env) #CTT + self.operand2 = self.operand2.coerce_to_simple(env) + self.cascade.coerce_cascaded_operands_to_temp(env) + + def generate_evaluation_code(self, code, result, operand1, needs_evaluation=False): + if self.type.is_pyobject: + code.putln("if (__Pyx_PyObject_IsTrue(%s)) {" % result) + code.put_decref(result, self.type) + else: + code.putln("if (%s) {" % result) + if needs_evaluation: + operand1.generate_evaluation_code(code) + self.operand2.generate_evaluation_code(code) + self.generate_operation_code(code, result, + operand1, self.operator, self.operand2) + if self.cascade: + self.cascade.generate_evaluation_code( + code, result, self.coerced_operand2 or self.operand2, + needs_evaluation=self.coerced_operand2 is not None) + if needs_evaluation: + operand1.generate_disposal_code(code) + operand1.free_temps(code) + # Cascaded cmp result is always temp + self.operand2.generate_disposal_code(code) + self.operand2.free_temps(code) + code.putln("}") + + def annotate(self, code): + self.operand2.annotate(code) + if self.cascade: + self.cascade.annotate(code) + + +binop_node_classes = { + "or": BoolBinopNode, + "and": BoolBinopNode, + "|": IntBinopNode, + "^": IntBinopNode, + "&": IntBinopNode, + "<<": IntBinopNode, + ">>": IntBinopNode, + "+": AddNode, + "-": SubNode, + "*": MulNode, + "@": MatMultNode, + "/": DivNode, + "//": DivNode, + "%": ModNode, + "**": PowNode, +} + + +def binop_node(pos, operator, operand1, operand2, inplace=False, **kwargs): + # Construct binop node of appropriate class for + # given operator. + return binop_node_classes[operator]( + pos, + operator=operator, + operand1=operand1, + operand2=operand2, + inplace=inplace, + **kwargs) + + +#------------------------------------------------------------------- +# +# Coercion nodes +# +# Coercion nodes are special in that they are created during +# the analyse_types phase of parse tree processing. +# Their __init__ methods consequently incorporate some aspects +# of that phase. +# +#------------------------------------------------------------------- + +class CoercionNode(ExprNode): + # Abstract base class for coercion nodes. 
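+    #  e.g. using a C int where a Python object is required wraps the
+    #  expression in a CoerceToPyTypeNode (see below).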
+    #
+    #  arg       ExprNode       node being coerced
+
+    subexprs = ['arg']
+    constant_result = not_a_constant
+
+    def __init__(self, arg):
+        super(CoercionNode, self).__init__(arg.pos)
+        self.arg = arg
+        if debug_coercion:
+            print("%s Coercing %s" % (self, self.arg))
+
+    def calculate_constant_result(self):
+        # constant folding can break type coercion, so this is disabled
+        pass
+
+    def annotate(self, code):
+        self.arg.annotate(code)
+        if self.arg.type != self.type:
+            file, line, col = self.pos
+            code.annotate((file, line, col-1), AnnotationItem(
+                style='coerce', tag='coerce', text='[%s] to [%s]' % (self.arg.type, self.type)))
+
+
+class CoerceToMemViewSliceNode(CoercionNode):
+    """
+    Coerce an object to a memoryview slice. This holds a new reference in
+    a managed temp.
+    """
+
+    def __init__(self, arg, dst_type, env):
+        assert dst_type.is_memoryviewslice
+        assert not arg.type.is_memoryviewslice
+        CoercionNode.__init__(self, arg)
+        self.type = dst_type
+        self.is_temp = 1
+        self.env = env
+        self.use_managed_ref = True
+        self.arg = arg
+
+    def generate_result_code(self, code):
+        self.type.create_from_py_utility_code(self.env)
+        code.putln(self.type.from_py_call_code(
+            self.arg.py_result(),
+            self.result(),
+            self.pos,
+            code
+        ))
+
+
+class CastNode(CoercionNode):
+    #  Wrap a node in a C type cast.
+
+    def __init__(self, arg, new_type):
+        CoercionNode.__init__(self, arg)
+        self.type = new_type
+
+    def may_be_none(self):
+        return self.arg.may_be_none()
+
+    def calculate_result_code(self):
+        return self.arg.result_as(self.type)
+
+    def generate_result_code(self, code):
+        self.arg.generate_result_code(code)
+
+
+class PyTypeTestNode(CoercionNode):
+    #  This node is used to check that a generic Python
+    #  object is an instance of a particular extension type.
+    #  This node borrows the result of its argument node.
+
+    exact_builtin_type = True
+
+    def __init__(self, arg, dst_type, env, notnone=False):
+        #  The arg is known to be a Python object, and
+        #  the dst_type is known to be an extension type.
+
+ assert dst_type.is_extension_type or dst_type.is_builtin_type, "PyTypeTest on non extension type" + CoercionNode.__init__(self, arg) + self.type = dst_type + self.result_ctype = arg.ctype() + self.notnone = notnone + + nogil_check = Node.gil_error + gil_message = "Python type test" + + def analyse_types(self, env): + return self + + def may_be_none(self): + if self.notnone: + return False + return self.arg.may_be_none() + + def is_simple(self): + return self.arg.is_simple() + + def result_in_temp(self): + return self.arg.result_in_temp() + + def is_ephemeral(self): + return self.arg.is_ephemeral() + + def nonlocally_immutable(self): + return self.arg.nonlocally_immutable() + + def reanalyse(self): + if self.type != self.arg.type or not self.arg.is_temp: + return self + if not self.type.typeobj_is_available(): + return self + if self.arg.may_be_none() and self.notnone: + return self.arg.as_none_safe_node("Cannot convert NoneType to %.200s" % self.type.name) + return self.arg + + def calculate_constant_result(self): + # FIXME + pass + + def calculate_result_code(self): + return self.arg.result() + + def generate_result_code(self, code): + if self.type.typeobj_is_available(): + if self.type.is_builtin_type: + type_test = self.type.type_test_code( + self.arg.py_result(), + self.notnone, exact=self.exact_builtin_type) + else: + type_test = self.type.type_test_code( + self.arg.py_result(), self.notnone) + code.globalstate.use_utility_code( + UtilityCode.load_cached("ExtTypeTest", "ObjectHandling.c")) + code.putln("if (!(%s)) %s" % ( + type_test, code.error_goto(self.pos))) + else: + error(self.pos, "Cannot test type of extern C class " + "without type object name specification") + + def generate_post_assignment_code(self, code): + self.arg.generate_post_assignment_code(code) + + def free_temps(self, code): + self.arg.free_temps(code) + + +class NoneCheckNode(CoercionNode): + # This node is used to check that a Python object is not None and + # raises an appropriate exception (as specified by the creating + # transform). 
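+    #
+    # The generated guard is roughly:
+    #   if (unlikely(obj == Py_None)) { PyErr_SetString(<exc>, "<msg>"); goto error; }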
+ + is_nonecheck = True + + def __init__(self, arg, exception_type_cname, exception_message, + exception_format_args=()): + CoercionNode.__init__(self, arg) + self.type = arg.type + self.result_ctype = arg.ctype() + self.exception_type_cname = exception_type_cname + self.exception_message = exception_message + self.exception_format_args = tuple(exception_format_args or ()) + + nogil_check = None # this node only guards an operation that would fail already + + def analyse_types(self, env): + return self + + def may_be_none(self): + return False + + def is_simple(self): + return self.arg.is_simple() + + def result_in_temp(self): + return self.arg.result_in_temp() + + def nonlocally_immutable(self): + return self.arg.nonlocally_immutable() + + def calculate_result_code(self): + return self.arg.result() + + def condition(self): + if self.type.is_pyobject: + return self.arg.py_result() + elif self.type.is_memoryviewslice: + return "((PyObject *) %s.memview)" % self.arg.result() + else: + raise Exception("unsupported type") + + @classmethod + def generate(cls, arg, code, exception_message, + exception_type_cname="PyExc_TypeError", exception_format_args=(), in_nogil_context=False): + node = cls(arg, exception_type_cname, exception_message, exception_format_args) + node.in_nogil_context = in_nogil_context + node.put_nonecheck(code) + + @classmethod + def generate_if_needed(cls, arg, code, exception_message, + exception_type_cname="PyExc_TypeError", exception_format_args=(), in_nogil_context=False): + if arg.may_be_none(): + cls.generate(arg, code, exception_message, exception_type_cname, exception_format_args, in_nogil_context) + + def put_nonecheck(self, code): + code.putln( + "if (unlikely(%s == Py_None)) {" % self.condition()) + + if self.in_nogil_context: + code.put_ensure_gil() + + escape = StringEncoding.escape_byte_string + if self.exception_format_args: + code.putln('PyErr_Format(%s, "%s", %s);' % ( + self.exception_type_cname, + StringEncoding.escape_byte_string( + self.exception_message.encode('UTF-8')), + ', '.join([ '"%s"' % escape(str(arg).encode('UTF-8')) + for arg in self.exception_format_args ]))) + else: + code.putln('PyErr_SetString(%s, "%s");' % ( + self.exception_type_cname, + escape(self.exception_message.encode('UTF-8')))) + + if self.in_nogil_context: + code.put_release_ensured_gil() + + code.putln(code.error_goto(self.pos)) + code.putln("}") + + def generate_result_code(self, code): + self.put_nonecheck(code) + + def generate_post_assignment_code(self, code): + self.arg.generate_post_assignment_code(code) + + def free_temps(self, code): + self.arg.free_temps(code) + + +class CoerceToPyTypeNode(CoercionNode): + # This node is used to convert a C data type + # to a Python object. 
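+    # e.g. a C long result is boxed via the type's to-Python utility code
+    # (typically a __Pyx_PyInt_From_long-style helper).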
+ + type = py_object_type + target_type = py_object_type + is_temp = 1 + + def __init__(self, arg, env, type=py_object_type): + if not arg.type.create_to_py_utility_code(env): + error(arg.pos, "Cannot convert '%s' to Python object" % arg.type) + elif arg.type.is_complex: + # special case: complex coercion is so complex that it + # uses a macro ("__pyx_PyComplex_FromComplex()"), for + # which the argument must be simple + arg = arg.coerce_to_simple(env) + CoercionNode.__init__(self, arg) + if type is py_object_type: + # be specific about some known types + if arg.type.is_string or arg.type.is_cpp_string: + self.type = default_str_type(env) + elif arg.type.is_pyunicode_ptr or arg.type.is_unicode_char: + self.type = unicode_type + elif arg.type.is_complex: + self.type = Builtin.complex_type + self.target_type = self.type + elif arg.type.is_string or arg.type.is_cpp_string: + if (type not in (bytes_type, bytearray_type) + and not env.directives['c_string_encoding']): + error(arg.pos, + "default encoding required for conversion from '%s' to '%s'" % + (arg.type, type)) + self.type = self.target_type = type + else: + # FIXME: check that the target type and the resulting type are compatible + self.target_type = type + + gil_message = "Converting to Python object" + + def may_be_none(self): + # FIXME: is this always safe? + return False + + def coerce_to_boolean(self, env): + arg_type = self.arg.type + if (arg_type == PyrexTypes.c_bint_type or + (arg_type.is_pyobject and arg_type.name == 'bool')): + return self.arg.coerce_to_temp(env) + else: + return CoerceToBooleanNode(self, env) + + def coerce_to_integer(self, env): + # If not already some C integer type, coerce to longint. + if self.arg.type.is_int: + return self.arg + else: + return self.arg.coerce_to(PyrexTypes.c_long_type, env) + + def analyse_types(self, env): + # The arg is always already analysed + return self + + def generate_result_code(self, code): + code.putln('%s; %s' % ( + self.arg.type.to_py_call_code( + self.arg.result(), + self.result(), + self.target_type), + code.error_goto_if_null(self.result(), self.pos))) + + code.put_gotref(self.py_result()) + + +class CoerceIntToBytesNode(CoerceToPyTypeNode): + # This node is used to convert a C int type to a Python bytes + # object. + + is_temp = 1 + + def __init__(self, arg, env): + arg = arg.coerce_to_simple(env) + CoercionNode.__init__(self, arg) + self.type = Builtin.bytes_type + + def generate_result_code(self, code): + arg = self.arg + arg_result = arg.result() + if arg.type not in (PyrexTypes.c_char_type, + PyrexTypes.c_uchar_type, + PyrexTypes.c_schar_type): + if arg.type.signed: + code.putln("if ((%s < 0) || (%s > 255)) {" % ( + arg_result, arg_result)) + else: + code.putln("if (%s > 255) {" % arg_result) + code.putln('PyErr_SetString(PyExc_OverflowError, ' + '"value too large to pack into a byte"); %s' % ( + code.error_goto(self.pos))) + code.putln('}') + temp = None + if arg.type is not PyrexTypes.c_char_type: + temp = code.funcstate.allocate_temp(PyrexTypes.c_char_type, manage_ref=False) + code.putln("%s = (char)%s;" % (temp, arg_result)) + arg_result = temp + code.putln('%s = PyBytes_FromStringAndSize(&%s, 1); %s' % ( + self.result(), + arg_result, + code.error_goto_if_null(self.result(), self.pos))) + if temp is not None: + code.funcstate.release_temp(temp) + code.put_gotref(self.py_result()) + + +class CoerceFromPyTypeNode(CoercionNode): + # This node is used to convert a Python object + # to a C data type. 
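+    # e.g. assigning a Python object to a C long unboxes it via the type's
+    # from-Python utility code (a __Pyx_PyInt_As_long-style helper).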
+ + def __init__(self, result_type, arg, env): + CoercionNode.__init__(self, arg) + self.type = result_type + self.is_temp = 1 + if not result_type.create_from_py_utility_code(env): + error(arg.pos, + "Cannot convert Python object to '%s'" % result_type) + if self.type.is_string or self.type.is_pyunicode_ptr: + if self.arg.is_name and self.arg.entry and self.arg.entry.is_pyglobal: + warning(arg.pos, + "Obtaining '%s' from externally modifiable global Python value" % result_type, + level=1) + + def analyse_types(self, env): + # The arg is always already analysed + return self + + def is_ephemeral(self): + return (self.type.is_ptr and not self.type.is_array) and self.arg.is_ephemeral() + + def generate_result_code(self, code): + from_py_function = None + # for certain source types, we can do better than the generic coercion + if self.type.is_string and self.arg.type is bytes_type: + if self.type.from_py_function.startswith('__Pyx_PyObject_As'): + from_py_function = '__Pyx_PyBytes' + self.type.from_py_function[len('__Pyx_PyObject'):] + NoneCheckNode.generate_if_needed(self.arg, code, "expected bytes, NoneType found") + + code.putln(self.type.from_py_call_code( + self.arg.py_result(), self.result(), self.pos, code, from_py_function=from_py_function)) + if self.type.is_pyobject: + code.put_gotref(self.py_result()) + + def nogil_check(self, env): + error(self.pos, "Coercion from Python not allowed without the GIL") + + +class CoerceToBooleanNode(CoercionNode): + # This node is used when a result needs to be used + # in a boolean context. + + type = PyrexTypes.c_bint_type + + _special_builtins = { + Builtin.list_type: 'PyList_GET_SIZE', + Builtin.tuple_type: 'PyTuple_GET_SIZE', + Builtin.set_type: 'PySet_GET_SIZE', + Builtin.frozenset_type: 'PySet_GET_SIZE', + Builtin.bytes_type: 'PyBytes_GET_SIZE', + Builtin.bytearray_type: 'PyByteArray_GET_SIZE', + Builtin.unicode_type: '__Pyx_PyUnicode_IS_TRUE', + } + + def __init__(self, arg, env): + CoercionNode.__init__(self, arg) + if arg.type.is_pyobject: + self.is_temp = 1 + + def nogil_check(self, env): + if self.arg.type.is_pyobject and self._special_builtins.get(self.arg.type) is None: + self.gil_error() + + gil_message = "Truth-testing Python object" + + def check_const(self): + if self.is_temp: + self.not_const() + return False + return self.arg.check_const() + + def calculate_result_code(self): + return "(%s != 0)" % self.arg.result() + + def generate_result_code(self, code): + if not self.is_temp: + return + test_func = self._special_builtins.get(self.arg.type) + if test_func is not None: + checks = ["(%s != Py_None)" % self.arg.py_result()] if self.arg.may_be_none() else [] + checks.append("(%s(%s) != 0)" % (test_func, self.arg.py_result())) + code.putln("%s = %s;" % (self.result(), '&&'.join(checks))) + else: + code.putln( + "%s = __Pyx_PyObject_IsTrue(%s); %s" % ( + self.result(), + self.arg.py_result(), + code.error_goto_if_neg(self.result(), self.pos))) + + +class CoerceToComplexNode(CoercionNode): + + def __init__(self, arg, dst_type, env): + if arg.type.is_complex: + arg = arg.coerce_to_simple(env) + self.type = dst_type + CoercionNode.__init__(self, arg) + dst_type.create_declaration_utility_code(env) + + def calculate_result_code(self): + if self.arg.type.is_complex: + real_part = "__Pyx_CREAL(%s)" % self.arg.result() + imag_part = "__Pyx_CIMAG(%s)" % self.arg.result() + else: + real_part = self.arg.result() + imag_part = "0" + return "%s(%s, %s)" % ( + self.type.from_parts, + real_part, + imag_part) + + def generate_result_code(self, 
code): + pass + +class CoerceToTempNode(CoercionNode): + # This node is used to force the result of another node + # to be stored in a temporary. It is only used if the + # argument node's result is not already in a temporary. + + def __init__(self, arg, env): + CoercionNode.__init__(self, arg) + self.type = self.arg.type.as_argument_type() + self.constant_result = self.arg.constant_result + self.is_temp = 1 + if self.type.is_pyobject: + self.result_ctype = py_object_type + + gil_message = "Creating temporary Python reference" + + def analyse_types(self, env): + # The arg is always already analysed + return self + + def coerce_to_boolean(self, env): + self.arg = self.arg.coerce_to_boolean(env) + if self.arg.is_simple(): + return self.arg + self.type = self.arg.type + self.result_ctype = self.type + return self + + def generate_result_code(self, code): + #self.arg.generate_evaluation_code(code) # Already done + # by generic generate_subexpr_evaluation_code! + code.putln("%s = %s;" % ( + self.result(), self.arg.result_as(self.ctype()))) + if self.use_managed_ref: + if self.type.is_pyobject: + code.put_incref(self.result(), self.ctype()) + elif self.type.is_memoryviewslice: + code.put_incref_memoryviewslice(self.result(), + not self.in_nogil_context) + +class ProxyNode(CoercionNode): + """ + A node that should not be replaced by transforms or other means, + and hence can be useful to wrap the argument to a clone node + + MyNode -> ProxyNode -> ArgNode + CloneNode -^ + """ + + nogil_check = None + + def __init__(self, arg): + super(ProxyNode, self).__init__(arg) + self.constant_result = arg.constant_result + self._proxy_type() + + def analyse_types(self, env): + self.arg = self.arg.analyse_expressions(env) + self._proxy_type() + return self + + def infer_type(self, env): + return self.arg.infer_type(env) + + def _proxy_type(self): + if hasattr(self.arg, 'type'): + self.type = self.arg.type + self.result_ctype = self.arg.result_ctype + if hasattr(self.arg, 'entry'): + self.entry = self.arg.entry + + def generate_result_code(self, code): + self.arg.generate_result_code(code) + + def result(self): + return self.arg.result() + + def is_simple(self): + return self.arg.is_simple() + + def may_be_none(self): + return self.arg.may_be_none() + + def generate_evaluation_code(self, code): + self.arg.generate_evaluation_code(code) + + def generate_disposal_code(self, code): + self.arg.generate_disposal_code(code) + + def free_temps(self, code): + self.arg.free_temps(code) + +class CloneNode(CoercionNode): + # This node is employed when the result of another node needs + # to be used multiple times. The argument node's result must + # be in a temporary. This node "borrows" the result from the + # argument node, and does not generate any evaluation or + # disposal code for it. The original owner of the argument + # node is responsible for doing those things. 
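+    #
+    # e.g. BoolBinopResultNode above evaluates its argument once behind a
+    # ProxyNode and reuses it through a CloneNode for the coerced result value.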
+ + subexprs = [] # Arg is not considered a subexpr + nogil_check = None + + def __init__(self, arg): + CoercionNode.__init__(self, arg) + self.constant_result = arg.constant_result + if hasattr(arg, 'type'): + self.type = arg.type + self.result_ctype = arg.result_ctype + if hasattr(arg, 'entry'): + self.entry = arg.entry + + def result(self): + return self.arg.result() + + def may_be_none(self): + return self.arg.may_be_none() + + def type_dependencies(self, env): + return self.arg.type_dependencies(env) + + def infer_type(self, env): + return self.arg.infer_type(env) + + def analyse_types(self, env): + self.type = self.arg.type + self.result_ctype = self.arg.result_ctype + self.is_temp = 1 + if hasattr(self.arg, 'entry'): + self.entry = self.arg.entry + return self + + def coerce_to(self, dest_type, env): + if self.arg.is_literal: + return self.arg.coerce_to(dest_type, env) + return super(CloneNode, self).coerce_to(dest_type, env) + + def is_simple(self): + return True # result is always in a temp (or a name) + + def generate_evaluation_code(self, code): + pass + + def generate_result_code(self, code): + pass + + def generate_disposal_code(self, code): + pass + + def free_temps(self, code): + pass + + +class CMethodSelfCloneNode(CloneNode): + # Special CloneNode for the self argument of builtin C methods + # that accepts subtypes of the builtin type. This is safe only + # for 'final' subtypes, as subtypes of the declared type may + # override the C method. + + def coerce_to(self, dst_type, env): + if dst_type.is_builtin_type and self.type.subtype_of(dst_type): + return self + return CloneNode.coerce_to(self, dst_type, env) + + +class ModuleRefNode(ExprNode): + # Simple returns the module object + + type = py_object_type + is_temp = False + subexprs = [] + + def analyse_types(self, env): + return self + + def may_be_none(self): + return False + + def calculate_result_code(self): + return Naming.module_cname + + def generate_result_code(self, code): + pass + +class DocstringRefNode(ExprNode): + # Extracts the docstring of the body element + + subexprs = ['body'] + type = py_object_type + is_temp = True + + def __init__(self, pos, body): + ExprNode.__init__(self, pos) + assert body.type.is_pyobject + self.body = body + + def analyse_types(self, env): + return self + + def generate_result_code(self, code): + code.putln('%s = __Pyx_GetAttr(%s, %s); %s' % ( + self.result(), self.body.result(), + code.intern_identifier(StringEncoding.EncodedString("__doc__")), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.result()) + + + +#------------------------------------------------------------------------------------ +# +# Runtime support code +# +#------------------------------------------------------------------------------------ + +pyerr_occurred_withgil_utility_code= UtilityCode( +proto = """ +static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void); /* proto */ +""", +impl = """ +static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void) { + int err; + #ifdef WITH_THREAD + PyGILState_STATE _save = PyGILState_Ensure(); + #endif + err = !!PyErr_Occurred(); + #ifdef WITH_THREAD + PyGILState_Release(_save); + #endif + return err; +} +""" +) + +#------------------------------------------------------------------------------------ + +raise_unbound_local_error_utility_code = UtilityCode( +proto = """ +static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname); +""", +impl = """ +static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) { + 
PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname); +} +""") + +raise_closure_name_error_utility_code = UtilityCode( +proto = """ +static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname); +""", +impl = """ +static CYTHON_INLINE void __Pyx_RaiseClosureNameError(const char *varname) { + PyErr_Format(PyExc_NameError, "free variable '%s' referenced before assignment in enclosing scope", varname); +} +""") + +# Don't inline the function, it should really never be called in production +raise_unbound_memoryview_utility_code_nogil = UtilityCode( +proto = """ +static void __Pyx_RaiseUnboundMemoryviewSliceNogil(const char *varname); +""", +impl = """ +static void __Pyx_RaiseUnboundMemoryviewSliceNogil(const char *varname) { + #ifdef WITH_THREAD + PyGILState_STATE gilstate = PyGILState_Ensure(); + #endif + __Pyx_RaiseUnboundLocalError(varname); + #ifdef WITH_THREAD + PyGILState_Release(gilstate); + #endif +} +""", +requires = [raise_unbound_local_error_utility_code]) + +#------------------------------------------------------------------------------------ + +raise_too_many_values_to_unpack = UtilityCode.load_cached("RaiseTooManyValuesToUnpack", "ObjectHandling.c") +raise_need_more_values_to_unpack = UtilityCode.load_cached("RaiseNeedMoreValuesToUnpack", "ObjectHandling.c") +tuple_unpacking_error_code = UtilityCode.load_cached("UnpackTupleError", "ObjectHandling.c") diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..c6c1035 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.pxd b/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.pxd new file mode 100644 index 0000000..c87370b --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.pxd @@ -0,0 +1,111 @@ +from __future__ import absolute_import + +cimport cython + +from .Visitor cimport CythonTransform, TreeVisitor + +cdef class ControlBlock: + cdef public set children + cdef public set parents + cdef public set positions + cdef public list stats + cdef public dict gen + cdef public set bounded + + # Big integer bitsets + cdef public object i_input + cdef public object i_output + cdef public object i_gen + cdef public object i_kill + cdef public object i_state + + cpdef bint empty(self) + cpdef detach(self) + cpdef add_child(self, block) + +cdef class ExitBlock(ControlBlock): + cpdef bint empty(self) + +cdef class NameAssignment: + cdef public bint is_arg + cdef public bint is_deletion + cdef public object lhs + cdef public object rhs + cdef public object entry + cdef public object pos + cdef public set refs + cdef public object bit + cdef public object inferred_type + +cdef class AssignmentList: + cdef public object bit + cdef public object mask + cdef public list stats + +cdef class AssignmentCollector(TreeVisitor): + cdef list assignments + +@cython.final +cdef class ControlFlow: + cdef public set blocks + cdef public set entries + cdef public list loops + cdef public list exceptions + + cdef public ControlBlock entry_point + cdef public ExitBlock exit_point + cdef public ControlBlock block + + cdef public dict assmts + + cpdef newblock(self, ControlBlock parent=*) + cpdef nextblock(self, ControlBlock parent=*) + cpdef 
bint is_tracked(self, entry) + cpdef bint is_statically_assigned(self, entry) + cpdef mark_position(self, node) + cpdef mark_assignment(self, lhs, rhs, entry) + cpdef mark_argument(self, lhs, rhs, entry) + cpdef mark_deletion(self, node, entry) + cpdef mark_reference(self, node, entry) + + @cython.locals(block=ControlBlock, parent=ControlBlock, unreachable=set) + cpdef normalize(self) + + @cython.locals(bit=object, assmts=AssignmentList, + block=ControlBlock) + cpdef initialize(self) + + @cython.locals(assmts=AssignmentList, assmt=NameAssignment) + cpdef set map_one(self, istate, entry) + + @cython.locals(block=ControlBlock, parent=ControlBlock) + cdef reaching_definitions(self) + +cdef class Uninitialized: + pass + +cdef class Unknown: + pass + + +cdef class MessageCollection: + cdef set messages + + +@cython.locals(dirty=bint, block=ControlBlock, parent=ControlBlock, + assmt=NameAssignment) +cdef check_definitions(ControlFlow flow, dict compiler_directives) + +@cython.final +cdef class ControlFlowAnalysis(CythonTransform): + cdef object gv_ctx + cdef object constant_folder + cdef set reductions + cdef list env_stack + cdef list stack + cdef object env + cdef ControlFlow flow + cdef bint in_inplace_assignment + + cpdef mark_assignment(self, lhs, rhs=*) + cpdef mark_position(self, node) diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.py b/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.py new file mode 100644 index 0000000..b4a9356 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/FlowControl.py @@ -0,0 +1,1313 @@ +from __future__ import absolute_import + +import cython +cython.declare(PyrexTypes=object, ExprNodes=object, Nodes=object, + Builtin=object, InternalError=object, error=object, warning=object, + py_object_type=object, unspecified_type=object, + object_expr=object, fake_rhs_expr=object, TypedExprNode=object) + +from . import Builtin +from . import ExprNodes +from . import Nodes +from . import Options +from .PyrexTypes import py_object_type, unspecified_type +from . import PyrexTypes + +from .Visitor import TreeVisitor, CythonTransform +from .Errors import error, warning, InternalError +from .Optimize import ConstantFolding + + +class TypedExprNode(ExprNodes.ExprNode): + # Used for declaring assignments of a specified type without a known entry. + def __init__(self, type, may_be_none=None, pos=None): + super(TypedExprNode, self).__init__(pos) + self.type = type + self._may_be_none = may_be_none + + def may_be_none(self): + return self._may_be_none != False + +object_expr = TypedExprNode(py_object_type, may_be_none=True) +# Fake rhs to silence "unused variable" warning +fake_rhs_expr = TypedExprNode(unspecified_type) + + +class ControlBlock(object): + """Control flow graph node. Sequence of assignments and name references. 
+ + children set of children nodes + parents set of parent nodes + positions set of position markers + + stats list of block statements + gen dict of assignments generated by this block + bounded set of entries that are definitely bounded in this block + + Example: + + a = 1 + b = a + c # 'c' is already bounded or exception here + + stats = [Assignment(a), NameReference(a), NameReference(c), + Assignment(b)] + gen = {Entry(a): Assignment(a), Entry(b): Assignment(b)} + bounded = set([Entry(a), Entry(c)]) + + """ + + def __init__(self): + self.children = set() + self.parents = set() + self.positions = set() + + self.stats = [] + self.gen = {} + self.bounded = set() + + self.i_input = 0 + self.i_output = 0 + self.i_gen = 0 + self.i_kill = 0 + self.i_state = 0 + + def empty(self): + return (not self.stats and not self.positions) + + def detach(self): + """Detach block from parents and children.""" + for child in self.children: + child.parents.remove(self) + for parent in self.parents: + parent.children.remove(self) + self.parents.clear() + self.children.clear() + + def add_child(self, block): + self.children.add(block) + block.parents.add(self) + + +class ExitBlock(ControlBlock): + """Non-empty exit point block.""" + + def empty(self): + return False + + +class AssignmentList(object): + def __init__(self): + self.stats = [] + + +class ControlFlow(object): + """Control-flow graph. + + entry_point ControlBlock entry point for this graph + exit_point ControlBlock normal exit point + block ControlBlock current block + blocks set children nodes + entries set tracked entries + loops list stack for loop descriptors + exceptions list stack for exception descriptors + """ + + def __init__(self): + self.blocks = set() + self.entries = set() + self.loops = [] + self.exceptions = [] + + self.entry_point = ControlBlock() + self.exit_point = ExitBlock() + self.blocks.add(self.exit_point) + self.block = self.entry_point + + def newblock(self, parent=None): + """Create floating block linked to `parent` if given. + + NOTE: Block is NOT added to self.blocks + """ + block = ControlBlock() + self.blocks.add(block) + if parent: + parent.add_child(block) + return block + + def nextblock(self, parent=None): + """Create block children block linked to current or `parent` if given. 
+ + NOTE: Block is added to self.blocks + """ + block = ControlBlock() + self.blocks.add(block) + if parent: + parent.add_child(block) + elif self.block: + self.block.add_child(block) + self.block = block + return self.block + + def is_tracked(self, entry): + if entry.is_anonymous: + return False + return (entry.is_local or entry.is_pyclass_attr or entry.is_arg or + entry.from_closure or entry.in_closure or + entry.error_on_uninitialized) + + def is_statically_assigned(self, entry): + if (entry.is_local and entry.is_variable and + (entry.type.is_struct_or_union or + entry.type.is_complex or + entry.type.is_array or + entry.type.is_cpp_class)): + # stack allocated structured variable => never uninitialised + return True + return False + + def mark_position(self, node): + """Mark position, will be used to draw graph nodes.""" + if self.block: + self.block.positions.add(node.pos[:2]) + + def mark_assignment(self, lhs, rhs, entry): + if self.block and self.is_tracked(entry): + assignment = NameAssignment(lhs, rhs, entry) + self.block.stats.append(assignment) + self.block.gen[entry] = assignment + self.entries.add(entry) + + def mark_argument(self, lhs, rhs, entry): + if self.block and self.is_tracked(entry): + assignment = Argument(lhs, rhs, entry) + self.block.stats.append(assignment) + self.block.gen[entry] = assignment + self.entries.add(entry) + + def mark_deletion(self, node, entry): + if self.block and self.is_tracked(entry): + assignment = NameDeletion(node, entry) + self.block.stats.append(assignment) + self.block.gen[entry] = Uninitialized + self.entries.add(entry) + + def mark_reference(self, node, entry): + if self.block and self.is_tracked(entry): + self.block.stats.append(NameReference(node, entry)) + ## XXX: We don't track expression evaluation order so we can't use + ## XXX: successful reference as initialization sign. 
+ ## # Local variable is definitely bound after this reference + ## if not node.allow_null: + ## self.block.bounded.add(entry) + self.entries.add(entry) + + def normalize(self): + """Delete unreachable and orphan blocks.""" + queue = set([self.entry_point]) + visited = set() + while queue: + root = queue.pop() + visited.add(root) + for child in root.children: + if child not in visited: + queue.add(child) + unreachable = self.blocks - visited + for block in unreachable: + block.detach() + visited.remove(self.entry_point) + for block in visited: + if block.empty(): + for parent in block.parents: # Re-parent + for child in block.children: + parent.add_child(child) + block.detach() + unreachable.add(block) + self.blocks -= unreachable + + def initialize(self): + """Set initial state, map assignments to bits.""" + self.assmts = {} + + bit = 1 + for entry in self.entries: + assmts = AssignmentList() + assmts.mask = assmts.bit = bit + self.assmts[entry] = assmts + bit <<= 1 + + for block in self.blocks: + for stat in block.stats: + if isinstance(stat, NameAssignment): + stat.bit = bit + assmts = self.assmts[stat.entry] + assmts.stats.append(stat) + assmts.mask |= bit + bit <<= 1 + + for block in self.blocks: + for entry, stat in block.gen.items(): + assmts = self.assmts[entry] + if stat is Uninitialized: + block.i_gen |= assmts.bit + else: + block.i_gen |= stat.bit + block.i_kill |= assmts.mask + block.i_output = block.i_gen + for entry in block.bounded: + block.i_kill |= self.assmts[entry].bit + + for assmts in self.assmts.values(): + self.entry_point.i_gen |= assmts.bit + self.entry_point.i_output = self.entry_point.i_gen + + def map_one(self, istate, entry): + ret = set() + assmts = self.assmts[entry] + if istate & assmts.bit: + if self.is_statically_assigned(entry): + ret.add(StaticAssignment(entry)) + elif entry.from_closure: + ret.add(Unknown) + else: + ret.add(Uninitialized) + for assmt in assmts.stats: + if istate & assmt.bit: + ret.add(assmt) + return ret + + def reaching_definitions(self): + """Per-block reaching definitions analysis.""" + dirty = True + while dirty: + dirty = False + for block in self.blocks: + i_input = 0 + for parent in block.parents: + i_input |= parent.i_output + i_output = (i_input & ~block.i_kill) | block.i_gen + if i_output != block.i_output: + dirty = True + block.i_input = i_input + block.i_output = i_output + + +class LoopDescr(object): + def __init__(self, next_block, loop_block): + self.next_block = next_block + self.loop_block = loop_block + self.exceptions = [] + + +class ExceptionDescr(object): + """Exception handling helper. 
+ + entry_point ControlBlock Exception handling entry point + finally_enter ControlBlock Normal finally clause entry point + finally_exit ControlBlock Normal finally clause exit point + """ + + def __init__(self, entry_point, finally_enter=None, finally_exit=None): + self.entry_point = entry_point + self.finally_enter = finally_enter + self.finally_exit = finally_exit + + +class NameAssignment(object): + def __init__(self, lhs, rhs, entry): + if lhs.cf_state is None: + lhs.cf_state = set() + self.lhs = lhs + self.rhs = rhs + self.entry = entry + self.pos = lhs.pos + self.refs = set() + self.is_arg = False + self.is_deletion = False + self.inferred_type = None + + def __repr__(self): + return '%s(entry=%r)' % (self.__class__.__name__, self.entry) + + def infer_type(self): + self.inferred_type = self.rhs.infer_type(self.entry.scope) + return self.inferred_type + + def type_dependencies(self): + return self.rhs.type_dependencies(self.entry.scope) + + @property + def type(self): + if not self.entry.type.is_unspecified: + return self.entry.type + return self.inferred_type + + +class StaticAssignment(NameAssignment): + """Initialised at declaration time, e.g. stack allocation.""" + def __init__(self, entry): + if not entry.type.is_pyobject: + may_be_none = False + else: + may_be_none = None # unknown + lhs = TypedExprNode( + entry.type, may_be_none=may_be_none, pos=entry.pos) + super(StaticAssignment, self).__init__(lhs, lhs, entry) + + def infer_type(self): + return self.entry.type + + def type_dependencies(self): + return () + + +class Argument(NameAssignment): + def __init__(self, lhs, rhs, entry): + NameAssignment.__init__(self, lhs, rhs, entry) + self.is_arg = True + + +class NameDeletion(NameAssignment): + def __init__(self, lhs, entry): + NameAssignment.__init__(self, lhs, lhs, entry) + self.is_deletion = True + + def infer_type(self): + inferred_type = self.rhs.infer_type(self.entry.scope) + if (not inferred_type.is_pyobject and + inferred_type.can_coerce_to_pyobject(self.entry.scope)): + return py_object_type + self.inferred_type = inferred_type + return inferred_type + + +class Uninitialized(object): + """Definitely not initialised yet.""" + + +class Unknown(object): + """Coming from outer closure, might be initialised or not.""" + + +class NameReference(object): + def __init__(self, node, entry): + if node.cf_state is None: + node.cf_state = set() + self.node = node + self.entry = entry + self.pos = node.pos + + def __repr__(self): + return '%s(entry=%r)' % (self.__class__.__name__, self.entry) + + +class ControlFlowState(list): + # Keeps track of Node's entry assignments + # + # cf_is_null [boolean] It is uninitialized + # cf_maybe_null [boolean] May be uninitialized + # is_single [boolean] Has only one assignment at this point + + cf_maybe_null = False + cf_is_null = False + is_single = False + + def __init__(self, state): + if Uninitialized in state: + state.discard(Uninitialized) + self.cf_maybe_null = True + if not state: + self.cf_is_null = True + elif Unknown in state: + state.discard(Unknown) + self.cf_maybe_null = True + else: + if len(state) == 1: + self.is_single = True + # XXX: Remove fake_rhs_expr + super(ControlFlowState, self).__init__( + [i for i in state if i.rhs is not fake_rhs_expr]) + + def one(self): + return self[0] + + +class GVContext(object): + """Graphviz subgraph object.""" + + def __init__(self): + self.blockids = {} + self.nextid = 0 + self.children = [] + self.sources = {} + + def add(self, child): + self.children.append(child) + + def nodeid(self, 
block): + if block not in self.blockids: + self.blockids[block] = 'block%d' % self.nextid + self.nextid += 1 + return self.blockids[block] + + def extract_sources(self, block): + if not block.positions: + return '' + start = min(block.positions) + stop = max(block.positions) + srcdescr = start[0] + if not srcdescr in self.sources: + self.sources[srcdescr] = list(srcdescr.get_lines()) + lines = self.sources[srcdescr] + return '\\n'.join([l.strip() for l in lines[start[1] - 1:stop[1]]]) + + def render(self, fp, name, annotate_defs=False): + """Render graphviz dot graph""" + fp.write('digraph %s {\n' % name) + fp.write(' node [shape=box];\n') + for child in self.children: + child.render(fp, self, annotate_defs) + fp.write('}\n') + + def escape(self, text): + return text.replace('"', '\\"').replace('\n', '\\n') + + +class GV(object): + """Graphviz DOT renderer.""" + + def __init__(self, name, flow): + self.name = name + self.flow = flow + + def render(self, fp, ctx, annotate_defs=False): + fp.write(' subgraph %s {\n' % self.name) + for block in self.flow.blocks: + label = ctx.extract_sources(block) + if annotate_defs: + for stat in block.stats: + if isinstance(stat, NameAssignment): + label += '\n %s [%s %s]' % ( + stat.entry.name, 'deletion' if stat.is_deletion else 'definition', stat.pos[1]) + elif isinstance(stat, NameReference): + if stat.entry: + label += '\n %s [reference %s]' % (stat.entry.name, stat.pos[1]) + if not label: + label = 'empty' + pid = ctx.nodeid(block) + fp.write(' %s [label="%s"];\n' % (pid, ctx.escape(label))) + for block in self.flow.blocks: + pid = ctx.nodeid(block) + for child in block.children: + fp.write(' %s -> %s;\n' % (pid, ctx.nodeid(child))) + fp.write(' }\n') + + +class MessageCollection(object): + """Collect error/warnings messages first then sort""" + def __init__(self): + self.messages = set() + + def error(self, pos, message): + self.messages.add((pos, True, message)) + + def warning(self, pos, message): + self.messages.add((pos, False, message)) + + def report(self): + for pos, is_error, message in sorted(self.messages): + if is_error: + error(pos, message) + else: + warning(pos, message, 2) + + +def check_definitions(flow, compiler_directives): + flow.initialize() + flow.reaching_definitions() + + # Track down state + assignments = set() + # Node to entry map + references = {} + assmt_nodes = set() + + for block in flow.blocks: + i_state = block.i_input + for stat in block.stats: + i_assmts = flow.assmts[stat.entry] + state = flow.map_one(i_state, stat.entry) + if isinstance(stat, NameAssignment): + stat.lhs.cf_state.update(state) + assmt_nodes.add(stat.lhs) + i_state = i_state & ~i_assmts.mask + if stat.is_deletion: + i_state |= i_assmts.bit + else: + i_state |= stat.bit + assignments.add(stat) + if stat.rhs is not fake_rhs_expr: + stat.entry.cf_assignments.append(stat) + elif isinstance(stat, NameReference): + references[stat.node] = stat.entry + stat.entry.cf_references.append(stat) + stat.node.cf_state.update(state) + ## if not stat.node.allow_null: + ## i_state &= ~i_assmts.bit + ## # after successful read, the state is known to be initialised + state.discard(Uninitialized) + state.discard(Unknown) + for assmt in state: + assmt.refs.add(stat) + + # Check variable usage + warn_maybe_uninitialized = compiler_directives['warn.maybe_uninitialized'] + warn_unused_result = compiler_directives['warn.unused_result'] + warn_unused = compiler_directives['warn.unused'] + warn_unused_arg = compiler_directives['warn.unused_arg'] + + messages = 
MessageCollection() + + # assignment hints + for node in assmt_nodes: + if Uninitialized in node.cf_state: + node.cf_maybe_null = True + if len(node.cf_state) == 1: + node.cf_is_null = True + else: + node.cf_is_null = False + elif Unknown in node.cf_state: + node.cf_maybe_null = True + else: + node.cf_is_null = False + node.cf_maybe_null = False + + # Find uninitialized references and cf-hints + for node, entry in references.items(): + if Uninitialized in node.cf_state: + node.cf_maybe_null = True + if not entry.from_closure and len(node.cf_state) == 1: + node.cf_is_null = True + if (node.allow_null or entry.from_closure + or entry.is_pyclass_attr or entry.type.is_error): + pass # Can be uninitialized here + elif node.cf_is_null: + if entry.error_on_uninitialized or ( + Options.error_on_uninitialized and ( + entry.type.is_pyobject or entry.type.is_unspecified)): + messages.error( + node.pos, + "local variable '%s' referenced before assignment" + % entry.name) + else: + messages.warning( + node.pos, + "local variable '%s' referenced before assignment" + % entry.name) + elif warn_maybe_uninitialized: + messages.warning( + node.pos, + "local variable '%s' might be referenced before assignment" + % entry.name) + elif Unknown in node.cf_state: + # TODO: better cross-closure analysis to know when inner functions + # are being called before a variable is being set, and when + # a variable is known to be set before even defining the + # inner function, etc. + node.cf_maybe_null = True + else: + node.cf_is_null = False + node.cf_maybe_null = False + + # Unused result + for assmt in assignments: + if (not assmt.refs and not assmt.entry.is_pyclass_attr + and not assmt.entry.in_closure): + if assmt.entry.cf_references and warn_unused_result: + if assmt.is_arg: + messages.warning(assmt.pos, "Unused argument value '%s'" % + assmt.entry.name) + else: + messages.warning(assmt.pos, "Unused result in '%s'" % + assmt.entry.name) + assmt.lhs.cf_used = False + + # Unused entries + for entry in flow.entries: + if (not entry.cf_references + and not entry.is_pyclass_attr): + if entry.name != '_' and not entry.name.startswith('unused'): + # '_' is often used for unused variables, e.g. 
in loops
+                if entry.is_arg:
+                    if warn_unused_arg:
+                        messages.warning(entry.pos, "Unused argument '%s'" %
+                                         entry.name)
+                else:
+                    if warn_unused:
+                        messages.warning(entry.pos, "Unused entry '%s'" %
+                                         entry.name)
+                entry.cf_used = False
+
+    messages.report()
+
+    for node in assmt_nodes:
+        node.cf_state = ControlFlowState(node.cf_state)
+    for node in references:
+        node.cf_state = ControlFlowState(node.cf_state)
+
+
+class AssignmentCollector(TreeVisitor):
+    def __init__(self):
+        super(AssignmentCollector, self).__init__()
+        self.assignments = []
+
+    def visit_Node(self, node):
+        # Generic fallback: recurse into child nodes looking for assignments.
+        self.visitchildren(node)
+
+    def visit_SingleAssignmentNode(self, node):
+        self.assignments.append((node.lhs, node.rhs))
+
+    def visit_CascadedAssignmentNode(self, node):
+        for lhs in node.lhs_list:
+            self.assignments.append((lhs, node.rhs))
+
+
+class ControlFlowAnalysis(CythonTransform):
+
+    def visit_ModuleNode(self, node):
+        self.gv_ctx = GVContext()
+        self.constant_folder = ConstantFolding()
+
+        # Set of NameNode reductions
+        self.reductions = set()
+
+        self.in_inplace_assignment = False
+        self.env_stack = []
+        self.env = node.scope
+        self.stack = []
+        self.flow = ControlFlow()
+        self.visitchildren(node)
+
+        check_definitions(self.flow, self.current_directives)
+
+        dot_output = self.current_directives['control_flow.dot_output']
+        if dot_output:
+            annotate_defs = self.current_directives['control_flow.dot_annotate_defs']
+            fp = open(dot_output, 'wt')
+            try:
+                self.gv_ctx.render(fp, 'module', annotate_defs=annotate_defs)
+            finally:
+                fp.close()
+        return node
+
+    def visit_FuncDefNode(self, node):
+        for arg in node.args:
+            if arg.default:
+                self.visitchildren(arg)
+        self.visitchildren(node, ('decorators',))
+        self.env_stack.append(self.env)
+        self.env = node.local_scope
+        self.stack.append(self.flow)
+        self.flow = ControlFlow()
+
+        # Collect all entries
+        for entry in node.local_scope.entries.values():
+            if self.flow.is_tracked(entry):
+                self.flow.entries.add(entry)
+
+        self.mark_position(node)
+        # Function body block
+        self.flow.nextblock()
+
+        for arg in node.args:
+            self._visit(arg)
+        if node.star_arg:
+            self.flow.mark_argument(node.star_arg,
+                                    TypedExprNode(Builtin.tuple_type,
+                                                  may_be_none=False),
+                                    node.star_arg.entry)
+        if node.starstar_arg:
+            self.flow.mark_argument(node.starstar_arg,
+                                    TypedExprNode(Builtin.dict_type,
+                                                  may_be_none=False),
+                                    node.starstar_arg.entry)
+        self._visit(node.body)
+        # Workaround for generators
+        if node.is_generator:
+            self._visit(node.gbody.body)
+
+        # Exit point
+        if self.flow.block:
+            self.flow.block.add_child(self.flow.exit_point)
+
+        # Cleanup graph
+        self.flow.normalize()
+        check_definitions(self.flow, self.current_directives)
+        self.flow.blocks.add(self.flow.entry_point)
+
+        self.gv_ctx.add(GV(node.local_scope.name, self.flow))
+
+        self.flow = self.stack.pop()
+        self.env = self.env_stack.pop()
+        return node
+
+    def visit_DefNode(self, node):
+        node.used = True
+        return self.visit_FuncDefNode(node)
+
+    def visit_GeneratorBodyDefNode(self, node):
+        return node
+
+    def visit_CTypeDefNode(self, node):
+        return node
+
+    def mark_assignment(self, lhs, rhs=None):
+        if not self.flow.block:
+            return
+        if self.flow.exceptions:
+            exc_descr = self.flow.exceptions[-1]
+            self.flow.block.add_child(exc_descr.entry_point)
+            self.flow.nextblock()
+
+        if not rhs:
+            rhs = object_expr
+        if lhs.is_name:
+            if lhs.entry is not None:
+                entry = lhs.entry
+            else:
+                entry = self.env.lookup(lhs.name)
+            if entry is None:  # TODO: This shouldn't happen...
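+                # Defensive: with no entry there is nothing to track, so bail out.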
+ return + self.flow.mark_assignment(lhs, rhs, entry) + elif lhs.is_sequence_constructor: + for i, arg in enumerate(lhs.args): + if not rhs or arg.is_starred: + item_node = None + else: + item_node = rhs.inferable_item_node(i) + self.mark_assignment(arg, item_node) + else: + self._visit(lhs) + + if self.flow.exceptions: + exc_descr = self.flow.exceptions[-1] + self.flow.block.add_child(exc_descr.entry_point) + self.flow.nextblock() + + def mark_position(self, node): + """Mark position if DOT output is enabled.""" + if self.current_directives['control_flow.dot_output']: + self.flow.mark_position(node) + + def visit_FromImportStatNode(self, node): + for name, target in node.items: + if name != "*": + self.mark_assignment(target) + self.visitchildren(node) + return node + + def visit_AssignmentNode(self, node): + raise InternalError("Unhandled assignment node") + + def visit_SingleAssignmentNode(self, node): + self._visit(node.rhs) + self.mark_assignment(node.lhs, node.rhs) + return node + + def visit_CascadedAssignmentNode(self, node): + self._visit(node.rhs) + for lhs in node.lhs_list: + self.mark_assignment(lhs, node.rhs) + return node + + def visit_ParallelAssignmentNode(self, node): + collector = AssignmentCollector() + collector.visitchildren(node) + for lhs, rhs in collector.assignments: + self._visit(rhs) + for lhs, rhs in collector.assignments: + self.mark_assignment(lhs, rhs) + return node + + def visit_InPlaceAssignmentNode(self, node): + self.in_inplace_assignment = True + self.visitchildren(node) + self.in_inplace_assignment = False + self.mark_assignment(node.lhs, self.constant_folder(node.create_binop_node())) + return node + + def visit_DelStatNode(self, node): + for arg in node.args: + if arg.is_name: + entry = arg.entry or self.env.lookup(arg.name) + if entry.in_closure or entry.from_closure: + error(arg.pos, + "can not delete variable '%s' " + "referenced in nested scope" % entry.name) + if not node.ignore_nonexisting: + self._visit(arg) # mark reference + self.flow.mark_deletion(arg, entry) + else: + self._visit(arg) + return node + + def visit_CArgDeclNode(self, node): + entry = self.env.lookup(node.name) + if entry: + may_be_none = not node.not_none + self.flow.mark_argument( + node, TypedExprNode(entry.type, may_be_none), entry) + return node + + def visit_NameNode(self, node): + if self.flow.block: + entry = node.entry or self.env.lookup(node.name) + if entry: + self.flow.mark_reference(node, entry) + + if entry in self.reductions and not self.in_inplace_assignment: + error(node.pos, + "Cannot read reduction variable in loop body") + + return node + + def visit_StatListNode(self, node): + if self.flow.block: + for stat in node.stats: + self._visit(stat) + if not self.flow.block: + stat.is_terminator = True + break + return node + + def visit_Node(self, node): + self.visitchildren(node) + self.mark_position(node) + return node + + def visit_IfStatNode(self, node): + next_block = self.flow.newblock() + parent = self.flow.block + # If clauses + for clause in node.if_clauses: + parent = self.flow.nextblock(parent) + self._visit(clause.condition) + self.flow.nextblock() + self._visit(clause.body) + if self.flow.block: + self.flow.block.add_child(next_block) + # Else clause + if node.else_clause: + self.flow.nextblock(parent=parent) + self._visit(node.else_clause) + if self.flow.block: + self.flow.block.add_child(next_block) + else: + parent.add_child(next_block) + + if next_block.parents: + self.flow.block = next_block + else: + self.flow.block = None + return node + + def 
visit_WhileStatNode(self, node): + condition_block = self.flow.nextblock() + next_block = self.flow.newblock() + # Condition block + self.flow.loops.append(LoopDescr(next_block, condition_block)) + if node.condition: + self._visit(node.condition) + # Body block + self.flow.nextblock() + self._visit(node.body) + self.flow.loops.pop() + # Loop it + if self.flow.block: + self.flow.block.add_child(condition_block) + self.flow.block.add_child(next_block) + # Else clause + if node.else_clause: + self.flow.nextblock(parent=condition_block) + self._visit(node.else_clause) + if self.flow.block: + self.flow.block.add_child(next_block) + else: + condition_block.add_child(next_block) + + if next_block.parents: + self.flow.block = next_block + else: + self.flow.block = None + return node + + def mark_forloop_target(self, node): + # TODO: Remove redundancy with range optimization... + is_special = False + sequence = node.iterator.sequence + target = node.target + if isinstance(sequence, ExprNodes.SimpleCallNode): + function = sequence.function + if sequence.self is None and function.is_name: + entry = self.env.lookup(function.name) + if not entry or entry.is_builtin: + if function.name == 'reversed' and len(sequence.args) == 1: + sequence = sequence.args[0] + elif function.name == 'enumerate' and len(sequence.args) == 1: + if target.is_sequence_constructor and len(target.args) == 2: + iterator = sequence.args[0] + if iterator.is_name: + iterator_type = iterator.infer_type(self.env) + if iterator_type.is_builtin_type: + # assume that builtin types have a length within Py_ssize_t + self.mark_assignment( + target.args[0], + ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX', + type=PyrexTypes.c_py_ssize_t_type)) + target = target.args[1] + sequence = sequence.args[0] + if isinstance(sequence, ExprNodes.SimpleCallNode): + function = sequence.function + if sequence.self is None and function.is_name: + entry = self.env.lookup(function.name) + if not entry or entry.is_builtin: + if function.name in ('range', 'xrange'): + is_special = True + for arg in sequence.args[:2]: + self.mark_assignment(target, arg) + if len(sequence.args) > 2: + self.mark_assignment(target, self.constant_folder( + ExprNodes.binop_node(node.pos, + '+', + sequence.args[0], + sequence.args[2]))) + + if not is_special: + # A for-loop basically translates to subsequent calls to + # __getitem__(), so using an IndexNode here allows us to + # naturally infer the base type of pointers, C arrays, + # Python strings, etc., while correctly falling back to an + # object type when the base type cannot be handled. 
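+            #
+            # Illustrative sketch (not generated code): for type inference,
+            #     for x in seq: ...
+            # is treated roughly like the repeated assignment
+            #     x = seq[i]
+            # with `node.item` standing in for the seq[i] expression.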
+ + self.mark_assignment(target, node.item) + + def visit_AsyncForStatNode(self, node): + return self.visit_ForInStatNode(node) + + def visit_ForInStatNode(self, node): + condition_block = self.flow.nextblock() + next_block = self.flow.newblock() + # Condition with iterator + self.flow.loops.append(LoopDescr(next_block, condition_block)) + self._visit(node.iterator) + # Target assignment + self.flow.nextblock() + + if isinstance(node, Nodes.ForInStatNode): + self.mark_forloop_target(node) + elif isinstance(node, Nodes.AsyncForStatNode): + # not entirely correct, but good enough for now + self.mark_assignment(node.target, node.item) + else: # Parallel + self.mark_assignment(node.target) + + # Body block + if isinstance(node, Nodes.ParallelRangeNode): + # In case of an invalid + self._delete_privates(node, exclude=node.target.entry) + + self.flow.nextblock() + self._visit(node.body) + self.flow.loops.pop() + + # Loop it + if self.flow.block: + self.flow.block.add_child(condition_block) + # Else clause + if node.else_clause: + self.flow.nextblock(parent=condition_block) + self._visit(node.else_clause) + if self.flow.block: + self.flow.block.add_child(next_block) + else: + condition_block.add_child(next_block) + + if next_block.parents: + self.flow.block = next_block + else: + self.flow.block = None + return node + + def _delete_privates(self, node, exclude=None): + for private_node in node.assigned_nodes: + if not exclude or private_node.entry is not exclude: + self.flow.mark_deletion(private_node, private_node.entry) + + def visit_ParallelRangeNode(self, node): + reductions = self.reductions + + # if node.target is None or not a NameNode, an error will have + # been previously issued + if hasattr(node.target, 'entry'): + self.reductions = set(reductions) + + for private_node in node.assigned_nodes: + private_node.entry.error_on_uninitialized = True + pos, reduction = node.assignments[private_node.entry] + if reduction: + self.reductions.add(private_node.entry) + + node = self.visit_ForInStatNode(node) + + self.reductions = reductions + return node + + def visit_ParallelWithBlockNode(self, node): + for private_node in node.assigned_nodes: + private_node.entry.error_on_uninitialized = True + + self._delete_privates(node) + self.visitchildren(node) + self._delete_privates(node) + + return node + + def visit_ForFromStatNode(self, node): + condition_block = self.flow.nextblock() + next_block = self.flow.newblock() + # Condition with iterator + self.flow.loops.append(LoopDescr(next_block, condition_block)) + self._visit(node.bound1) + self._visit(node.bound2) + if node.step is not None: + self._visit(node.step) + # Target assignment + self.flow.nextblock() + self.mark_assignment(node.target, node.bound1) + if node.step is not None: + self.mark_assignment(node.target, self.constant_folder( + ExprNodes.binop_node(node.pos, '+', node.bound1, node.step))) + # Body block + self.flow.nextblock() + self._visit(node.body) + self.flow.loops.pop() + # Loop it + if self.flow.block: + self.flow.block.add_child(condition_block) + # Else clause + if node.else_clause: + self.flow.nextblock(parent=condition_block) + self._visit(node.else_clause) + if self.flow.block: + self.flow.block.add_child(next_block) + else: + condition_block.add_child(next_block) + + if next_block.parents: + self.flow.block = next_block + else: + self.flow.block = None + return node + + def visit_LoopNode(self, node): + raise InternalError("Generic loops are not supported") + + def visit_WithTargetAssignmentStatNode(self, node): + 
self.mark_assignment(node.lhs, node.with_node.enter_call) + return node + + def visit_WithStatNode(self, node): + self._visit(node.manager) + self._visit(node.enter_call) + self._visit(node.body) + return node + + def visit_TryExceptStatNode(self, node): + # After exception handling + next_block = self.flow.newblock() + # Body block + self.flow.newblock() + # Exception entry point + entry_point = self.flow.newblock() + self.flow.exceptions.append(ExceptionDescr(entry_point)) + self.flow.nextblock() + ## XXX: links to exception handling point should be added by + ## XXX: children nodes + self.flow.block.add_child(entry_point) + self.flow.nextblock() + self._visit(node.body) + self.flow.exceptions.pop() + + # After exception + if self.flow.block: + if node.else_clause: + self.flow.nextblock() + self._visit(node.else_clause) + if self.flow.block: + self.flow.block.add_child(next_block) + + for clause in node.except_clauses: + self.flow.block = entry_point + if clause.pattern: + for pattern in clause.pattern: + self._visit(pattern) + else: + # TODO: handle * pattern + pass + entry_point = self.flow.newblock(parent=self.flow.block) + self.flow.nextblock() + if clause.target: + self.mark_assignment(clause.target) + self._visit(clause.body) + if self.flow.block: + self.flow.block.add_child(next_block) + + if self.flow.exceptions: + entry_point.add_child(self.flow.exceptions[-1].entry_point) + + if next_block.parents: + self.flow.block = next_block + else: + self.flow.block = None + return node + + def visit_TryFinallyStatNode(self, node): + body_block = self.flow.nextblock() + + # Exception entry point + entry_point = self.flow.newblock() + self.flow.block = entry_point + self._visit(node.finally_except_clause) + + if self.flow.block and self.flow.exceptions: + self.flow.block.add_child(self.flow.exceptions[-1].entry_point) + + # Normal execution + finally_enter = self.flow.newblock() + self.flow.block = finally_enter + self._visit(node.finally_clause) + finally_exit = self.flow.block + + descr = ExceptionDescr(entry_point, finally_enter, finally_exit) + self.flow.exceptions.append(descr) + if self.flow.loops: + self.flow.loops[-1].exceptions.append(descr) + self.flow.block = body_block + ## XXX: Is it still required + body_block.add_child(entry_point) + self.flow.nextblock() + self._visit(node.body) + self.flow.exceptions.pop() + if self.flow.loops: + self.flow.loops[-1].exceptions.pop() + + if self.flow.block: + self.flow.block.add_child(finally_enter) + if finally_exit: + self.flow.block = self.flow.nextblock(parent=finally_exit) + else: + self.flow.block = None + return node + + def visit_RaiseStatNode(self, node): + self.mark_position(node) + self.visitchildren(node) + if self.flow.exceptions: + self.flow.block.add_child(self.flow.exceptions[-1].entry_point) + self.flow.block = None + return node + + def visit_ReraiseStatNode(self, node): + self.mark_position(node) + if self.flow.exceptions: + self.flow.block.add_child(self.flow.exceptions[-1].entry_point) + self.flow.block = None + return node + + def visit_ReturnStatNode(self, node): + self.mark_position(node) + self.visitchildren(node) + + for exception in self.flow.exceptions[::-1]: + if exception.finally_enter: + self.flow.block.add_child(exception.finally_enter) + if exception.finally_exit: + exception.finally_exit.add_child(self.flow.exit_point) + break + else: + if self.flow.block: + self.flow.block.add_child(self.flow.exit_point) + self.flow.block = None + return node + + def visit_BreakStatNode(self, node): + if not 
self.flow.loops: + #error(node.pos, "break statement not inside loop") + return node + loop = self.flow.loops[-1] + self.mark_position(node) + for exception in loop.exceptions[::-1]: + if exception.finally_enter: + self.flow.block.add_child(exception.finally_enter) + if exception.finally_exit: + exception.finally_exit.add_child(loop.next_block) + break + else: + self.flow.block.add_child(loop.next_block) + self.flow.block = None + return node + + def visit_ContinueStatNode(self, node): + if not self.flow.loops: + #error(node.pos, "continue statement not inside loop") + return node + loop = self.flow.loops[-1] + self.mark_position(node) + for exception in loop.exceptions[::-1]: + if exception.finally_enter: + self.flow.block.add_child(exception.finally_enter) + if exception.finally_exit: + exception.finally_exit.add_child(loop.loop_block) + break + else: + self.flow.block.add_child(loop.loop_block) + self.flow.block = None + return node + + def visit_ComprehensionNode(self, node): + if node.expr_scope: + self.env_stack.append(self.env) + self.env = node.expr_scope + # Skip append node here + self._visit(node.loop) + if node.expr_scope: + self.env = self.env_stack.pop() + return node + + def visit_ScopedExprNode(self, node): + if node.expr_scope: + self.env_stack.append(self.env) + self.env = node.expr_scope + self.visitchildren(node) + if node.expr_scope: + self.env = self.env_stack.pop() + return node + + def visit_PyClassDefNode(self, node): + self.visitchildren(node, ('dict', 'metaclass', + 'mkw', 'bases', 'class_result')) + self.flow.mark_assignment(node.target, node.classobj, + self.env.lookup(node.name)) + self.env_stack.append(self.env) + self.env = node.scope + self.flow.nextblock() + self.visitchildren(node, ('body',)) + self.flow.nextblock() + self.env = self.env_stack.pop() + return node + + def visit_AmpersandNode(self, node): + if node.operand.is_name: + # Fake assignment to silence warning + self.mark_assignment(node.operand, fake_rhs_expr) + self.visitchildren(node) + return node diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/FusedNode.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/Cython/Compiler/FusedNode.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..916e201 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Compiler/FusedNode.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/FusedNode.py b/venv/lib/python3.8/site-packages/Cython/Compiler/FusedNode.py new file mode 100644 index 0000000..87a9ae2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/FusedNode.py @@ -0,0 +1,895 @@ +from __future__ import absolute_import + +import copy + +from . import (ExprNodes, PyrexTypes, MemoryView, + ParseTreeTransforms, StringEncoding, Errors) +from .ExprNodes import CloneNode, ProxyNode, TupleNode +from .Nodes import FuncDefNode, CFuncDefNode, StatListNode, DefNode +from ..Utils import OrderedSet + + +class FusedCFuncDefNode(StatListNode): + """ + This node replaces a function with fused arguments. It deep-copies the + function for every permutation of fused types, and allocates a new local + scope for it. It keeps track of the original function in self.node, and + the entry of the original function in the symbol table is given the + 'fused_cfunction' attribute which points back to us. + Then when a function lookup occurs (to e.g. call it), the call can be + dispatched to the right function. 
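+
+    When called from Python, dispatch happens at runtime: the generated
+    __pyx_fused_cpdef() helper (see make_fused_cpdef) inspects the actual
+    argument types and selects the matching specialization.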
+ + node FuncDefNode the original function + nodes [FuncDefNode] list of copies of node with different specific types + py_func DefNode the fused python function subscriptable from + Python space + __signatures__ A DictNode mapping signature specialization strings + to PyCFunction nodes + resulting_fused_function PyCFunction for the fused DefNode that delegates + to specializations + fused_func_assignment Assignment of the fused function to the function name + defaults_tuple TupleNode of defaults (letting PyCFunctionNode build + defaults would result in many different tuples) + specialized_pycfuncs List of synthesized pycfunction nodes for the + specializations + code_object CodeObjectNode shared by all specializations and the + fused function + + fused_compound_types All fused (compound) types (e.g. floating[:]) + """ + + __signatures__ = None + resulting_fused_function = None + fused_func_assignment = None + defaults_tuple = None + decorators = None + + child_attrs = StatListNode.child_attrs + [ + '__signatures__', 'resulting_fused_function', 'fused_func_assignment'] + + def __init__(self, node, env): + super(FusedCFuncDefNode, self).__init__(node.pos) + + self.nodes = [] + self.node = node + + is_def = isinstance(self.node, DefNode) + if is_def: + # self.node.decorators = [] + self.copy_def(env) + else: + self.copy_cdef(env) + + # Perform some sanity checks. If anything fails, it's a bug + for n in self.nodes: + assert not n.entry.type.is_fused + assert not n.local_scope.return_type.is_fused + if node.return_type.is_fused: + assert not n.return_type.is_fused + + if not is_def and n.cfunc_declarator.optional_arg_count: + assert n.type.op_arg_struct + + node.entry.fused_cfunction = self + # Copy the nodes as AnalyseDeclarationsTransform will prepend + # self.py_func to self.stats, as we only want specialized + # CFuncDefNodes in self.nodes + self.stats = self.nodes[:] + + def copy_def(self, env): + """ + Create a copy of the original def or lambda function for specialized + versions. 
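+
+        One specialized copy is created per permutation of the function's
+        fused base types, each with its own local scope.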
+ """ + fused_compound_types = PyrexTypes.unique( + [arg.type for arg in self.node.args if arg.type.is_fused]) + fused_types = self._get_fused_base_types(fused_compound_types) + permutations = PyrexTypes.get_all_specialized_permutations(fused_types) + + self.fused_compound_types = fused_compound_types + + if self.node.entry in env.pyfunc_entries: + env.pyfunc_entries.remove(self.node.entry) + + for cname, fused_to_specific in permutations: + copied_node = copy.deepcopy(self.node) + # keep signature object identity for special casing in DefNode.analyse_declarations() + copied_node.entry.signature = self.node.entry.signature + + self._specialize_function_args(copied_node.args, fused_to_specific) + copied_node.return_type = self.node.return_type.specialize( + fused_to_specific) + + copied_node.analyse_declarations(env) + # copied_node.is_staticmethod = self.node.is_staticmethod + # copied_node.is_classmethod = self.node.is_classmethod + self.create_new_local_scope(copied_node, env, fused_to_specific) + self.specialize_copied_def(copied_node, cname, self.node.entry, + fused_to_specific, fused_compound_types) + + PyrexTypes.specialize_entry(copied_node.entry, cname) + copied_node.entry.used = True + env.entries[copied_node.entry.name] = copied_node.entry + + if not self.replace_fused_typechecks(copied_node): + break + + self.orig_py_func = self.node + self.py_func = self.make_fused_cpdef(self.node, env, is_def=True) + + def copy_cdef(self, env): + """ + Create a copy of the original c(p)def function for all specialized + versions. + """ + permutations = self.node.type.get_all_specialized_permutations() + # print 'Node %s has %d specializations:' % (self.node.entry.name, + # len(permutations)) + # import pprint; pprint.pprint([d for cname, d in permutations]) + + # Prevent copying of the python function + self.orig_py_func = orig_py_func = self.node.py_func + self.node.py_func = None + if orig_py_func: + env.pyfunc_entries.remove(orig_py_func.entry) + + fused_types = self.node.type.get_fused_types() + self.fused_compound_types = fused_types + + new_cfunc_entries = [] + for cname, fused_to_specific in permutations: + copied_node = copy.deepcopy(self.node) + + # Make the types in our CFuncType specific. + type = copied_node.type.specialize(fused_to_specific) + entry = copied_node.entry + type.specialize_entry(entry, cname) + + # Reuse existing Entries (e.g. from .pxd files). 
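+            # An existing entry matches when its cname and resolved type agree;
+            # only genuinely new specializations are collected in
+            # new_cfunc_entries and spliced into env.cfunc_entries below.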
+ for i, orig_entry in enumerate(env.cfunc_entries): + if entry.cname == orig_entry.cname and type.same_as_resolved_type(orig_entry.type): + copied_node.entry = env.cfunc_entries[i] + if not copied_node.entry.func_cname: + copied_node.entry.func_cname = entry.func_cname + entry = copied_node.entry + type = entry.type + break + else: + new_cfunc_entries.append(entry) + + copied_node.type = type + entry.type, type.entry = type, entry + + entry.used = (entry.used or + self.node.entry.defined_in_pxd or + env.is_c_class_scope or + entry.is_cmethod) + + if self.node.cfunc_declarator.optional_arg_count: + self.node.cfunc_declarator.declare_optional_arg_struct( + type, env, fused_cname=cname) + + copied_node.return_type = type.return_type + self.create_new_local_scope(copied_node, env, fused_to_specific) + + # Make the argument types in the CFuncDeclarator specific + self._specialize_function_args(copied_node.cfunc_declarator.args, + fused_to_specific) + + # If a cpdef, declare all specialized cpdefs (this + # also calls analyse_declarations) + copied_node.declare_cpdef_wrapper(env) + if copied_node.py_func: + env.pyfunc_entries.remove(copied_node.py_func.entry) + + self.specialize_copied_def( + copied_node.py_func, cname, self.node.entry.as_variable, + fused_to_specific, fused_types) + + if not self.replace_fused_typechecks(copied_node): + break + + # replace old entry with new entries + try: + cindex = env.cfunc_entries.index(self.node.entry) + except ValueError: + env.cfunc_entries.extend(new_cfunc_entries) + else: + env.cfunc_entries[cindex:cindex+1] = new_cfunc_entries + + if orig_py_func: + self.py_func = self.make_fused_cpdef(orig_py_func, env, + is_def=False) + else: + self.py_func = orig_py_func + + def _get_fused_base_types(self, fused_compound_types): + """ + Get a list of unique basic fused types, from a list of + (possibly) compound fused types. + """ + base_types = [] + seen = set() + for fused_type in fused_compound_types: + fused_type.get_fused_types(result=base_types, seen=seen) + return base_types + + def _specialize_function_args(self, args, fused_to_specific): + for arg in args: + if arg.type.is_fused: + arg.type = arg.type.specialize(fused_to_specific) + if arg.type.is_memoryviewslice: + arg.type.validate_memslice_dtype(arg.pos) + + def create_new_local_scope(self, node, env, f2s): + """ + Create a new local scope for the copied node and append it to + self.nodes. A new local scope is needed because the arguments with the + fused types are already in the local scope, and we need the specialized + entries created after analyse_declarations on each specialized version + of the (CFunc)DefNode. 
+ f2s is a dict mapping each fused type to its specialized version + """ + node.create_local_scope(env) + node.local_scope.fused_to_specific = f2s + + # This is copied from the original function, set it to false to + # stop recursion + node.has_fused_arguments = False + self.nodes.append(node) + + def specialize_copied_def(self, node, cname, py_entry, f2s, fused_compound_types): + """Specialize the copy of a DefNode given the copied node, + the specialization cname and the original DefNode entry""" + fused_types = self._get_fused_base_types(fused_compound_types) + type_strings = [ + PyrexTypes.specialization_signature_string(fused_type, f2s) + for fused_type in fused_types + ] + + node.specialized_signature_string = '|'.join(type_strings) + + node.entry.pymethdef_cname = PyrexTypes.get_fused_cname( + cname, node.entry.pymethdef_cname) + node.entry.doc = py_entry.doc + node.entry.doc_cname = py_entry.doc_cname + + def replace_fused_typechecks(self, copied_node): + """ + Branch-prune fused type checks like + + if fused_t is int: + ... + + Returns whether an error was issued and whether we should stop in + in order to prevent a flood of errors. + """ + num_errors = Errors.num_errors + transform = ParseTreeTransforms.ReplaceFusedTypeChecks( + copied_node.local_scope) + transform(copied_node) + + if Errors.num_errors > num_errors: + return False + + return True + + def _fused_instance_checks(self, normal_types, pyx_code, env): + """ + Generate Cython code for instance checks, matching an object to + specialized types. + """ + for specialized_type in normal_types: + # all_numeric = all_numeric and specialized_type.is_numeric + pyx_code.context.update( + py_type_name=specialized_type.py_type_name(), + specialized_type_name=specialized_type.specialization_string, + ) + pyx_code.put_chunk( + u""" + if isinstance(arg, {{py_type_name}}): + dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'; break + """) + + def _dtype_name(self, dtype): + if dtype.is_typedef: + return '___pyx_%s' % dtype + return str(dtype).replace(' ', '_') + + def _dtype_type(self, dtype): + if dtype.is_typedef: + return self._dtype_name(dtype) + return str(dtype) + + def _sizeof_dtype(self, dtype): + if dtype.is_pyobject: + return 'sizeof(void *)' + else: + return "sizeof(%s)" % self._dtype_type(dtype) + + def _buffer_check_numpy_dtype_setup_cases(self, pyx_code): + "Setup some common cases to match dtypes against specializations" + if pyx_code.indenter("if kind in b'iu':"): + pyx_code.putln("pass") + pyx_code.named_insertion_point("dtype_int") + pyx_code.dedent() + + if pyx_code.indenter("elif kind == b'f':"): + pyx_code.putln("pass") + pyx_code.named_insertion_point("dtype_float") + pyx_code.dedent() + + if pyx_code.indenter("elif kind == b'c':"): + pyx_code.putln("pass") + pyx_code.named_insertion_point("dtype_complex") + pyx_code.dedent() + + if pyx_code.indenter("elif kind == b'O':"): + pyx_code.putln("pass") + pyx_code.named_insertion_point("dtype_object") + pyx_code.dedent() + + match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'" + no_match = "dest_sig[{{dest_sig_idx}}] = None" + def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types, pythran_types): + """ + Match a numpy dtype object to the individual specializations. 
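+        A specialization matches when itemsize, dtype kind, signedness (for
+        integer dtypes) and the number of dimensions all agree.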
+ """ + self._buffer_check_numpy_dtype_setup_cases(pyx_code) + + for specialized_type in pythran_types+specialized_buffer_types: + final_type = specialized_type + if specialized_type.is_pythran_expr: + specialized_type = specialized_type.org_buffer + dtype = specialized_type.dtype + pyx_code.context.update( + itemsize_match=self._sizeof_dtype(dtype) + " == itemsize", + signed_match="not (%s_is_signed ^ dtype_signed)" % self._dtype_name(dtype), + dtype=dtype, + specialized_type_name=final_type.specialization_string) + + dtypes = [ + (dtype.is_int, pyx_code.dtype_int), + (dtype.is_float, pyx_code.dtype_float), + (dtype.is_complex, pyx_code.dtype_complex) + ] + + for dtype_category, codewriter in dtypes: + if dtype_category: + cond = '{{itemsize_match}} and (arg.ndim) == %d' % ( + specialized_type.ndim,) + if dtype.is_int: + cond += ' and {{signed_match}}' + + if final_type.is_pythran_expr: + cond += ' and arg_is_pythran_compatible' + + if codewriter.indenter("if %s:" % cond): + #codewriter.putln("print 'buffer match found based on numpy dtype'") + codewriter.putln(self.match) + codewriter.putln("break") + codewriter.dedent() + + def _buffer_parse_format_string_check(self, pyx_code, decl_code, + specialized_type, env): + """ + For each specialized type, try to coerce the object to a memoryview + slice of that type. This means obtaining a buffer and parsing the + format string. + TODO: separate buffer acquisition from format parsing + """ + dtype = specialized_type.dtype + if specialized_type.is_buffer: + axes = [('direct', 'strided')] * specialized_type.ndim + else: + axes = specialized_type.axes + + memslice_type = PyrexTypes.MemoryViewSliceType(dtype, axes) + memslice_type.create_from_py_utility_code(env) + pyx_code.context.update( + coerce_from_py_func=memslice_type.from_py_function, + dtype=dtype) + decl_code.putln( + "{{memviewslice_cname}} {{coerce_from_py_func}}(object, int)") + + pyx_code.context.update( + specialized_type_name=specialized_type.specialization_string, + sizeof_dtype=self._sizeof_dtype(dtype)) + + pyx_code.put_chunk( + u""" + # try {{dtype}} + if itemsize == -1 or itemsize == {{sizeof_dtype}}: + memslice = {{coerce_from_py_func}}(arg, 0) + if memslice.memview: + __PYX_XDEC_MEMVIEW(&memslice, 1) + # print 'found a match for the buffer through format parsing' + %s + break + else: + __pyx_PyErr_Clear() + """ % self.match) + + def _buffer_checks(self, buffer_types, pythran_types, pyx_code, decl_code, env): + """ + Generate Cython code to match objects to buffer specializations. + First try to get a numpy dtype object and match it against the individual + specializations. If that fails, try naively to coerce the object + to each specialization, which obtains the buffer each time and tries + to match the format string. 
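+        The generated code breaks out of the enclosing loop on the first
+        successful match.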
+ """ + # The first thing to find a match in this loop breaks out of the loop + pyx_code.put_chunk( + u""" + """ + (u"arg_is_pythran_compatible = False" if pythran_types else u"") + u""" + if ndarray is not None: + if isinstance(arg, ndarray): + dtype = arg.dtype + """ + (u"arg_is_pythran_compatible = True" if pythran_types else u"") + u""" + elif __pyx_memoryview_check(arg): + arg_base = arg.base + if isinstance(arg_base, ndarray): + dtype = arg_base.dtype + else: + dtype = None + else: + dtype = None + + itemsize = -1 + if dtype is not None: + itemsize = dtype.itemsize + kind = ord(dtype.kind) + dtype_signed = kind == 'i' + """) + pyx_code.indent(2) + if pythran_types: + pyx_code.put_chunk( + u""" + # Pythran only supports the endianness of the current compiler + byteorder = dtype.byteorder + if byteorder == "<" and not __Pyx_Is_Little_Endian(): + arg_is_pythran_compatible = False + elif byteorder == ">" and __Pyx_Is_Little_Endian(): + arg_is_pythran_compatible = False + if arg_is_pythran_compatible: + cur_stride = itemsize + shape = arg.shape + strides = arg.strides + for i in range(arg.ndim-1, -1, -1): + if (strides[i]) != cur_stride: + arg_is_pythran_compatible = False + break + cur_stride *= shape[i] + else: + arg_is_pythran_compatible = not (arg.flags.f_contiguous and (arg.ndim) > 1) + """) + pyx_code.named_insertion_point("numpy_dtype_checks") + self._buffer_check_numpy_dtype(pyx_code, buffer_types, pythran_types) + pyx_code.dedent(2) + + for specialized_type in buffer_types: + self._buffer_parse_format_string_check( + pyx_code, decl_code, specialized_type, env) + + def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types, pythran_types): + """ + If we have any buffer specializations, write out some variable + declarations and imports. + """ + decl_code.put_chunk( + u""" + ctypedef struct {{memviewslice_cname}}: + void *memview + + void __PYX_XDEC_MEMVIEW({{memviewslice_cname}} *, int have_gil) + bint __pyx_memoryview_check(object) + """) + + pyx_code.local_variable_declarations.put_chunk( + u""" + cdef {{memviewslice_cname}} memslice + cdef Py_ssize_t itemsize + cdef bint dtype_signed + cdef char kind + + itemsize = -1 + """) + + if pythran_types: + pyx_code.local_variable_declarations.put_chunk(u""" + cdef bint arg_is_pythran_compatible + cdef Py_ssize_t cur_stride + """) + + pyx_code.imports.put_chunk( + u""" + cdef type ndarray + ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable() + """) + + seen_typedefs = set() + seen_int_dtypes = set() + for buffer_type in all_buffer_types: + dtype = buffer_type.dtype + dtype_name = self._dtype_name(dtype) + if dtype.is_typedef: + if dtype_name not in seen_typedefs: + seen_typedefs.add(dtype_name) + decl_code.putln( + 'ctypedef %s %s "%s"' % (dtype.resolve(), dtype_name, + dtype.empty_declaration_code())) + + if buffer_type.dtype.is_int: + if str(dtype) not in seen_int_dtypes: + seen_int_dtypes.add(str(dtype)) + pyx_code.context.update(dtype_name=dtype_name, + dtype_type=self._dtype_type(dtype)) + pyx_code.local_variable_declarations.put_chunk( + u""" + cdef bint {{dtype_name}}_is_signed + {{dtype_name}}_is_signed = not (<{{dtype_type}}> -1 > 0) + """) + + def _split_fused_types(self, arg): + """ + Specialize fused types and split into normal types and buffer types. 
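+
+        For example (illustrative): a fused type over int, double and
+        object splits into two normal types (int, double), no buffer or
+        pythran types, and has_object_fallback=True.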
+        """
+        specialized_types = PyrexTypes.get_specialized_types(arg.type)
+
+        # Prefer long over int, etc by sorting (see type classes in PyrexTypes.py)
+        specialized_types.sort()
+
+        seen_py_type_names = set()
+        normal_types, buffer_types, pythran_types = [], [], []
+        has_object_fallback = False
+        for specialized_type in specialized_types:
+            py_type_name = specialized_type.py_type_name()
+            if py_type_name:
+                if py_type_name in seen_py_type_names:
+                    continue
+                seen_py_type_names.add(py_type_name)
+                if py_type_name == 'object':
+                    has_object_fallback = True
+                else:
+                    normal_types.append(specialized_type)
+            elif specialized_type.is_pythran_expr:
+                pythran_types.append(specialized_type)
+            elif specialized_type.is_buffer or specialized_type.is_memoryviewslice:
+                buffer_types.append(specialized_type)
+
+        return normal_types, buffer_types, pythran_types, has_object_fallback
+
+    def _unpack_argument(self, pyx_code):
+        pyx_code.put_chunk(
+            u"""
+                # PROCESSING ARGUMENT {{arg_tuple_idx}}
+                if {{arg_tuple_idx}} < len(args):
+                    arg = (args)[{{arg_tuple_idx}}]
+                elif kwargs is not None and '{{arg.name}}' in kwargs:
+                    arg = (kwargs)['{{arg.name}}']
+                else:
+                {{if arg.default}}
+                    arg = (defaults)[{{default_idx}}]
+                {{else}}
+                    {{if arg_tuple_idx < min_positional_args}}
+                        raise TypeError("Expected at least %d argument%s, got %d" % (
+                            {{min_positional_args}}, {{'"s"' if min_positional_args != 1 else '""'}}, len(args)))
+                    {{else}}
+                        raise TypeError("Missing keyword-only argument: '%s'" % "{{arg.name}}")
+                    {{endif}}
+                {{endif}}
+            """)
+
+    def make_fused_cpdef(self, orig_py_func, env, is_def):
+        """
+        This creates the function that is indexable from Python and does
+        runtime dispatch based on the argument types. The function gets the
+        arg tuple and kwargs dict (or None) and the defaults tuple
+        as arguments from the Binding Fused Function's tp_call.
+        """
+        from . import TreeFragment, Code, UtilityCode
+
+        fused_types = self._get_fused_base_types([
+            arg.type for arg in self.node.args if arg.type.is_fused])
+
+        context = {
+            'memviewslice_cname': MemoryView.memviewslice_cname,
+            'func_args': self.node.args,
+            'n_fused': len(fused_types),
+            'min_positional_args':
+                self.node.num_required_args - self.node.num_required_kw_args
+                if is_def else
+                sum(1 for arg in self.node.args if arg.default is None),
+            'name': orig_py_func.entry.name,
+        }
+
+        pyx_code = Code.PyxCodeWriter(context=context)
+        decl_code = Code.PyxCodeWriter(context=context)
+        decl_code.put_chunk(
+            u"""
+                cdef extern from *:
+                    void __pyx_PyErr_Clear "PyErr_Clear" ()
+                    type __Pyx_ImportNumPyArrayTypeIfAvailable()
+                    int __Pyx_Is_Little_Endian()
+            """)
+        decl_code.indent()
+
+        pyx_code.put_chunk(
+            u"""
+                def __pyx_fused_cpdef(signatures, args, kwargs, defaults):
+                    # FIXME: use a typed signature - currently fails badly because
+                    #        default arguments inherit the types we specify here!
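+                    # dest_sig collects, per fused argument, the name of the
+                    # matched specialization (or None when nothing matches)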
+ + dest_sig = [None] * {{n_fused}} + + if kwargs is not None and not kwargs: + kwargs = None + + cdef Py_ssize_t i + + # instance check body + """) + + pyx_code.indent() # indent following code to function body + pyx_code.named_insertion_point("imports") + pyx_code.named_insertion_point("func_defs") + pyx_code.named_insertion_point("local_variable_declarations") + + fused_index = 0 + default_idx = 0 + all_buffer_types = OrderedSet() + seen_fused_types = set() + for i, arg in enumerate(self.node.args): + if arg.type.is_fused: + arg_fused_types = arg.type.get_fused_types() + if len(arg_fused_types) > 1: + raise NotImplementedError("Determination of more than one fused base " + "type per argument is not implemented.") + fused_type = arg_fused_types[0] + + if arg.type.is_fused and fused_type not in seen_fused_types: + seen_fused_types.add(fused_type) + + context.update( + arg_tuple_idx=i, + arg=arg, + dest_sig_idx=fused_index, + default_idx=default_idx, + ) + + normal_types, buffer_types, pythran_types, has_object_fallback = self._split_fused_types(arg) + self._unpack_argument(pyx_code) + + # 'unrolled' loop, first match breaks out of it + if pyx_code.indenter("while 1:"): + if normal_types: + self._fused_instance_checks(normal_types, pyx_code, env) + if buffer_types or pythran_types: + env.use_utility_code(Code.UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c")) + self._buffer_checks(buffer_types, pythran_types, pyx_code, decl_code, env) + if has_object_fallback: + pyx_code.context.update(specialized_type_name='object') + pyx_code.putln(self.match) + else: + pyx_code.putln(self.no_match) + pyx_code.putln("break") + pyx_code.dedent() + + fused_index += 1 + all_buffer_types.update(buffer_types) + all_buffer_types.update(ty.org_buffer for ty in pythran_types) + + if arg.default: + default_idx += 1 + + if all_buffer_types: + self._buffer_declarations(pyx_code, decl_code, all_buffer_types, pythran_types) + env.use_utility_code(Code.UtilityCode.load_cached("Import", "ImportExport.c")) + env.use_utility_code(Code.UtilityCode.load_cached("ImportNumPyArray", "ImportExport.c")) + + pyx_code.put_chunk( + u""" + candidates = [] + for sig in signatures: + match_found = False + src_sig = sig.strip('()').split('|') + for i in range(len(dest_sig)): + dst_type = dest_sig[i] + if dst_type is not None: + if src_sig[i] == dst_type: + match_found = True + else: + match_found = False + break + + if match_found: + candidates.append(sig) + + if not candidates: + raise TypeError("No matching signature found") + elif len(candidates) > 1: + raise TypeError("Function call with ambiguous argument types") + else: + return (signatures)[candidates[0]] + """) + + fragment_code = pyx_code.getvalue() + # print decl_code.getvalue() + # print fragment_code + from .Optimize import ConstantFolding + fragment = TreeFragment.TreeFragment( + fragment_code, level='module', pipeline=[ConstantFolding()]) + ast = TreeFragment.SetPosTransform(self.node.pos)(fragment.root) + UtilityCode.declare_declarations_in_scope( + decl_code.getvalue(), env.global_scope()) + ast.scope = env + # FIXME: for static methods of cdef classes, we build the wrong signature here: first arg becomes 'self' + ast.analyse_declarations(env) + py_func = ast.stats[-1] # the DefNode + self.fragment_scope = ast.scope + + if isinstance(self.node, DefNode): + py_func.specialized_cpdefs = self.nodes[:] + else: + py_func.specialized_cpdefs = [n.py_func for n in self.nodes] + + return py_func + + def update_fused_defnode_entry(self, env): + copy_attributes = ( + 
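+            # entry attributes carried over verbatim from the original
+            # (pre-specialization) DefNode entry: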
'name', 'pos', 'cname', 'func_cname', 'pyfunc_cname', + 'pymethdef_cname', 'doc', 'doc_cname', 'is_member', + 'scope' + ) + + entry = self.py_func.entry + + for attr in copy_attributes: + setattr(entry, attr, + getattr(self.orig_py_func.entry, attr)) + + self.py_func.name = self.orig_py_func.name + self.py_func.doc = self.orig_py_func.doc + + env.entries.pop('__pyx_fused_cpdef', None) + if isinstance(self.node, DefNode): + env.entries[entry.name] = entry + else: + env.entries[entry.name].as_variable = entry + + env.pyfunc_entries.append(entry) + + self.py_func.entry.fused_cfunction = self + for node in self.nodes: + if isinstance(self.node, DefNode): + node.fused_py_func = self.py_func + else: + node.py_func.fused_py_func = self.py_func + node.entry.as_variable = entry + + self.synthesize_defnodes() + self.stats.append(self.__signatures__) + + def analyse_expressions(self, env): + """ + Analyse the expressions. Take care to only evaluate default arguments + once and clone the result for all specializations + """ + for fused_compound_type in self.fused_compound_types: + for fused_type in fused_compound_type.get_fused_types(): + for specialization_type in fused_type.types: + if specialization_type.is_complex: + specialization_type.create_declaration_utility_code(env) + + if self.py_func: + self.__signatures__ = self.__signatures__.analyse_expressions(env) + self.py_func = self.py_func.analyse_expressions(env) + self.resulting_fused_function = self.resulting_fused_function.analyse_expressions(env) + self.fused_func_assignment = self.fused_func_assignment.analyse_expressions(env) + + self.defaults = defaults = [] + + for arg in self.node.args: + if arg.default: + arg.default = arg.default.analyse_expressions(env) + defaults.append(ProxyNode(arg.default)) + else: + defaults.append(None) + + for i, stat in enumerate(self.stats): + stat = self.stats[i] = stat.analyse_expressions(env) + if isinstance(stat, FuncDefNode): + for arg, default in zip(stat.args, defaults): + if default is not None: + arg.default = CloneNode(default).coerce_to(arg.type, env) + + if self.py_func: + args = [CloneNode(default) for default in defaults if default] + self.defaults_tuple = TupleNode(self.pos, args=args) + self.defaults_tuple = self.defaults_tuple.analyse_types(env, skip_children=True).coerce_to_pyobject(env) + self.defaults_tuple = ProxyNode(self.defaults_tuple) + self.code_object = ProxyNode(self.specialized_pycfuncs[0].code_object) + + fused_func = self.resulting_fused_function.arg + fused_func.defaults_tuple = CloneNode(self.defaults_tuple) + fused_func.code_object = CloneNode(self.code_object) + + for i, pycfunc in enumerate(self.specialized_pycfuncs): + pycfunc.code_object = CloneNode(self.code_object) + pycfunc = self.specialized_pycfuncs[i] = pycfunc.analyse_types(env) + pycfunc.defaults_tuple = CloneNode(self.defaults_tuple) + return self + + def synthesize_defnodes(self): + """ + Create the __signatures__ dict of PyCFunctionNode specializations. 
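+
+        The result maps each specialized signature string to its bound
+        PyCFunctionNode, e.g. (illustrative) {'long': <specialization>,
+        'double': <specialization>}.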
+ """ + if isinstance(self.nodes[0], CFuncDefNode): + nodes = [node.py_func for node in self.nodes] + else: + nodes = self.nodes + + signatures = [StringEncoding.EncodedString(node.specialized_signature_string) + for node in nodes] + keys = [ExprNodes.StringNode(node.pos, value=sig) + for node, sig in zip(nodes, signatures)] + values = [ExprNodes.PyCFunctionNode.from_defnode(node, binding=True) + for node in nodes] + + self.__signatures__ = ExprNodes.DictNode.from_pairs(self.pos, zip(keys, values)) + + self.specialized_pycfuncs = values + for pycfuncnode in values: + pycfuncnode.is_specialization = True + + def generate_function_definitions(self, env, code): + if self.py_func: + self.py_func.pymethdef_required = True + self.fused_func_assignment.generate_function_definitions(env, code) + + for stat in self.stats: + if isinstance(stat, FuncDefNode) and stat.entry.used: + code.mark_pos(stat.pos) + stat.generate_function_definitions(env, code) + + def generate_execution_code(self, code): + # Note: all def function specialization are wrapped in PyCFunction + # nodes in the self.__signatures__ dictnode. + for default in self.defaults: + if default is not None: + default.generate_evaluation_code(code) + + if self.py_func: + self.defaults_tuple.generate_evaluation_code(code) + self.code_object.generate_evaluation_code(code) + + for stat in self.stats: + code.mark_pos(stat.pos) + if isinstance(stat, ExprNodes.ExprNode): + stat.generate_evaluation_code(code) + else: + stat.generate_execution_code(code) + + if self.__signatures__: + self.resulting_fused_function.generate_evaluation_code(code) + + code.putln( + "((__pyx_FusedFunctionObject *) %s)->__signatures__ = %s;" % + (self.resulting_fused_function.result(), + self.__signatures__.result())) + code.put_giveref(self.__signatures__.result()) + + self.fused_func_assignment.generate_execution_code(code) + + # Dispose of results + self.resulting_fused_function.generate_disposal_code(code) + self.defaults_tuple.generate_disposal_code(code) + self.code_object.generate_disposal_code(code) + + for default in self.defaults: + if default is not None: + default.generate_disposal_code(code) + + def annotate(self, code): + for stat in self.stats: + stat.annotate(code) diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Future.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Future.py new file mode 100644 index 0000000..848792e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Future.py @@ -0,0 +1,15 @@ +def _get_feature(name): + import __future__ + # fall back to a unique fake object for earlier Python versions or Python 3 + return getattr(__future__, name, object()) + +unicode_literals = _get_feature("unicode_literals") +with_statement = _get_feature("with_statement") # dummy +division = _get_feature("division") +print_function = _get_feature("print_function") +absolute_import = _get_feature("absolute_import") +nested_scopes = _get_feature("nested_scopes") # dummy +generators = _get_feature("generators") # dummy +generator_stop = _get_feature("generator_stop") + +del _get_feature diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Interpreter.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Interpreter.py new file mode 100644 index 0000000..9ec391f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Interpreter.py @@ -0,0 +1,64 @@ +""" +This module deals with interpreting the parse tree as Python +would have done, in the compiler. 
+
+For now this only covers parse tree to value conversion of
+compile-time values.
+"""
+
+from __future__ import absolute_import
+
+import sys
+
+from .Nodes import *
+from .ExprNodes import *
+from .Errors import CompileError
+
+
+class EmptyScope(object):
+    def lookup(self, name):
+        return None
+
+empty_scope = EmptyScope()
+
+def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=()):
+    """
+    Tries to interpret a list of compile time option nodes.
+    The result will be a tuple (optlist, optdict) in which all
+    expression nodes have been interpreted. The result is
+    in the form of tuples (value, pos).
+
+    optlist is a list of nodes, while optdict is a DictNode (the
+    result optdict is a dict)
+
+    If type_env is set, all type nodes will be analysed and the resulting
+    type set. Otherwise only interpretable ExprNodes
+    are allowed; other nodes raise errors.
+
+    A CompileError will be raised if there are problems.
+    """
+
+    def interpret(node, ix):
+        if ix in type_args:
+            if type_env:
+                type = node.analyse_as_type(type_env)
+                if not type:
+                    raise CompileError(node.pos, "Invalid type.")
+                return (type, node.pos)
+            else:
+                raise CompileError(node.pos, "Type not allowed here.")
+        else:
+            if (sys.version_info[0] >= 3 and
+                    isinstance(node, StringNode) and
+                    node.unicode_value is not None):
+                return (node.unicode_value, node.pos)
+            return (node.compile_time_value(empty_scope), node.pos)
+
+    if optlist:
+        optlist = [interpret(x, ix) for ix, x in enumerate(optlist)]
+    if optdict:
+        assert isinstance(optdict, DictNode)
+        new_optdict = {}
+        for item in optdict.key_value_pairs:
+            new_key, dummy = interpret(item.key, None)
+            new_optdict[new_key] = interpret(item.value, item.key.value)
+        optdict = new_optdict
+    return (optlist, optdict)
diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Lexicon.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Lexicon.py
new file mode 100644
index 0000000..72c9cea
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Lexicon.py
@@ -0,0 +1,138 @@
+# cython: language_level=3, py2_import=True
+#
+#   Cython Scanner - Lexical Definitions
+#
+
+from __future__ import absolute_import, unicode_literals
+
+raw_prefixes = "rR"
+bytes_prefixes = "bB"
+string_prefixes = "fFuU" + bytes_prefixes
+char_prefixes = "cC"
+any_string_prefix = raw_prefixes + string_prefixes + char_prefixes
+IDENT = 'IDENT'
+
+
+def make_lexicon():
+    from ..Plex import \
+        Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
+        TEXT, IGNORE, State, Lexicon
+    from .Scanning import Method
+
+    letter = Any("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
+    digit = Any("0123456789")
+    bindigit = Any("01")
+    octdigit = Any("01234567")
+    hexdigit = Any("0123456789ABCDEFabcdef")
+    indentation = Bol + Rep(Any(" \t"))
+
+    def underscore_digits(d):
+        return Rep1(d) + Rep(Str("_") + Rep1(d))
+
+    decimal = underscore_digits(digit)
+    dot = Str(".")
+    exponent = Any("Ee") + Opt(Any("+-")) + decimal
+    decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)
+
+    name = letter + Rep(letter | digit)
+    intconst = decimal | (Str("0") + ((Any("Xx") + underscore_digits(hexdigit)) |
+                                      (Any("Oo") + underscore_digits(octdigit)) |
+                                      (Any("Bb") + underscore_digits(bindigit)) ))
+    intsuffix = (Opt(Any("Uu")) + Opt(Any("Ll")) + Opt(Any("Ll"))) | (Opt(Any("Ll")) + Opt(Any("Ll")) + Opt(Any("Uu")))
+    intliteral = intconst + intsuffix
+    fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
+    imagconst = (intconst | fltconst) + Any("jJ")
+
+    # invalid
combinations of prefixes are caught in p_string_literal + beginstring = Opt(Rep(Any(string_prefixes + raw_prefixes)) | + Any(char_prefixes) + ) + (Str("'") | Str('"') | Str("'''") | Str('"""')) + two_oct = octdigit + octdigit + three_oct = octdigit + octdigit + octdigit + two_hex = hexdigit + hexdigit + four_hex = two_hex + two_hex + escapeseq = Str("\\") + (two_oct | three_oct | + Str('N{') + Rep(AnyBut('}')) + Str('}') | + Str('u') + four_hex | Str('x') + two_hex | + Str('U') + four_hex + four_hex | AnyChar) + + bra = Any("([{") + ket = Any(")]}") + punct = Any(":,;+-*/|&<>=.%`~^?!@") + diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//", + "+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=", + "<<=", ">>=", "**=", "//=", "->", "@=") + spaces = Rep1(Any(" \t\f")) + escaped_newline = Str("\\\n") + lineterm = Eol + Opt(Str("\n")) + + comment = Str("#") + Rep(AnyBut("\n")) + + return Lexicon([ + (name, IDENT), + (intliteral, Method('strip_underscores', symbol='INT')), + (fltconst, Method('strip_underscores', symbol='FLOAT')), + (imagconst, Method('strip_underscores', symbol='IMAG')), + (punct | diphthong, TEXT), + + (bra, Method('open_bracket_action')), + (ket, Method('close_bracket_action')), + (lineterm, Method('newline_action')), + + (beginstring, Method('begin_string_action')), + + (comment, IGNORE), + (spaces, IGNORE), + (escaped_newline, IGNORE), + + State('INDENT', [ + (comment + lineterm, Method('commentline')), + (Opt(spaces) + Opt(comment) + lineterm, IGNORE), + (indentation, Method('indentation_action')), + (Eof, Method('eof_action')) + ]), + + State('SQ_STRING', [ + (escapeseq, 'ESCAPE'), + (Rep1(AnyBut("'\"\n\\")), 'CHARS'), + (Str('"'), 'CHARS'), + (Str("\n"), Method('unclosed_string_action')), + (Str("'"), Method('end_string_action')), + (Eof, 'EOF') + ]), + + State('DQ_STRING', [ + (escapeseq, 'ESCAPE'), + (Rep1(AnyBut('"\n\\')), 'CHARS'), + (Str("'"), 'CHARS'), + (Str("\n"), Method('unclosed_string_action')), + (Str('"'), Method('end_string_action')), + (Eof, 'EOF') + ]), + + State('TSQ_STRING', [ + (escapeseq, 'ESCAPE'), + (Rep1(AnyBut("'\"\n\\")), 'CHARS'), + (Any("'\""), 'CHARS'), + (Str("\n"), 'NEWLINE'), + (Str("'''"), Method('end_string_action')), + (Eof, 'EOF') + ]), + + State('TDQ_STRING', [ + (escapeseq, 'ESCAPE'), + (Rep1(AnyBut('"\'\n\\')), 'CHARS'), + (Any("'\""), 'CHARS'), + (Str("\n"), 'NEWLINE'), + (Str('"""'), Method('end_string_action')), + (Eof, 'EOF') + ]), + + (Eof, Method('eof_action')) + ], + + # FIXME: Plex 1.9 needs different args here from Plex 1.1.4 + #debug_flags = scanner_debug_flags, + #debug_file = scanner_dump_file + ) + diff --git a/venv/lib/python3.8/site-packages/Cython/Compiler/Main.py b/venv/lib/python3.8/site-packages/Cython/Compiler/Main.py new file mode 100644 index 0000000..dc4add5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Compiler/Main.py @@ -0,0 +1,904 @@ +# +# Cython Top Level +# + +from __future__ import absolute_import + +import os +import re +import sys +import io + +if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[:2] < (3, 3): + sys.stderr.write("Sorry, Cython requires Python 2.6+ or 3.3+, found %d.%d\n" % tuple(sys.version_info[:2])) + sys.exit(1) + +try: + from __builtin__ import basestring +except ImportError: + basestring = str + +# Do not import Parsing here, import it when needed, because Parsing imports +# Nodes, which globally needs debug command line options initialized to set a +# conditional metaclass. 
These options are processed by CmdLine called from +# main() in this file. +# import Parsing +from . import Errors +from .StringEncoding import EncodedString +from .Scanning import PyrexScanner, FileSourceDescriptor +from .Errors import PyrexError, CompileError, error, warning +from .Symtab import ModuleScope +from .. import Utils +from . import Options + +from . import Version # legacy import needed by old PyTables versions +version = Version.version # legacy attribute - use "Cython.__version__" instead + +module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)*$") + +verbose = 0 + +standard_include_path = os.path.abspath(os.path.join(os.path.dirname(__file__), + os.path.pardir, 'Includes')) + +class CompilationData(object): + # Bundles the information that is passed from transform to transform. + # (For now, this is only) + + # While Context contains every pxd ever loaded, path information etc., + # this only contains the data related to a single compilation pass + # + # pyx ModuleNode Main code tree of this compilation. + # pxds {string : ModuleNode} Trees for the pxds used in the pyx. + # codewriter CCodeWriter Where to output final code. + # options CompilationOptions + # result CompilationResult + pass + + +class Context(object): + # This class encapsulates the context needed for compiling + # one or more Cython implementation files along with their + # associated and imported declaration files. It includes + # the root of the module import namespace and the list + # of directories to search for include files. + # + # modules {string : ModuleScope} + # include_directories [string] + # future_directives [object] + # language_level int currently 2 or 3 for Python 2/3 + + cython_scope = None + language_level = None # warn when not set but default to Py2 + + def __init__(self, include_directories, compiler_directives, cpp=False, + language_level=None, options=None): + # cython_scope is a hack, set to False by subclasses, in order to break + # an infinite loop. + # Better code organization would fix it. + + from . 
import Builtin, CythonScope + self.modules = {"__builtin__" : Builtin.builtin_scope} + self.cython_scope = CythonScope.create_cython_scope(self) + self.modules["cython"] = self.cython_scope + self.include_directories = include_directories + self.future_directives = set() + self.compiler_directives = compiler_directives + self.cpp = cpp + self.options = options + + self.pxds = {} # full name -> node tree + self._interned = {} # (type(value), value, *key_args) -> interned_value + + if language_level is not None: + self.set_language_level(language_level) + + self.gdb_debug_outputwriter = None + + def set_language_level(self, level): + from .Future import print_function, unicode_literals, absolute_import, division + future_directives = set() + if level == '3str': + level = 3 + else: + level = int(level) + if level >= 3: + future_directives.add(unicode_literals) + if level >= 3: + future_directives.update([print_function, absolute_import, division]) + self.language_level = level + self.future_directives = future_directives + if level >= 3: + self.modules['builtins'] = self.modules['__builtin__'] + + def intern_ustring(self, value, encoding=None): + key = (EncodedString, value, encoding) + try: + return self._interned[key] + except KeyError: + pass + value = EncodedString(value) + if encoding: + value.encoding = encoding + self._interned[key] = value + return value + + def intern_value(self, value, *key): + key = (type(value), value) + key + try: + return self._interned[key] + except KeyError: + pass + self._interned[key] = value + return value + + # pipeline creation functions can now be found in Pipeline.py + + def process_pxd(self, source_desc, scope, module_name): + from . import Pipeline + if isinstance(source_desc, FileSourceDescriptor) and source_desc._file_type == 'pyx': + source = CompilationSource(source_desc, module_name, os.getcwd()) + result_sink = create_default_resultobj(source, self.options) + pipeline = Pipeline.create_pyx_as_pxd_pipeline(self, result_sink) + result = Pipeline.run_pipeline(pipeline, source) + else: + pipeline = Pipeline.create_pxd_pipeline(self, scope, module_name) + result = Pipeline.run_pipeline(pipeline, source_desc) + return result + + def nonfatal_error(self, exc): + return Errors.report_error(exc) + + def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1, + absolute_fallback=True): + # Finds and returns the module scope corresponding to + # the given relative or absolute module name. If this + # is the first time the module has been requested, finds + # the corresponding .pxd file and process it. + # If relative_to is not None, it must be a module scope, + # and the module will first be searched for relative to + # that module, provided its name is not a dotted name. + debug_find_module = 0 + if debug_find_module: + print("Context.find_module: module_name = %s, relative_to = %s, pos = %s, need_pxd = %s" % ( + module_name, relative_to, pos, need_pxd)) + + scope = None + pxd_pathname = None + if relative_to: + if module_name: + # from .module import ... + qualified_name = relative_to.qualify_name(module_name) + else: + # from . import ... 
+ qualified_name = relative_to.qualified_name + scope = relative_to + relative_to = None + else: + qualified_name = module_name + + if not module_name_pattern.match(qualified_name): + raise CompileError(pos or (module_name, 0, 0), + "'%s' is not a valid module name" % module_name) + + if relative_to: + if debug_find_module: + print("...trying relative import") + scope = relative_to.lookup_submodule(module_name) + if not scope: + pxd_pathname = self.find_pxd_file(qualified_name, pos) + if pxd_pathname: + scope = relative_to.find_submodule(module_name) + if not scope: + if debug_find_module: + print("...trying absolute import") + if absolute_fallback: + qualified_name = module_name + scope = self + for name in qualified_name.split("."): + scope = scope.find_submodule(name) + + if debug_find_module: + print("...scope = %s" % scope) + if not scope.pxd_file_loaded: + if debug_find_module: + print("...pxd not loaded") + if not pxd_pathname: + if debug_find_module: + print("...looking for pxd file") + # Only look in sys.path if we are explicitly looking + # for a .pxd file. + pxd_pathname = self.find_pxd_file(qualified_name, pos, sys_path=need_pxd) + if debug_find_module: + print("......found %s" % pxd_pathname) + if not pxd_pathname and need_pxd: + # Set pxd_file_loaded such that we don't need to + # look for the non-existing pxd file next time. + scope.pxd_file_loaded = True + package_pathname = self.search_include_directories(qualified_name, ".py", pos) + if package_pathname and package_pathname.endswith('__init__.py'): + pass + else: + error(pos, "'%s.pxd' not found" % qualified_name.replace('.', os.sep)) + if pxd_pathname: + scope.pxd_file_loaded = True + try: + if debug_find_module: + print("Context.find_module: Parsing %s" % pxd_pathname) + rel_path = module_name.replace('.', os.sep) + os.path.splitext(pxd_pathname)[1] + if not pxd_pathname.endswith(rel_path): + rel_path = pxd_pathname # safety measure to prevent printing incorrect paths + source_desc = FileSourceDescriptor(pxd_pathname, rel_path) + err, result = self.process_pxd(source_desc, scope, qualified_name) + if err: + raise err + (pxd_codenodes, pxd_scope) = result + self.pxds[module_name] = (pxd_codenodes, pxd_scope) + except CompileError: + pass + return scope + + def find_pxd_file(self, qualified_name, pos, sys_path=True): + # Search include path (and sys.path if sys_path is True) for + # the .pxd file corresponding to the given fully-qualified + # module name. + # Will find either a dotted filename or a file in a + # package directory. If a source file position is given, + # the directory containing the source file is searched first + # for a dotted filename, and its containing package root + # directory is searched first for a non-dotted filename. 
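+        # For example (illustrative): for "pkg.mod" this can match either
+        # a dotted "pkg.mod.pxd" next to the importing source or
+        # "pkg/mod.pxd" under an include directory.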
+        pxd = self.search_include_directories(qualified_name, ".pxd", pos, sys_path=sys_path)
+        if pxd is None:  # XXX Keep this until Includes/Deprecated is removed
+            if (qualified_name.startswith('python') or
+                    qualified_name in ('stdlib', 'stdio', 'stl')):
+                standard_include_path = os.path.abspath(os.path.normpath(
+                    os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes')))
+                deprecated_include_path = os.path.join(standard_include_path, 'Deprecated')
+                self.include_directories.append(deprecated_include_path)
+                try:
+                    pxd = self.search_include_directories(qualified_name, ".pxd", pos)
+                finally:
+                    self.include_directories.pop()
+                if pxd:
+                    name = qualified_name
+                    if name.startswith('python'):
+                        warning(pos, "'%s' is deprecated, use 'cpython'" % name, 1)
+                    elif name in ('stdlib', 'stdio'):
+                        warning(pos, "'%s' is deprecated, use 'libc.%s'" % (name, name), 1)
+                    elif name == 'stl':
+                        warning(pos, "'%s' is deprecated, use 'libcpp.*.*'" % name, 1)
+        if pxd is None and Options.cimport_from_pyx:
+            return self.find_pyx_file(qualified_name, pos)
+        return pxd
+
+    def find_pyx_file(self, qualified_name, pos):
+        # Search include path for the .pyx file corresponding to the
+        # given fully-qualified module name, as for find_pxd_file().
+        return self.search_include_directories(qualified_name, ".pyx", pos)
+
+    def find_include_file(self, filename, pos):
+        # Search list of include directories for filename.
+        # Reports an error and returns None if not found.
+        path = self.search_include_directories(filename, "", pos,
+                                               include=True)
+        if not path:
+            error(pos, "'%s' not found" % filename)
+        return path
+
+    def search_include_directories(self, qualified_name, suffix, pos,
+                                   include=False, sys_path=False):
+        include_dirs = self.include_directories
+        if sys_path:
+            include_dirs = include_dirs + sys.path
+        # include_dirs must be hashable for caching in @cached_function
+        include_dirs = tuple(include_dirs + [standard_include_path])
+        return search_include_directories(include_dirs, qualified_name,
+                                          suffix, pos, include)
+
+    def find_root_package_dir(self, file_path):
+        return Utils.find_root_package_dir(file_path)
+
+    def check_package_dir(self, dir, package_names):
+        return Utils.check_package_dir(dir, tuple(package_names))
+
+    def c_file_out_of_date(self, source_path, output_path):
+        if not os.path.exists(output_path):
+            return 1
+        c_time = Utils.modification_time(output_path)
+        if Utils.file_newer_than(source_path, c_time):
+            return 1
+        pos = [source_path]
+        pxd_path = Utils.replace_suffix(source_path, ".pxd")
+        if os.path.exists(pxd_path) and Utils.file_newer_than(pxd_path, c_time):
+            return 1
+        for kind, name in self.read_dependency_file(source_path):
+            if kind == "cimport":
+                dep_path = self.find_pxd_file(name, pos)
+            elif kind == "include":
+                dep_path = self.search_include_directories(name, "", pos)
+            else:
+                continue
+            if dep_path and Utils.file_newer_than(dep_path, c_time):
+                return 1
+        return 0
+
+    def find_cimported_module_names(self, source_path):
+        return [ name for kind, name in self.read_dependency_file(source_path)
+                 if kind == "cimport" ]
+
+    def is_package_dir(self, dir_path):
+        return Utils.is_package_dir(dir_path)
+
+    def read_dependency_file(self, source_path):
+        dep_path = Utils.replace_suffix(source_path, ".dep")
+        if os.path.exists(dep_path):
+            f = open(dep_path, "rU")
+            chunks = [ line.strip().split(" ", 1)
+                       for line in f.readlines()
+                       if " " in line.strip() ]
+            f.close()
+            return chunks
+        else:
+            return ()
+
+    def lookup_submodule(self, name):
+        # Look up a top-level module.
Returns None if not found. + return self.modules.get(name, None) + + def find_submodule(self, name): + # Find a top-level module, creating a new one if needed. + scope = self.lookup_submodule(name) + if not scope: + scope = ModuleScope(name, + parent_module = None, context = self) + self.modules[name] = scope + return scope + + def parse(self, source_desc, scope, pxd, full_module_name): + if not isinstance(source_desc, FileSourceDescriptor): + raise RuntimeError("Only file sources for code supported") + source_filename = source_desc.filename + scope.cpp = self.cpp + # Parse the given source file and return a parse tree. + num_errors = Errors.num_errors + try: + with Utils.open_source_file(source_filename) as f: + from . import Parsing + s = PyrexScanner(f, source_desc, source_encoding = f.encoding, + scope = scope, context = self) + tree = Parsing.p_module(s, pxd, full_module_name) + if self.options.formal_grammar: + try: + from ..Parser import ConcreteSyntaxTree + except ImportError: + raise RuntimeError( + "Formal grammar can only be used with compiled Cython with an available pgen.") + ConcreteSyntaxTree.p_module(source_filename) + except UnicodeDecodeError as e: + #import traceback + #traceback.print_exc() + raise self._report_decode_error(source_desc, e) + + if Errors.num_errors > num_errors: + raise CompileError() + return tree + + def _report_decode_error(self, source_desc, exc): + msg = exc.args[-1] + position = exc.args[2] + encoding = exc.args[0] + + line = 1 + column = idx = 0 + with io.open(source_desc.filename, "r", encoding='iso8859-1', newline='') as f: + for line, data in enumerate(f, 1): + idx += len(data) + if idx >= position: + column = position - (idx - len(data)) + 1 + break + + return error((source_desc, line, column), + "Decoding error, missing or incorrect coding= " + "at top of source (cannot decode with encoding %r: %s)" % (encoding, msg)) + + def extract_module_name(self, path, options): + # Find fully_qualified module name from the full pathname + # of a source file. + dir, filename = os.path.split(path) + module_name, _ = os.path.splitext(filename) + if "." 
in module_name: + return module_name + names = [module_name] + while self.is_package_dir(dir): + parent, package_name = os.path.split(dir) + if parent == dir: + break + names.append(package_name) + dir = parent + names.reverse() + return ".".join(names) + + def setup_errors(self, options, result): + Errors.reset() # clear any remaining error state + if options.use_listing_file: + path = result.listing_file = Utils.replace_suffix(result.main_source_file, ".lis") + else: + path = None + Errors.open_listing_file(path=path, + echo_to_stderr=options.errors_to_stderr) + + def teardown_errors(self, err, options, result): + source_desc = result.compilation_source.source_desc + if not isinstance(source_desc, FileSourceDescriptor): + raise RuntimeError("Only file sources for code supported") + Errors.close_listing_file() + result.num_errors = Errors.num_errors + if result.num_errors > 0: + err = True + if err and result.c_file: + try: + Utils.castrate_file(result.c_file, os.stat(source_desc.filename)) + except EnvironmentError: + pass + result.c_file = None + + +def get_output_filename(source_filename, cwd, options): + if options.cplus: + c_suffix = ".cpp" + else: + c_suffix = ".c" + suggested_file_name = Utils.replace_suffix(source_filename, c_suffix) + if options.output_file: + out_path = os.path.join(cwd, options.output_file) + if os.path.isdir(out_path): + return os.path.join(out_path, os.path.basename(suggested_file_name)) + else: + return out_path + else: + return suggested_file_name + + +def create_default_resultobj(compilation_source, options): + result = CompilationResult() + result.main_source_file = compilation_source.source_desc.filename + result.compilation_source = compilation_source + source_desc = compilation_source.source_desc + result.c_file = get_output_filename(source_desc.filename, + compilation_source.cwd, options) + result.embedded_metadata = options.embedded_metadata + return result + + +def run_pipeline(source, options, full_module_name=None, context=None): + from . import Pipeline + + source_ext = os.path.splitext(source)[1] + options.configure_language_defaults(source_ext[1:]) # py/pyx + if context is None: + context = options.create_context() + + # Set up source object + cwd = os.getcwd() + abs_path = os.path.abspath(source) + full_module_name = full_module_name or context.extract_module_name(source, options) + + Utils.raise_error_if_module_name_forbidden(full_module_name) + + if options.relative_path_in_code_position_comments: + rel_path = full_module_name.replace('.', os.sep) + source_ext + if not abs_path.endswith(rel_path): + rel_path = source # safety measure to prevent printing incorrect paths + else: + rel_path = abs_path + source_desc = FileSourceDescriptor(abs_path, rel_path) + source = CompilationSource(source_desc, full_module_name, cwd) + + # Set up result object + result = create_default_resultobj(source, options) + + if options.annotate is None: + # By default, decide based on whether an html file already exists. 
+        html_filename = os.path.splitext(result.c_file)[0] + ".html"
+        if os.path.exists(html_filename):
+            with io.open(html_filename, "r", encoding="UTF-8") as html_file:
+                if u'<!-- Generated by Cython' in html_file.read(100):
+                    options.annotate = True
+
+            file.write("      %s --> State %d\n" % (key, state['number']))
+        for key in ('bol', 'eol', 'eof', 'else'):
+            state = special_to_state.get(key, None)
+            if state:
+                file.write("      %s --> State %d\n" % (key, state['number']))
+
+    def chars_to_ranges(self, char_list):
+        char_list.sort()
+        i = 0
+        n = len(char_list)
+        result = []
+        while i < n:
+            c1 = ord(char_list[i])
+            c2 = c1
+            i += 1
+            while i < n and ord(char_list[i]) == c2 + 1:
+                i += 1
+                c2 += 1
+            result.append((chr(c1), chr(c2)))
+        return tuple(result)
+
+    def ranges_to_string(self, range_list):
+        return ','.join(map(self.range_to_string, range_list))
+
+    def range_to_string(self, range_tuple):
+        (c1, c2) = range_tuple
+        if c1 == c2:
+            return repr(c1)
+        else:
+            return "%s..%s" % (repr(c1), repr(c2))
diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/Regexps.py b/venv/lib/python3.8/site-packages/Cython/Plex/Regexps.py
new file mode 100644
index 0000000..41816c9
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Plex/Regexps.py
@@ -0,0 +1,576 @@
+#=======================================================================
+#
+#   Python Lexical Analyser
+#
+#   Regular Expressions
+#
+#=======================================================================

+from __future__ import absolute_import
+
+import types
+try:
+    from sys import maxsize as maxint
+except ImportError:
+    from sys import maxint
+
+from . import Errors
+
+#
+# Constants
+#
+
+BOL = 'bol'
+EOL = 'eol'
+EOF = 'eof'
+
+nl_code = ord('\n')
+
+
+#
+# Helper functions
+#
+
+def chars_to_ranges(s):
+    """
+    Return a list of character codes consisting of pairs
+    [code1a, code1b, code2a, code2b,...] which cover all
+    the characters in |s|.
+    """
+    char_list = list(s)
+    char_list.sort()
+    i = 0
+    n = len(char_list)
+    result = []
+    while i < n:
+        code1 = ord(char_list[i])
+        code2 = code1 + 1
+        i += 1
+        while i < n and code2 >= ord(char_list[i]):
+            code2 += 1
+            i += 1
+        result.append(code1)
+        result.append(code2)
+    return result
+
+
+def uppercase_range(code1, code2):
+    """
+    If the range of characters from code1 to code2-1 includes any
+    lower case letters, return the corresponding upper case range.
+    """
+    code3 = max(code1, ord('a'))
+    code4 = min(code2, ord('z') + 1)
+    if code3 < code4:
+        d = ord('A') - ord('a')
+        return (code3 + d, code4 + d)
+    else:
+        return None
+
+
+def lowercase_range(code1, code2):
+    """
+    If the range of characters from code1 to code2-1 includes any
+    upper case letters, return the corresponding lower case range.
+    """
+    code3 = max(code1, ord('A'))
+    code4 = min(code2, ord('Z') + 1)
+    if code3 < code4:
+        d = ord('a') - ord('A')
+        return (code3 + d, code4 + d)
+    else:
+        return None
+
+
+def CodeRanges(code_list):
+    """
+    Given a list of codes as returned by chars_to_ranges, return
+    an RE which will match a character in any of the ranges.
+    """
+    re_list = [CodeRange(code_list[i], code_list[i + 1]) for i in range(0, len(code_list), 2)]
+    return Alt(*re_list)
+
+
+def CodeRange(code1, code2):
+    """
+    CodeRange(code1, code2) is an RE which matches any character
+    with a code |c| in the range |code1| <= |c| < |code2|.
+    """
+    if code1 <= nl_code < code2:
+        return Alt(RawCodeRange(code1, nl_code),
+                   RawNewline,
+                   RawCodeRange(nl_code + 1, code2))
+    else:
+        return RawCodeRange(code1, code2)
+
+
+#
+# Abstract classes
+#
+
+class RE(object):
+    """RE is the base class for regular expression constructors.
+ The following operators are defined on REs: + + re1 + re2 is an RE which matches |re1| followed by |re2| + re1 | re2 is an RE which matches either |re1| or |re2| + """ + + nullable = 1 # True if this RE can match 0 input symbols + match_nl = 1 # True if this RE can match a string ending with '\n' + str = None # Set to a string to override the class's __str__ result + + def build_machine(self, machine, initial_state, final_state, + match_bol, nocase): + """ + This method should add states to |machine| to implement this + RE, starting at |initial_state| and ending at |final_state|. + If |match_bol| is true, the RE must be able to match at the + beginning of a line. If nocase is true, upper and lower case + letters should be treated as equivalent. + """ + raise NotImplementedError("%s.build_machine not implemented" % + self.__class__.__name__) + + def build_opt(self, m, initial_state, c): + """ + Given a state |s| of machine |m|, return a new state + reachable from |s| on character |c| or epsilon. + """ + s = m.new_state() + initial_state.link_to(s) + initial_state.add_transition(c, s) + return s + + def __add__(self, other): + return Seq(self, other) + + def __or__(self, other): + return Alt(self, other) + + def __str__(self): + if self.str: + return self.str + else: + return self.calc_str() + + def check_re(self, num, value): + if not isinstance(value, RE): + self.wrong_type(num, value, "Plex.RE instance") + + def check_string(self, num, value): + if type(value) != type(''): + self.wrong_type(num, value, "string") + + def check_char(self, num, value): + self.check_string(num, value) + if len(value) != 1: + raise Errors.PlexValueError("Invalid value for argument %d of Plex.%s." + "Expected a string of length 1, got: %s" % ( + num, self.__class__.__name__, repr(value))) + + def wrong_type(self, num, value, expected): + if type(value) == types.InstanceType: + got = "%s.%s instance" % ( + value.__class__.__module__, value.__class__.__name__) + else: + got = type(value).__name__ + raise Errors.PlexTypeError("Invalid type for argument %d of Plex.%s " + "(expected %s, got %s" % ( + num, self.__class__.__name__, expected, got)) + +# +# Primitive RE constructors +# ------------------------- +# +# These are the basic REs from which all others are built. +# + +## class Char(RE): +## """ +## Char(c) is an RE which matches the character |c|. +## """ + +## nullable = 0 + +## def __init__(self, char): +## self.char = char +## self.match_nl = char == '\n' + +## def build_machine(self, m, initial_state, final_state, match_bol, nocase): +## c = self.char +## if match_bol and c != BOL: +## s1 = self.build_opt(m, initial_state, BOL) +## else: +## s1 = initial_state +## if c == '\n' or c == EOF: +## s1 = self.build_opt(m, s1, EOL) +## if len(c) == 1: +## code = ord(self.char) +## s1.add_transition((code, code+1), final_state) +## if nocase and is_letter_code(code): +## code2 = other_case_code(code) +## s1.add_transition((code2, code2+1), final_state) +## else: +## s1.add_transition(c, final_state) + +## def calc_str(self): +## return "Char(%s)" % repr(self.char) + + +def Char(c): + """ + Char(c) is an RE which matches the character |c|. + """ + if len(c) == 1: + result = CodeRange(ord(c), ord(c) + 1) + else: + result = SpecialSymbol(c) + result.str = "Char(%s)" % repr(c) + return result + + +class RawCodeRange(RE): + """ + RawCodeRange(code1, code2) is a low-level RE which matches any character + with a code |c| in the range |code1| <= |c| < |code2|, where the range + does not include newline. 
For internal use only. + """ + nullable = 0 + match_nl = 0 + range = None # (code, code) + uppercase_range = None # (code, code) or None + lowercase_range = None # (code, code) or None + + def __init__(self, code1, code2): + self.range = (code1, code2) + self.uppercase_range = uppercase_range(code1, code2) + self.lowercase_range = lowercase_range(code1, code2) + + def build_machine(self, m, initial_state, final_state, match_bol, nocase): + if match_bol: + initial_state = self.build_opt(m, initial_state, BOL) + initial_state.add_transition(self.range, final_state) + if nocase: + if self.uppercase_range: + initial_state.add_transition(self.uppercase_range, final_state) + if self.lowercase_range: + initial_state.add_transition(self.lowercase_range, final_state) + + def calc_str(self): + return "CodeRange(%d,%d)" % (self.code1, self.code2) + + +class _RawNewline(RE): + """ + RawNewline is a low-level RE which matches a newline character. + For internal use only. + """ + nullable = 0 + match_nl = 1 + + def build_machine(self, m, initial_state, final_state, match_bol, nocase): + if match_bol: + initial_state = self.build_opt(m, initial_state, BOL) + s = self.build_opt(m, initial_state, EOL) + s.add_transition((nl_code, nl_code + 1), final_state) + + +RawNewline = _RawNewline() + + +class SpecialSymbol(RE): + """ + SpecialSymbol(sym) is an RE which matches the special input + symbol |sym|, which is one of BOL, EOL or EOF. + """ + nullable = 0 + match_nl = 0 + sym = None + + def __init__(self, sym): + self.sym = sym + + def build_machine(self, m, initial_state, final_state, match_bol, nocase): + # Sequences 'bol bol' and 'bol eof' are impossible, so only need + # to allow for bol if sym is eol + if match_bol and self.sym == EOL: + initial_state = self.build_opt(m, initial_state, BOL) + initial_state.add_transition(self.sym, final_state) + + +class Seq(RE): + """Seq(re1, re2, re3...) is an RE which matches |re1| followed by + |re2| followed by |re3|...""" + + def __init__(self, *re_list): + nullable = 1 + for i, re in enumerate(re_list): + self.check_re(i, re) + nullable = nullable and re.nullable + self.re_list = re_list + self.nullable = nullable + i = len(re_list) + match_nl = 0 + while i: + i -= 1 + re = re_list[i] + if re.match_nl: + match_nl = 1 + break + if not re.nullable: + break + self.match_nl = match_nl + + def build_machine(self, m, initial_state, final_state, match_bol, nocase): + re_list = self.re_list + if len(re_list) == 0: + initial_state.link_to(final_state) + else: + s1 = initial_state + n = len(re_list) + for i, re in enumerate(re_list): + if i < n - 1: + s2 = m.new_state() + else: + s2 = final_state + re.build_machine(m, s1, s2, match_bol, nocase) + s1 = s2 + match_bol = re.match_nl or (match_bol and re.nullable) + + def calc_str(self): + return "Seq(%s)" % ','.join(map(str, self.re_list)) + + +class Alt(RE): + """Alt(re1, re2, re3...) 
is an RE which matches either |re1| or + |re2| or |re3|...""" + + def __init__(self, *re_list): + self.re_list = re_list + nullable = 0 + match_nl = 0 + nullable_res = [] + non_nullable_res = [] + i = 1 + for re in re_list: + self.check_re(i, re) + if re.nullable: + nullable_res.append(re) + nullable = 1 + else: + non_nullable_res.append(re) + if re.match_nl: + match_nl = 1 + i += 1 + self.nullable_res = nullable_res + self.non_nullable_res = non_nullable_res + self.nullable = nullable + self.match_nl = match_nl + + def build_machine(self, m, initial_state, final_state, match_bol, nocase): + for re in self.nullable_res: + re.build_machine(m, initial_state, final_state, match_bol, nocase) + if self.non_nullable_res: + if match_bol: + initial_state = self.build_opt(m, initial_state, BOL) + for re in self.non_nullable_res: + re.build_machine(m, initial_state, final_state, 0, nocase) + + def calc_str(self): + return "Alt(%s)" % ','.join(map(str, self.re_list)) + + +class Rep1(RE): + """Rep1(re) is an RE which matches one or more repetitions of |re|.""" + + def __init__(self, re): + self.check_re(1, re) + self.re = re + self.nullable = re.nullable + self.match_nl = re.match_nl + + def build_machine(self, m, initial_state, final_state, match_bol, nocase): + s1 = m.new_state() + s2 = m.new_state() + initial_state.link_to(s1) + self.re.build_machine(m, s1, s2, match_bol or self.re.match_nl, nocase) + s2.link_to(s1) + s2.link_to(final_state) + + def calc_str(self): + return "Rep1(%s)" % self.re + + +class SwitchCase(RE): + """ + SwitchCase(re, nocase) is an RE which matches the same strings as RE, + but treating upper and lower case letters according to |nocase|. If + |nocase| is true, case is ignored, otherwise it is not. + """ + re = None + nocase = None + + def __init__(self, re, nocase): + self.re = re + self.nocase = nocase + self.nullable = re.nullable + self.match_nl = re.match_nl + + def build_machine(self, m, initial_state, final_state, match_bol, nocase): + self.re.build_machine(m, initial_state, final_state, match_bol, + self.nocase) + + def calc_str(self): + if self.nocase: + name = "NoCase" + else: + name = "Case" + return "%s(%s)" % (name, self.re) + +# +# Composite RE constructors +# ------------------------- +# +# These REs are defined in terms of the primitive REs. +# + +Empty = Seq() +Empty.__doc__ = \ + """ + Empty is an RE which matches the empty string. + """ +Empty.str = "Empty" + + +def Str1(s): + """ + Str1(s) is an RE which matches the literal string |s|. + """ + result = Seq(*tuple(map(Char, s))) + result.str = "Str(%s)" % repr(s) + return result + + +def Str(*strs): + """ + Str(s) is an RE which matches the literal string |s|. + Str(s1, s2, s3, ...) is an RE which matches any of |s1| or |s2| or |s3|... + """ + if len(strs) == 1: + return Str1(strs[0]) + else: + result = Alt(*tuple(map(Str1, strs))) + result.str = "Str(%s)" % ','.join(map(repr, strs)) + return result + + +def Any(s): + """ + Any(s) is an RE which matches any character in the string |s|. + """ + #result = apply(Alt, tuple(map(Char, s))) + result = CodeRanges(chars_to_ranges(s)) + result.str = "Any(%s)" % repr(s) + return result + + +def AnyBut(s): + """ + AnyBut(s) is an RE which matches any character (including + newline) which is not in the string |s|. 
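+
+    For example (illustrative): AnyBut("aeiou") matches any single
+    character that is not a lowercase vowel.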
+ """ + ranges = chars_to_ranges(s) + ranges.insert(0, -maxint) + ranges.append(maxint) + result = CodeRanges(ranges) + result.str = "AnyBut(%s)" % repr(s) + return result + + +AnyChar = AnyBut("") +AnyChar.__doc__ = \ + """ + AnyChar is an RE which matches any single character (including a newline). + """ +AnyChar.str = "AnyChar" + + +def Range(s1, s2=None): + """ + Range(c1, c2) is an RE which matches any single character in the range + |c1| to |c2| inclusive. + Range(s) where |s| is a string of even length is an RE which matches + any single character in the ranges |s[0]| to |s[1]|, |s[2]| to |s[3]|,... + """ + if s2: + result = CodeRange(ord(s1), ord(s2) + 1) + result.str = "Range(%s,%s)" % (s1, s2) + else: + ranges = [] + for i in range(0, len(s1), 2): + ranges.append(CodeRange(ord(s1[i]), ord(s1[i + 1]) + 1)) + result = Alt(*ranges) + result.str = "Range(%s)" % repr(s1) + return result + + +def Opt(re): + """ + Opt(re) is an RE which matches either |re| or the empty string. + """ + result = Alt(re, Empty) + result.str = "Opt(%s)" % re + return result + + +def Rep(re): + """ + Rep(re) is an RE which matches zero or more repetitions of |re|. + """ + result = Opt(Rep1(re)) + result.str = "Rep(%s)" % re + return result + + +def NoCase(re): + """ + NoCase(re) is an RE which matches the same strings as RE, but treating + upper and lower case letters as equivalent. + """ + return SwitchCase(re, nocase=1) + + +def Case(re): + """ + Case(re) is an RE which matches the same strings as RE, but treating + upper and lower case letters as distinct, i.e. it cancels the effect + of any enclosing NoCase(). + """ + return SwitchCase(re, nocase=0) + +# +# RE Constants +# + +Bol = Char(BOL) +Bol.__doc__ = \ + """ + Bol is an RE which matches the beginning of a line. + """ +Bol.str = "Bol" + +Eol = Char(EOL) +Eol.__doc__ = \ + """ + Eol is an RE which matches the end of a line. + """ +Eol.str = "Eol" + +Eof = Char(EOF) +Eof.__doc__ = \ + """ + Eof is an RE which matches the end of the file. + """ +Eof.str = "Eof" + diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..249ddb6 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.pxd b/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.pxd new file mode 100644 index 0000000..6e75f55 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.pxd @@ -0,0 +1,50 @@ +from __future__ import absolute_import + +import cython + +from Cython.Plex.Actions cimport Action + +cdef class Scanner: + + cdef public lexicon + cdef public stream + cdef public name + cdef public unicode buffer + cdef public Py_ssize_t buf_start_pos + cdef public Py_ssize_t next_pos + cdef public Py_ssize_t cur_pos + cdef public Py_ssize_t cur_line + cdef public Py_ssize_t cur_line_start + cdef public Py_ssize_t start_pos + cdef public Py_ssize_t start_line + cdef public Py_ssize_t start_col + cdef public text + cdef public initial_state # int? 
+ cdef public state_name + cdef public list queue + cdef public bint trace + cdef public cur_char + cdef public long input_state + + cdef public level + + @cython.final + @cython.locals(input_state=long) + cdef next_char(self) + @cython.locals(action=Action) + cpdef tuple read(self) + @cython.final + cdef tuple scan_a_token(self) + ##cdef tuple position(self) # used frequently by Parsing.py + + @cython.final + @cython.locals(cur_pos=Py_ssize_t, cur_line=Py_ssize_t, cur_line_start=Py_ssize_t, + input_state=long, next_pos=Py_ssize_t, state=dict, + buf_start_pos=Py_ssize_t, buf_len=Py_ssize_t, buf_index=Py_ssize_t, + trace=bint, discard=Py_ssize_t, data=unicode, buffer=unicode) + cdef run_machine_inlined(self) + + @cython.final + cdef begin(self, state) + @cython.final + cdef produce(self, value, text = *) diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.py b/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.py new file mode 100644 index 0000000..88f7e2d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Plex/Scanners.py @@ -0,0 +1,338 @@ +# cython: auto_pickle=False +#======================================================================= +# +# Python Lexical Analyser +# +# +# Scanning an input stream +# +#======================================================================= + +from __future__ import absolute_import + +import cython + +cython.declare(BOL=object, EOL=object, EOF=object, NOT_FOUND=object) + +from . import Errors +from .Regexps import BOL, EOL, EOF + +NOT_FOUND = object() + + +class Scanner(object): + """ + A Scanner is used to read tokens from a stream of characters + using the token set specified by a Plex.Lexicon. + + Constructor: + + Scanner(lexicon, stream, name = '') + + See the docstring of the __init__ method for details. + + Methods: + + See the docstrings of the individual methods for more + information. + + read() --> (value, text) + Reads the next lexical token from the stream. + + position() --> (name, line, col) + Returns the position of the last token read using the + read() method. + + begin(state_name) + Causes scanner to change state. + + produce(value [, text]) + Causes return of a token value to the caller of the + Scanner. + + """ + + # lexicon = None # Lexicon + # stream = None # file-like object + # name = '' + # buffer = '' + # buf_start_pos = 0 # position in input of start of buffer + # next_pos = 0 # position in input of next char to read + # cur_pos = 0 # position in input of current char + # cur_line = 1 # line number of current char + # cur_line_start = 0 # position in input of start of current line + # start_pos = 0 # position in input of start of token + # start_line = 0 # line number of start of token + # start_col = 0 # position in line of start of token + # text = None # text of last token read + # initial_state = None # Node + # state_name = '' # Name of initial state + # queue = None # list of tokens to be returned + # trace = 0 + + def __init__(self, lexicon, stream, name='', initial_pos=None): + """ + Scanner(lexicon, stream, name = '') + + |lexicon| is a Plex.Lexicon instance specifying the lexical tokens + to be recognised. + + |stream| can be a file object or anything which implements a + compatible read() method. + + |name| is optional, and may be the name of the file being + scanned or any other identifying string. 
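+
+        A minimal usage sketch (assuming a lexicon built elsewhere):
+
+            scanner = Scanner(lexicon, open('data.txt'), 'data.txt')
+            value, text = scanner.read()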
+ """ + self.trace = 0 + + self.buffer = u'' + self.buf_start_pos = 0 + self.next_pos = 0 + self.cur_pos = 0 + self.cur_line = 1 + self.start_pos = 0 + self.start_line = 0 + self.start_col = 0 + self.text = None + self.state_name = None + + self.lexicon = lexicon + self.stream = stream + self.name = name + self.queue = [] + self.initial_state = None + self.begin('') + self.next_pos = 0 + self.cur_pos = 0 + self.cur_line_start = 0 + self.cur_char = BOL + self.input_state = 1 + if initial_pos is not None: + self.cur_line, self.cur_line_start = initial_pos[1], -initial_pos[2] + + def read(self): + """ + Read the next lexical token from the stream and return a + tuple (value, text), where |value| is the value associated with + the token as specified by the Lexicon, and |text| is the actual + string read from the stream. Returns (None, '') on end of file. + """ + queue = self.queue + while not queue: + self.text, action = self.scan_a_token() + if action is None: + self.produce(None) + self.eof() + else: + value = action.perform(self, self.text) + if value is not None: + self.produce(value) + result = queue[0] + del queue[0] + return result + + def scan_a_token(self): + """ + Read the next input sequence recognised by the machine + and return (text, action). Returns ('', None) on end of + file. + """ + self.start_pos = self.cur_pos + self.start_line = self.cur_line + self.start_col = self.cur_pos - self.cur_line_start + action = self.run_machine_inlined() + if action is not None: + if self.trace: + print("Scanner: read: Performing %s %d:%d" % ( + action, self.start_pos, self.cur_pos)) + text = self.buffer[ + self.start_pos - self.buf_start_pos: + self.cur_pos - self.buf_start_pos] + return (text, action) + else: + if self.cur_pos == self.start_pos: + if self.cur_char is EOL: + self.next_char() + if self.cur_char is None or self.cur_char is EOF: + return (u'', None) + raise Errors.UnrecognizedInput(self, self.state_name) + + def run_machine_inlined(self): + """ + Inlined version of run_machine for speed. 
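+        All scanner state (positions, line counters, the current
+        character and the input buffer) is copied into local variables
+        up front and written back once on exit, so the inner DFA loop
+        avoids repeated attribute lookups on self.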
+ """ + state = self.initial_state + cur_pos = self.cur_pos + cur_line = self.cur_line + cur_line_start = self.cur_line_start + cur_char = self.cur_char + input_state = self.input_state + next_pos = self.next_pos + buffer = self.buffer + buf_start_pos = self.buf_start_pos + buf_len = len(buffer) + b_action, b_cur_pos, b_cur_line, b_cur_line_start, b_cur_char, b_input_state, b_next_pos = \ + None, 0, 0, 0, u'', 0, 0 + trace = self.trace + while 1: + if trace: #TRACE# + print("State %d, %d/%d:%s -->" % ( #TRACE# + state['number'], input_state, cur_pos, repr(cur_char))) #TRACE# + # Begin inlined self.save_for_backup() + #action = state.action #@slow + action = state['action'] #@fast + if action is not None: + b_action, b_cur_pos, b_cur_line, b_cur_line_start, b_cur_char, b_input_state, b_next_pos = \ + action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos + # End inlined self.save_for_backup() + c = cur_char + #new_state = state.new_state(c) #@slow + new_state = state.get(c, NOT_FOUND) #@fast + if new_state is NOT_FOUND: #@fast + new_state = c and state.get('else') #@fast + if new_state: + if trace: #TRACE# + print("State %d" % new_state['number']) #TRACE# + state = new_state + # Begin inlined: self.next_char() + if input_state == 1: + cur_pos = next_pos + # Begin inlined: c = self.read_char() + buf_index = next_pos - buf_start_pos + if buf_index < buf_len: + c = buffer[buf_index] + next_pos += 1 + else: + discard = self.start_pos - buf_start_pos + data = self.stream.read(0x1000) + buffer = self.buffer[discard:] + data + self.buffer = buffer + buf_start_pos += discard + self.buf_start_pos = buf_start_pos + buf_len = len(buffer) + buf_index -= discard + if data: + c = buffer[buf_index] + next_pos += 1 + else: + c = u'' + # End inlined: c = self.read_char() + if c == u'\n': + cur_char = EOL + input_state = 2 + elif not c: + cur_char = EOL + input_state = 4 + else: + cur_char = c + elif input_state == 2: + cur_char = u'\n' + input_state = 3 + elif input_state == 3: + cur_line += 1 + cur_line_start = cur_pos = next_pos + cur_char = BOL + input_state = 1 + elif input_state == 4: + cur_char = EOF + input_state = 5 + else: # input_state = 5 + cur_char = u'' + # End inlined self.next_char() + else: # not new_state + if trace: #TRACE# + print("blocked") #TRACE# + # Begin inlined: action = self.back_up() + if b_action is not None: + (action, cur_pos, cur_line, cur_line_start, + cur_char, input_state, next_pos) = \ + (b_action, b_cur_pos, b_cur_line, b_cur_line_start, + b_cur_char, b_input_state, b_next_pos) + else: + action = None + break # while 1 + # End inlined: action = self.back_up() + self.cur_pos = cur_pos + self.cur_line = cur_line + self.cur_line_start = cur_line_start + self.cur_char = cur_char + self.input_state = input_state + self.next_pos = next_pos + if trace: #TRACE# + if action is not None: #TRACE# + print("Doing %s" % action) #TRACE# + return action + + def next_char(self): + input_state = self.input_state + if self.trace: + print("Scanner: next: %s [%d] %d" % (" " * 20, input_state, self.cur_pos)) + if input_state == 1: + self.cur_pos = self.next_pos + c = self.read_char() + if c == u'\n': + self.cur_char = EOL + self.input_state = 2 + elif not c: + self.cur_char = EOL + self.input_state = 4 + else: + self.cur_char = c + elif input_state == 2: + self.cur_char = u'\n' + self.input_state = 3 + elif input_state == 3: + self.cur_line += 1 + self.cur_line_start = self.cur_pos = self.next_pos + self.cur_char = BOL + self.input_state = 1 + elif input_state == 4: + 
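# State 4: the synthetic end-of-line at end of input has been
+            # delivered; report EOF next, then '' forever (state 5).
+            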
self.cur_char = EOF + self.input_state = 5 + else: # input_state = 5 + self.cur_char = u'' + if self.trace: + print("--> [%d] %d %r" % (input_state, self.cur_pos, self.cur_char)) + + def position(self): + """ + Return a tuple (name, line, col) representing the location of + the last token read using the read() method. |name| is the + name that was provided to the Scanner constructor; |line| + is the line number in the stream (1-based); |col| is the + position within the line of the first character of the token + (0-based). + """ + return (self.name, self.start_line, self.start_col) + + def get_position(self): + """Python accessible wrapper around position(), only for error reporting. + """ + return self.position() + + def begin(self, state_name): + """Set the current state of the scanner to the named state.""" + self.initial_state = ( + self.lexicon.get_initial_state(state_name)) + self.state_name = state_name + + def produce(self, value, text=None): + """ + Called from an action procedure, causes |value| to be returned + as the token value from read(). If |text| is supplied, it is + returned in place of the scanned text. + + produce() can be called more than once during a single call to an action + procedure, in which case the tokens are queued up and returned one + at a time by subsequent calls to read(), until the queue is empty, + whereupon scanning resumes. + """ + if text is None: + text = self.text + self.queue.append((value, text)) + + def eof(self): + """ + Override this method if you want something to be done at + end of file. + """ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/Timing.py b/venv/lib/python3.8/site-packages/Cython/Plex/Timing.py new file mode 100644 index 0000000..5c36926 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Plex/Timing.py @@ -0,0 +1,23 @@ +# +# Get time in platform-dependent way +# + +from __future__ import absolute_import + +import os +from sys import platform, exit, stderr + +if platform == 'mac': + import MacOS + def time(): + return MacOS.GetTicks() / 60.0 + timekind = "real" +elif hasattr(os, 'times'): + def time(): + t = os.times() + return t[0] + t[1] + timekind = "cpu" +else: + stderr.write( + "Don't know how to get time on platform %s\n" % repr(platform)) + exit(1) diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/Traditional.py b/venv/lib/python3.8/site-packages/Cython/Plex/Traditional.py new file mode 100644 index 0000000..ec7252d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Plex/Traditional.py @@ -0,0 +1,158 @@ +#======================================================================= +# +# Python Lexical Analyser +# +# Traditional Regular Expression Syntax +# +#======================================================================= + +from __future__ import absolute_import + +from .Regexps import Alt, Seq, Rep, Rep1, Opt, Any, AnyBut, Bol, Eol, Char +from .Errors import PlexError + + +class RegexpSyntaxError(PlexError): + pass + + +def re(s): + """ + Convert traditional string representation of regular expression |s| + into Plex representation. 
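+
+    For example (illustrative), re("ab*") builds the same RE as
+    Seq(Char('a'), Rep(Char('b'))), and re("[0-9]+") the same as
+    Rep1(Any("0123456789")).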
+ """ + return REParser(s).parse_re() + + +class REParser(object): + def __init__(self, s): + self.s = s + self.i = -1 + self.end = 0 + self.next() + + def parse_re(self): + re = self.parse_alt() + if not self.end: + self.error("Unexpected %s" % repr(self.c)) + return re + + def parse_alt(self): + """Parse a set of alternative regexps.""" + re = self.parse_seq() + if self.c == '|': + re_list = [re] + while self.c == '|': + self.next() + re_list.append(self.parse_seq()) + re = Alt(*re_list) + return re + + def parse_seq(self): + """Parse a sequence of regexps.""" + re_list = [] + while not self.end and not self.c in "|)": + re_list.append(self.parse_mod()) + return Seq(*re_list) + + def parse_mod(self): + """Parse a primitive regexp followed by *, +, ? modifiers.""" + re = self.parse_prim() + while not self.end and self.c in "*+?": + if self.c == '*': + re = Rep(re) + elif self.c == '+': + re = Rep1(re) + else: # self.c == '?' + re = Opt(re) + self.next() + return re + + def parse_prim(self): + """Parse a primitive regexp.""" + c = self.get() + if c == '.': + re = AnyBut("\n") + elif c == '^': + re = Bol + elif c == '$': + re = Eol + elif c == '(': + re = self.parse_alt() + self.expect(')') + elif c == '[': + re = self.parse_charset() + self.expect(']') + else: + if c == '\\': + c = self.get() + re = Char(c) + return re + + def parse_charset(self): + """Parse a charset. Does not include the surrounding [].""" + char_list = [] + invert = 0 + if self.c == '^': + invert = 1 + self.next() + if self.c == ']': + char_list.append(']') + self.next() + while not self.end and self.c != ']': + c1 = self.get() + if self.c == '-' and self.lookahead(1) != ']': + self.next() + c2 = self.get() + for a in range(ord(c1), ord(c2) + 1): + char_list.append(chr(a)) + else: + char_list.append(c1) + chars = ''.join(char_list) + if invert: + return AnyBut(chars) + else: + return Any(chars) + + def next(self): + """Advance to the next char.""" + s = self.s + i = self.i = self.i + 1 + if i < len(s): + self.c = s[i] + else: + self.c = '' + self.end = 1 + + def get(self): + if self.end: + self.error("Premature end of string") + c = self.c + self.next() + return c + + def lookahead(self, n): + """Look ahead n chars.""" + j = self.i + n + if j < len(self.s): + return self.s[j] + else: + return '' + + def expect(self, c): + """ + Expect to find character |c| at current position. + Raises an exception otherwise. + """ + if self.c == c: + self.next() + else: + self.error("Missing %s" % repr(c)) + + def error(self, mess): + """Raise exception to signal syntax error in regexp.""" + raise RegexpSyntaxError("Syntax error in regexp %s at position %d: %s" % ( + repr(self.s), self.i, mess)) + + + diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/Transitions.py b/venv/lib/python3.8/site-packages/Cython/Plex/Transitions.py new file mode 100644 index 0000000..3833817 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Plex/Transitions.py @@ -0,0 +1,251 @@ +# +# Plex - Transition Maps +# +# This version represents state sets directly as dicts for speed. +# + +from __future__ import absolute_import + +try: + from sys import maxsize as maxint +except ImportError: + from sys import maxint + + +class TransitionMap(object): + """ + A TransitionMap maps an input event to a set of states. + An input event is one of: a range of character codes, + the empty string (representing an epsilon move), or one + of the special symbols BOL, EOL, EOF. 
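+
+    For example, a range event is a pair such as (ord('a'), ord('z') + 1),
+    covering 'a' through 'z'; the upper code is exclusive in practice
+    (see add() below).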
+ + For characters, this implementation compactly represents + the map by means of a list: + + [code_0, states_0, code_1, states_1, code_2, states_2, + ..., code_n-1, states_n-1, code_n] + + where |code_i| is a character code, and |states_i| is a + set of states corresponding to characters with codes |c| + in the range |code_i| <= |c| <= |code_i+1|. + + The following invariants hold: + n >= 1 + code_0 == -maxint + code_n == maxint + code_i < code_i+1 for i in 0..n-1 + states_0 == states_n-1 + + Mappings for the special events '', BOL, EOL, EOF are + kept separately in a dictionary. + """ + + map = None # The list of codes and states + special = None # Mapping for special events + + def __init__(self, map=None, special=None): + if not map: + map = [-maxint, {}, maxint] + if not special: + special = {} + self.map = map + self.special = special + #self.check() ### + + def add(self, event, new_state, + TupleType=tuple): + """ + Add transition to |new_state| on |event|. + """ + if type(event) is TupleType: + code0, code1 = event + i = self.split(code0) + j = self.split(code1) + map = self.map + while i < j: + map[i + 1][new_state] = 1 + i += 2 + else: + self.get_special(event)[new_state] = 1 + + def add_set(self, event, new_set, + TupleType=tuple): + """ + Add transitions to the states in |new_set| on |event|. + """ + if type(event) is TupleType: + code0, code1 = event + i = self.split(code0) + j = self.split(code1) + map = self.map + while i < j: + map[i + 1].update(new_set) + i += 2 + else: + self.get_special(event).update(new_set) + + def get_epsilon(self, + none=None): + """ + Return the mapping for epsilon, or None. + """ + return self.special.get('', none) + + def iteritems(self, + len=len): + """ + Return the mapping as an iterable of ((code1, code2), state_set) and + (special_event, state_set) pairs. + """ + result = [] + map = self.map + else_set = map[1] + i = 0 + n = len(map) - 1 + code0 = map[0] + while i < n: + set = map[i + 1] + code1 = map[i + 2] + if set or else_set: + result.append(((code0, code1), set)) + code0 = code1 + i += 2 + for event, set in self.special.items(): + if set: + result.append((event, set)) + return iter(result) + + items = iteritems + + # ------------------- Private methods -------------------- + + def split(self, code, + len=len, maxint=maxint): + """ + Search the list for the position of the split point for |code|, + inserting a new split point if necessary. Returns index |i| such + that |code| == |map[i]|. + """ + # We use a funky variation on binary search. + map = self.map + hi = len(map) - 1 + # Special case: code == map[-1] + if code == maxint: + return hi + # General case + lo = 0 + # loop invariant: map[lo] <= code < map[hi] and hi - lo >= 2 + while hi - lo >= 4: + # Find midpoint truncated to even index + mid = ((lo + hi) // 2) & ~1 + if code < map[mid]: + hi = mid + else: + lo = mid + # map[lo] <= code < map[hi] and hi - lo == 2 + if map[lo] == code: + return lo + else: + map[hi:hi] = [code, map[hi - 1].copy()] + #self.check() ### + return hi + + def get_special(self, event): + """ + Get state set for special event, adding a new entry if necessary. 
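+        The returned dict is the one stored in self.special, so callers
+        mutate it in place to add transitions.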
+ """ + special = self.special + set = special.get(event, None) + if not set: + set = {} + special[event] = set + return set + + # --------------------- Conversion methods ----------------------- + + def __str__(self): + map_strs = [] + map = self.map + n = len(map) + i = 0 + while i < n: + code = map[i] + if code == -maxint: + code_str = "-inf" + elif code == maxint: + code_str = "inf" + else: + code_str = str(code) + map_strs.append(code_str) + i += 1 + if i < n: + map_strs.append(state_set_str(map[i])) + i += 1 + special_strs = {} + for event, set in self.special.items(): + special_strs[event] = state_set_str(set) + return "[%s]+%s" % ( + ','.join(map_strs), + special_strs + ) + + # --------------------- Debugging methods ----------------------- + + def check(self): + """Check data structure integrity.""" + if not self.map[-3] < self.map[-1]: + print(self) + assert 0 + + def dump(self, file): + map = self.map + i = 0 + n = len(map) - 1 + while i < n: + self.dump_range(map[i], map[i + 2], map[i + 1], file) + i += 2 + for event, set in self.special.items(): + if set: + if not event: + event = 'empty' + self.dump_trans(event, set, file) + + def dump_range(self, code0, code1, set, file): + if set: + if code0 == -maxint: + if code1 == maxint: + k = "any" + else: + k = "< %s" % self.dump_char(code1) + elif code1 == maxint: + k = "> %s" % self.dump_char(code0 - 1) + elif code0 == code1 - 1: + k = self.dump_char(code0) + else: + k = "%s..%s" % (self.dump_char(code0), + self.dump_char(code1 - 1)) + self.dump_trans(k, set, file) + + def dump_char(self, code): + if 0 <= code <= 255: + return repr(chr(code)) + else: + return "chr(%d)" % code + + def dump_trans(self, key, set, file): + file.write(" %s --> %s\n" % (key, self.dump_set(set))) + + def dump_set(self, set): + return state_set_str(set) + + +# +# State set manipulation functions +# + +#def merge_state_sets(set1, set2): +# for state in set2.keys(): +# set1[state] = 1 + +def state_set_str(set): + return "[%s]" % ','.join(["S%d" % state.number for state in set]) diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__init__.py b/venv/lib/python3.8/site-packages/Cython/Plex/__init__.py new file mode 100644 index 0000000..81a066f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Plex/__init__.py @@ -0,0 +1,39 @@ +#======================================================================= +# +# Python Lexical Analyser +# +#======================================================================= + +""" +The Plex module provides lexical analysers with similar capabilities +to GNU Flex. The following classes and functions are exported; +see the attached docstrings for more information. + + Scanner For scanning a character stream under the + direction of a Lexicon. + + Lexicon For constructing a lexical definition + to be used by a Scanner. + + Str, Any, AnyBut, AnyChar, Seq, Alt, Opt, Rep, Rep1, + Bol, Eol, Eof, Empty + + Regular expression constructors, for building pattern + definitions for a Lexicon. + + State For defining scanner states when creating a + Lexicon. + + TEXT, IGNORE, Begin + + Actions for associating with patterns when + creating a Lexicon. 
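+
+A minimal example (illustrative; the token names and the input are
+placeholders):
+
+    from io import StringIO
+    from Cython.Plex import Lexicon, Scanner, Str, Any, Rep1, IGNORE
+
+    lexicon = Lexicon([
+        (Rep1(Any("0123456789")), 'int'),  # integer literals
+        (Str("+", "-"), 'op'),             # operators
+        (Any(' '), IGNORE),                # skip spaces
+    ])
+    scanner = Scanner(lexicon, StringIO(u"1 + 22"), 'example')
+    while True:
+        value, text = scanner.read()
+        if value is None:                  # (None, '') signals end of file
+            break
+        print(value, text)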
+""" + +from __future__ import absolute_import + +from .Actions import TEXT, IGNORE, Begin +from .Lexicons import Lexicon, State +from .Regexps import RE, Seq, Alt, Rep1, Empty, Str, Any, AnyBut, AnyChar, Range +from .Regexps import Opt, Rep, Bol, Eol, Eof, Case, NoCase +from .Scanners import Scanner diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Actions.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Actions.cpython-38.pyc new file mode 100644 index 0000000..0dbcc89 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Actions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/DFA.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/DFA.cpython-38.pyc new file mode 100644 index 0000000..b1d13ac Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/DFA.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Errors.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Errors.cpython-38.pyc new file mode 100644 index 0000000..422978a Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Errors.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-38.pyc new file mode 100644 index 0000000..65a732c Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Machines.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Machines.cpython-38.pyc new file mode 100644 index 0000000..1491de5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Machines.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Regexps.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Regexps.cpython-38.pyc new file mode 100644 index 0000000..8628973 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Regexps.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Scanners.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Scanners.cpython-38.pyc new file mode 100644 index 0000000..b2f820c Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Scanners.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Timing.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Timing.cpython-38.pyc new file mode 100644 index 0000000..b5822e8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Timing.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Traditional.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Traditional.cpython-38.pyc new file mode 100644 index 0000000..c1e1cbf Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Traditional.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Transitions.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Transitions.cpython-38.pyc new file mode 100644 index 0000000..5f7d240 Binary 
files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/Transitions.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..67ec8e1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Plex/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Runtime/__init__.py b/venv/lib/python3.8/site-packages/Cython/Runtime/__init__.py new file mode 100644 index 0000000..fa81ada --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Runtime/__init__.py @@ -0,0 +1 @@ +# empty file diff --git a/venv/lib/python3.8/site-packages/Cython/Runtime/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Runtime/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..55e747e Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Runtime/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Runtime/refnanny.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/Cython/Runtime/refnanny.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..751e0d8 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Runtime/refnanny.cpython-38-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.8/site-packages/Cython/Runtime/refnanny.pyx b/venv/lib/python3.8/site-packages/Cython/Runtime/refnanny.pyx new file mode 100644 index 0000000..d4b873f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Runtime/refnanny.pyx @@ -0,0 +1,194 @@ +# cython: language_level=3, auto_pickle=False + +from cpython.ref cimport PyObject, Py_INCREF, Py_DECREF, Py_XDECREF, Py_XINCREF +from cpython.exc cimport PyErr_Fetch, PyErr_Restore +from cpython.pystate cimport PyThreadState_Get + +cimport cython + +loglevel = 0 +reflog = [] + +cdef log(level, action, obj, lineno): + if loglevel >= level: + reflog.append((lineno, action, id(obj))) + +LOG_NONE, LOG_ALL = range(2) + +@cython.final +cdef class Context(object): + cdef readonly object name, filename + cdef readonly dict refs + cdef readonly list errors + cdef readonly Py_ssize_t start + + def __cinit__(self, name, line=0, filename=None): + self.name = name + self.start = line + self.filename = filename + self.refs = {} # id -> (count, [lineno]) + self.errors = [] + + cdef regref(self, obj, lineno, bint is_null): + log(LOG_ALL, u'regref', u"" if is_null else obj, lineno) + if is_null: + self.errors.append(f"NULL argument on line {lineno}") + return + id_ = id(obj) + count, linenumbers = self.refs.get(id_, (0, [])) + self.refs[id_] = (count + 1, linenumbers) + linenumbers.append(lineno) + + cdef bint delref(self, obj, lineno, bint is_null) except -1: + # returns whether it is ok to do the decref operation + log(LOG_ALL, u'delref', u"" if is_null else obj, lineno) + if is_null: + self.errors.append(f"NULL argument on line {lineno}") + return False + id_ = id(obj) + count, linenumbers = self.refs.get(id_, (0, [])) + if count == 0: + self.errors.append(f"Too many decrefs on line {lineno}, reference acquired on lines {linenumbers!r}") + return False + elif count == 1: + del self.refs[id_] + return True + else: + self.refs[id_] = (count - 1, linenumbers) + return True + + cdef end(self): + if self.refs: + msg = u"References leaked:" + for count, linenos in self.refs.itervalues(): + msg += f"\n ({count}) 
acquired on lines: {u', '.join([f'{x}' for x in linenos])}"
+            self.errors.append(msg)
+        if self.errors:
+            return u"\n".join([u'REFNANNY: '+error for error in self.errors])
+        else:
+            return None
+
+cdef void report_unraisable(object e=None):
+    try:
+        if e is None:
+            import sys
+            e = sys.exc_info()[1]
+        print(f"refnanny raised an exception: {e}")
+    except:
+        pass # We absolutely cannot exit with an exception
+
+# All Python operations must happen after any existing
+# exception has been fetched, in case we are called from
+# exception-handling code.
+
+cdef PyObject* SetupContext(char* funcname, int lineno, char* filename) except NULL:
+    if Context is None:
+        # Context may be None during finalize phase.
+        # In that case, we don't want to be doing anything fancy
+        # like caching and resetting exceptions.
+        return NULL
+    cdef (PyObject*) type = NULL, value = NULL, tb = NULL, result = NULL
+    PyThreadState_Get()
+    PyErr_Fetch(&type, &value, &tb)
+    try:
+        ctx = Context(funcname, lineno, filename)
+        Py_INCREF(ctx)
+        result = <PyObject*>ctx
+    except Exception, e:
+        report_unraisable(e)
+    PyErr_Restore(type, value, tb)
+    return result
+
+cdef void GOTREF(PyObject* ctx, PyObject* p_obj, int lineno):
+    if ctx == NULL: return
+    cdef (PyObject*) type = NULL, value = NULL, tb = NULL
+    PyErr_Fetch(&type, &value, &tb)
+    try:
+        try:
+            if p_obj is NULL:
+                (<Context>ctx).regref(None, lineno, True)
+            else:
+                (<Context>ctx).regref(<object>p_obj, lineno, False)
+        except:
+            report_unraisable()
+    except:
+        # __Pyx_GetException may itself raise errors
+        pass
+    PyErr_Restore(type, value, tb)
+
+cdef int GIVEREF_and_report(PyObject* ctx, PyObject* p_obj, int lineno):
+    if ctx == NULL: return 1
+    cdef (PyObject*) type = NULL, value = NULL, tb = NULL
+    cdef bint decref_ok = False
+    PyErr_Fetch(&type, &value, &tb)
+    try:
+        try:
+            if p_obj is NULL:
+                decref_ok = (<Context>ctx).delref(None, lineno, True)
+            else:
+                decref_ok = (<Context>ctx).delref(<object>p_obj, lineno, False)
+        except:
+            report_unraisable()
+    except:
+        # __Pyx_GetException may itself raise errors
+        pass
+    PyErr_Restore(type, value, tb)
+    return decref_ok
+
+cdef void GIVEREF(PyObject* ctx, PyObject* p_obj, int lineno):
+    GIVEREF_and_report(ctx, p_obj, lineno)
+
+cdef void INCREF(PyObject* ctx, PyObject* obj, int lineno):
+    Py_XINCREF(obj)
+    PyThreadState_Get()
+    GOTREF(ctx, obj, lineno)
+
+cdef void DECREF(PyObject* ctx, PyObject* obj, int lineno):
+    if GIVEREF_and_report(ctx, obj, lineno):
+        Py_XDECREF(obj)
+    PyThreadState_Get()
+
+cdef void FinishContext(PyObject** ctx):
+    if ctx == NULL or ctx[0] == NULL: return
+    cdef (PyObject*) type = NULL, value = NULL, tb = NULL
+    cdef object errors = None
+    cdef Context context
+    PyThreadState_Get()
+    PyErr_Fetch(&type, &value, &tb)
+    try:
+        try:
+            context = <Context>ctx[0]
+            errors = context.end()
+            if errors:
+                print(f"{context.filename.decode('latin1')}: {context.name.decode('latin1')}()")
+                print(errors)
+            context = None
+        except:
+            report_unraisable()
+    except:
+        # __Pyx_GetException may itself raise errors
+        pass
+    Py_XDECREF(ctx[0])
+    ctx[0] = NULL
+    PyErr_Restore(type, value, tb)
+
+ctypedef struct RefNannyAPIStruct:
+    void (*INCREF)(PyObject*, PyObject*, int)
+    void (*DECREF)(PyObject*, PyObject*, int)
+    void (*GOTREF)(PyObject*, PyObject*, int)
+    void (*GIVEREF)(PyObject*, PyObject*, int)
+    PyObject* (*SetupContext)(char*, int, char*) except NULL
+    void (*FinishContext)(PyObject**)
+
+cdef RefNannyAPIStruct api
+api.INCREF = INCREF
+api.DECREF = DECREF
+api.GOTREF = GOTREF
+api.GIVEREF = GIVEREF
+api.SetupContext = SetupContext
+api.FinishContext = FinishContext
+
+cdef
extern from "Python.h": + object PyLong_FromVoidPtr(void*) + +RefNannyAPI = PyLong_FromVoidPtr(&api) diff --git a/venv/lib/python3.8/site-packages/Cython/Shadow.py b/venv/lib/python3.8/site-packages/Cython/Shadow.py new file mode 100644 index 0000000..5d8d628 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Shadow.py @@ -0,0 +1,474 @@ +# cython.* namespace for pure mode. +from __future__ import absolute_import + +__version__ = "0.29.19" + +try: + from __builtin__ import basestring +except ImportError: + basestring = str + + +# BEGIN shameless copy from Cython/minivect/minitypes.py + +class _ArrayType(object): + + is_array = True + subtypes = ['dtype'] + + def __init__(self, dtype, ndim, is_c_contig=False, is_f_contig=False, + inner_contig=False, broadcasting=None): + self.dtype = dtype + self.ndim = ndim + self.is_c_contig = is_c_contig + self.is_f_contig = is_f_contig + self.inner_contig = inner_contig or is_c_contig or is_f_contig + self.broadcasting = broadcasting + + def __repr__(self): + axes = [":"] * self.ndim + if self.is_c_contig: + axes[-1] = "::1" + elif self.is_f_contig: + axes[0] = "::1" + + return "%s[%s]" % (self.dtype, ", ".join(axes)) + + +def index_type(base_type, item): + """ + Support array type creation by slicing, e.g. double[:, :] specifies + a 2D strided array of doubles. The syntax is the same as for + Cython memoryviews. + """ + class InvalidTypeSpecification(Exception): + pass + + def verify_slice(s): + if s.start or s.stop or s.step not in (None, 1): + raise InvalidTypeSpecification( + "Only a step of 1 may be provided to indicate C or " + "Fortran contiguity") + + if isinstance(item, tuple): + step_idx = None + for idx, s in enumerate(item): + verify_slice(s) + if s.step and (step_idx or idx not in (0, len(item) - 1)): + raise InvalidTypeSpecification( + "Step may only be provided once, and only in the " + "first or last dimension.") + + if s.step == 1: + step_idx = idx + + return _ArrayType(base_type, len(item), + is_c_contig=step_idx == len(item) - 1, + is_f_contig=step_idx == 0) + elif isinstance(item, slice): + verify_slice(item) + return _ArrayType(base_type, 1, is_c_contig=bool(item.step)) + else: + # int[8] etc. 
+ assert int(item) == item # array size must be a plain integer + array(base_type, item) + +# END shameless copy + + +compiled = False + +_Unspecified = object() + +# Function decorators + +def _empty_decorator(x): + return x + +def locals(**arg_types): + return _empty_decorator + +def test_assert_path_exists(*paths): + return _empty_decorator + +def test_fail_if_path_exists(*paths): + return _empty_decorator + +class _EmptyDecoratorAndManager(object): + def __call__(self, x): + return x + def __enter__(self): + pass + def __exit__(self, exc_type, exc_value, traceback): + pass + +class _Optimization(object): + pass + +cclass = ccall = cfunc = _EmptyDecoratorAndManager() + +returns = wraparound = boundscheck = initializedcheck = nonecheck = \ + embedsignature = cdivision = cdivision_warnings = \ + always_allows_keywords = profile = linetrace = infer_types = \ + unraisable_tracebacks = freelist = \ + lambda _: _EmptyDecoratorAndManager() + +exceptval = lambda _=None, check=True: _EmptyDecoratorAndManager() + +overflowcheck = lambda _: _EmptyDecoratorAndManager() +optimization = _Optimization() + +overflowcheck.fold = optimization.use_switch = \ + optimization.unpack_method_calls = lambda arg: _EmptyDecoratorAndManager() + +final = internal = type_version_tag = no_gc_clear = no_gc = _empty_decorator + +binding = lambda _: _empty_decorator + + +_cython_inline = None +def inline(f, *args, **kwds): + if isinstance(f, basestring): + global _cython_inline + if _cython_inline is None: + from Cython.Build.Inline import cython_inline as _cython_inline + return _cython_inline(f, *args, **kwds) + else: + assert len(args) == len(kwds) == 0 + return f + + +def compile(f): + from Cython.Build.Inline import RuntimeCompiledFunction + return RuntimeCompiledFunction(f) + + +# Special functions + +def cdiv(a, b): + q = a / b + if q < 0: + q += 1 + return q + +def cmod(a, b): + r = a % b + if (a*b) < 0: + r -= b + return r + + +# Emulated language constructs + +def cast(type, *args, **kwargs): + kwargs.pop('typecheck', None) + assert not kwargs + if hasattr(type, '__call__'): + return type(*args) + else: + return args[0] + +def sizeof(arg): + return 1 + +def typeof(arg): + return arg.__class__.__name__ + # return type(arg) + +def address(arg): + return pointer(type(arg))([arg]) + +def declare(type=None, value=_Unspecified, **kwds): + if type not in (None, object) and hasattr(type, '__call__'): + if value is not _Unspecified: + return type(value) + else: + return type() + else: + return value + +class _nogil(object): + """Support for 'with nogil' statement and @nogil decorator. + """ + def __call__(self, x): + if callable(x): + # Used as function decorator => return the function unchanged. + return x + # Used as conditional context manager or to create an "@nogil(True/False)" decorator => keep going. 
+ return self + + def __enter__(self): + pass + def __exit__(self, exc_class, exc, tb): + return exc_class is None + +nogil = _nogil() +gil = _nogil() +del _nogil + + +# Emulated types + +class CythonMetaType(type): + + def __getitem__(type, ix): + return array(type, ix) + +CythonTypeObject = CythonMetaType('CythonTypeObject', (object,), {}) + +class CythonType(CythonTypeObject): + + def _pointer(self, n=1): + for i in range(n): + self = pointer(self) + return self + +class PointerType(CythonType): + + def __init__(self, value=None): + if isinstance(value, (ArrayType, PointerType)): + self._items = [cast(self._basetype, a) for a in value._items] + elif isinstance(value, list): + self._items = [cast(self._basetype, a) for a in value] + elif value is None or value == 0: + self._items = [] + else: + raise ValueError + + def __getitem__(self, ix): + if ix < 0: + raise IndexError("negative indexing not allowed in C") + return self._items[ix] + + def __setitem__(self, ix, value): + if ix < 0: + raise IndexError("negative indexing not allowed in C") + self._items[ix] = cast(self._basetype, value) + + def __eq__(self, value): + if value is None and not self._items: + return True + elif type(self) != type(value): + return False + else: + return not self._items and not value._items + + def __repr__(self): + return "%s *" % (self._basetype,) + +class ArrayType(PointerType): + + def __init__(self): + self._items = [None] * self._n + + +class StructType(CythonType): + + def __init__(self, cast_from=_Unspecified, **data): + if cast_from is not _Unspecified: + # do cast + if len(data) > 0: + raise ValueError('Cannot accept keyword arguments when casting.') + if type(cast_from) is not type(self): + raise ValueError('Cannot cast from %s'%cast_from) + for key, value in cast_from.__dict__.items(): + setattr(self, key, value) + else: + for key, value in data.items(): + setattr(self, key, value) + + def __setattr__(self, key, value): + if key in self._members: + self.__dict__[key] = cast(self._members[key], value) + else: + raise AttributeError("Struct has no member '%s'" % key) + + +class UnionType(CythonType): + + def __init__(self, cast_from=_Unspecified, **data): + if cast_from is not _Unspecified: + # do type cast + if len(data) > 0: + raise ValueError('Cannot accept keyword arguments when casting.') + if isinstance(cast_from, dict): + datadict = cast_from + elif type(cast_from) is type(self): + datadict = cast_from.__dict__ + else: + raise ValueError('Cannot cast from %s'%cast_from) + else: + datadict = data + if len(datadict) > 1: + raise AttributeError("Union can only store one field at a time.") + for key, value in datadict.items(): + setattr(self, key, value) + + def __setattr__(self, key, value): + if key in '__dict__': + CythonType.__setattr__(self, key, value) + elif key in self._members: + self.__dict__ = {key: cast(self._members[key], value)} + else: + raise AttributeError("Union has no member '%s'" % key) + +def pointer(basetype): + class PointerInstance(PointerType): + _basetype = basetype + return PointerInstance + +def array(basetype, n): + class ArrayInstance(ArrayType): + _basetype = basetype + _n = n + return ArrayInstance + +def struct(**members): + class StructInstance(StructType): + _members = members + for key in members: + setattr(StructInstance, key, None) + return StructInstance + +def union(**members): + class UnionInstance(UnionType): + _members = members + for key in members: + setattr(UnionInstance, key, None) + return UnionInstance + +class typedef(CythonType): + + def 
__init__(self, type, name=None): + self._basetype = type + self.name = name + + def __call__(self, *arg): + value = cast(self._basetype, *arg) + return value + + def __repr__(self): + return self.name or str(self._basetype) + + __getitem__ = index_type + +class _FusedType(CythonType): + pass + + +def fused_type(*args): + if not args: + raise TypeError("Expected at least one type as argument") + + # Find the numeric type with biggest rank if all types are numeric + rank = -1 + for type in args: + if type not in (py_int, py_long, py_float, py_complex): + break + + if type_ordering.index(type) > rank: + result_type = type + else: + return result_type + + # Not a simple numeric type, return a fused type instance. The result + # isn't really meant to be used, as we can't keep track of the context in + # pure-mode. Casting won't do anything in this case. + return _FusedType() + + +def _specialized_from_args(signatures, args, kwargs): + "Perhaps this should be implemented in a TreeFragment in Cython code" + raise Exception("yet to be implemented") + + +py_int = typedef(int, "int") +try: + py_long = typedef(long, "long") +except NameError: # Py3 + py_long = typedef(int, "long") +py_float = typedef(float, "float") +py_complex = typedef(complex, "double complex") + + +# Predefined types + +int_types = ['char', 'short', 'Py_UNICODE', 'int', 'Py_UCS4', 'long', 'longlong', 'Py_ssize_t', 'size_t'] +float_types = ['longdouble', 'double', 'float'] +complex_types = ['longdoublecomplex', 'doublecomplex', 'floatcomplex', 'complex'] +other_types = ['bint', 'void', 'Py_tss_t'] + +to_repr = { + 'longlong': 'long long', + 'longdouble': 'long double', + 'longdoublecomplex': 'long double complex', + 'doublecomplex': 'double complex', + 'floatcomplex': 'float complex', +}.get + +gs = globals() + +# note: cannot simply name the unicode type here as 2to3 gets in the way and replaces it by str +try: + import __builtin__ as builtins +except ImportError: # Py3 + import builtins + +gs['unicode'] = typedef(getattr(builtins, 'unicode', str), 'unicode') +del builtins + +for name in int_types: + reprname = to_repr(name, name) + gs[name] = typedef(py_int, reprname) + if name not in ('Py_UNICODE', 'Py_UCS4') and not name.endswith('size_t'): + gs['u'+name] = typedef(py_int, "unsigned " + reprname) + gs['s'+name] = typedef(py_int, "signed " + reprname) + +for name in float_types: + gs[name] = typedef(py_float, to_repr(name, name)) + +for name in complex_types: + gs[name] = typedef(py_complex, to_repr(name, name)) + +bint = typedef(bool, "bint") +void = typedef(None, "void") +Py_tss_t = typedef(None, "Py_tss_t") + +for t in int_types + float_types + complex_types + other_types: + for i in range(1, 4): + gs["%s_%s" % ('p'*i, t)] = gs[t]._pointer(i) + +NULL = gs['p_void'](0) + +# looks like 'gs' has some users out there by now... +#del gs + +integral = floating = numeric = _FusedType() + +type_ordering = [py_int, py_long, py_float, py_complex] + +class CythonDotParallel(object): + """ + The cython.parallel module. 
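+
+    In pure Python mode the parallel constructs fall back to sequential
+    equivalents, e.g. (illustrative):
+
+        from cython.parallel import prange
+        total = 0
+        for i in prange(100):  # behaves like range(0, 100, 1) uncompiled
+            total += i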
+ """ + + __all__ = ['parallel', 'prange', 'threadid'] + + def parallel(self, num_threads=None): + return nogil + + def prange(self, start=0, stop=None, step=1, nogil=False, schedule=None, chunksize=None, num_threads=None): + if stop is None: + stop = start + start = 0 + return range(start, stop, step) + + def threadid(self): + return 0 + + # def threadsavailable(self): + # return 1 + +import sys +sys.modules['cython.parallel'] = CythonDotParallel() +del sys diff --git a/venv/lib/python3.8/site-packages/Cython/StringIOTree.py b/venv/lib/python3.8/site-packages/Cython/StringIOTree.py new file mode 100644 index 0000000..d8239ef --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/StringIOTree.py @@ -0,0 +1,108 @@ +# cython: auto_pickle=False + +r""" +Implements a buffer with insertion points. When you know you need to +"get back" to a place and write more later, simply call insertion_point() +at that spot and get a new StringIOTree object that is "left behind". + +EXAMPLE: + +>>> a = StringIOTree() +>>> _= a.write('first\n') +>>> b = a.insertion_point() +>>> _= a.write('third\n') +>>> _= b.write('second\n') +>>> a.getvalue().split() +['first', 'second', 'third'] + +>>> c = b.insertion_point() +>>> d = c.insertion_point() +>>> _= d.write('alpha\n') +>>> _= b.write('gamma\n') +>>> _= c.write('beta\n') +>>> b.getvalue().split() +['second', 'alpha', 'beta', 'gamma'] + +>>> i = StringIOTree() +>>> d.insert(i) +>>> _= i.write('inserted\n') +>>> out = StringIO() +>>> a.copyto(out) +>>> out.getvalue().split() +['first', 'second', 'alpha', 'inserted', 'beta', 'gamma', 'third'] +""" + +from __future__ import absolute_import #, unicode_literals + +try: + # Prefer cStringIO since io.StringIO() does not support writing 'str' in Py2. + from cStringIO import StringIO +except ImportError: + from io import StringIO + + +class StringIOTree(object): + """ + See module docs. + """ + + def __init__(self, stream=None): + self.prepended_children = [] + if stream is None: + stream = StringIO() + self.stream = stream + self.write = stream.write + self.markers = [] + + def getvalue(self): + content = [x.getvalue() for x in self.prepended_children] + content.append(self.stream.getvalue()) + return "".join(content) + + def copyto(self, target): + """Potentially cheaper than getvalue as no string concatenation + needs to happen.""" + for child in self.prepended_children: + child.copyto(target) + stream_content = self.stream.getvalue() + if stream_content: + target.write(stream_content) + + def commit(self): + # Save what we have written until now so that the buffer + # itself is empty -- this makes it ready for insertion + if self.stream.tell(): + self.prepended_children.append(StringIOTree(self.stream)) + self.prepended_children[-1].markers = self.markers + self.markers = [] + self.stream = StringIO() + self.write = self.stream.write + + def insert(self, iotree): + """ + Insert a StringIOTree (and all of its contents) at this location. + Further writing to self appears after what is inserted. + """ + self.commit() + self.prepended_children.append(iotree) + + def insertion_point(self): + """ + Returns a new StringIOTree, which is left behind at the current position + (it what is written to the result will appear right before whatever is + next written to self). + + Calling getvalue() or copyto() on the result will only return the + contents written to it. + """ + # Save what we have written until now + # This is so that getvalue on the result doesn't include it. 
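+        # commit() seals everything written so far into a child buffer;
+        # the fork appended below therefore lands after that content.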
+ self.commit() + # Construct the new forked object to return + other = StringIOTree() + self.prepended_children.append(other) + return other + + def allmarkers(self): + children = self.prepended_children + return [m for c in children for m in c.allmarkers()] + self.markers diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/__init__.py b/venv/lib/python3.8/site-packages/Cython/Tempita/__init__.py new file mode 100644 index 0000000..41a0ce3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tempita/__init__.py @@ -0,0 +1,4 @@ +# The original Tempita implements all of its templating code here. +# Moved it to _tempita.py to make the compilation portable. + +from ._tempita import * diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..87178cb Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/_looper.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/_looper.cpython-38.pyc new file mode 100644 index 0000000..3645845 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/_looper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/_tempita.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/_tempita.cpython-38.pyc new file mode 100644 index 0000000..3b6ec20 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/_tempita.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/compat3.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/compat3.cpython-38.pyc new file mode 100644 index 0000000..c9c6abe Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tempita/__pycache__/compat3.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/_looper.py b/venv/lib/python3.8/site-packages/Cython/Tempita/_looper.py new file mode 100644 index 0000000..4010988 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tempita/_looper.py @@ -0,0 +1,163 @@ +""" +Helper for looping over sequences, particular in templates. + +Often in a loop in a template it's handy to know what's next up, +previously up, if this is the first or last item in the sequence, etc. +These can be awkward to manage in a normal Python loop, but using the +looper you can get a better sense of the context. Use like:: + + >>> for loop, item in looper(['a', 'b', 'c']): + ... print loop.number, item + ... if not loop.last: + ... print '---' + 1 a + --- + 2 b + --- + 3 c + +""" + +import sys +from Cython.Tempita.compat3 import basestring_ + +__all__ = ['looper'] + + +class looper(object): + """ + Helper for looping (particularly in templates) + + Use this like:: + + for loop, item in looper(seq): + if loop.first: + ... 
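+
+    The loop object also exposes number, index, last, odd/even and the
+    first_group()/last_group() tests for group boundaries.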
+    """
+
+    def __init__(self, seq):
+        self.seq = seq
+
+    def __iter__(self):
+        return looper_iter(self.seq)
+
+    def __repr__(self):
+        return '<%s for %r>' % (
+            self.__class__.__name__, self.seq)
+
+
+class looper_iter(object):
+
+    def __init__(self, seq):
+        self.seq = list(seq)
+        self.pos = 0
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        if self.pos >= len(self.seq):
+            raise StopIteration
+        result = loop_pos(self.seq, self.pos), self.seq[self.pos]
+        self.pos += 1
+        return result
+
+    if sys.version < "3":
+        next = __next__
+
+
+class loop_pos(object):
+
+    def __init__(self, seq, pos):
+        self.seq = seq
+        self.pos = pos
+
+    def __repr__(self):
+        return '<loop pos=%r at %r>' % (
+            self.seq[self.pos], self.pos)
+
+    def index(self):
+        return self.pos
+    index = property(index)
+
+    def number(self):
+        return self.pos + 1
+    number = property(number)
+
+    def item(self):
+        return self.seq[self.pos]
+    item = property(item)
+
+    def __next__(self):
+        try:
+            return self.seq[self.pos + 1]
+        except IndexError:
+            return None
+    __next__ = property(__next__)
+
+    if sys.version < "3":
+        next = __next__
+
+    def previous(self):
+        if self.pos == 0:
+            return None
+        return self.seq[self.pos - 1]
+    previous = property(previous)
+
+    def odd(self):
+        return not self.pos % 2
+    odd = property(odd)
+
+    def even(self):
+        return self.pos % 2
+    even = property(even)
+
+    def first(self):
+        return self.pos == 0
+    first = property(first)
+
+    def last(self):
+        return self.pos == len(self.seq) - 1
+    last = property(last)
+
+    def length(self):
+        return len(self.seq)
+    length = property(length)
+
+    def first_group(self, getter=None):
+        """
+        Returns true if this item is the start of a new group,
+        where groups mean that some attribute has changed. The getter
+        can be None (the item itself changes), an attribute name like
+        ``'.attr'``, a function, or a dict key or list index.
+        """
+        if self.first:
+            return True
+        return self._compare_group(self.item, self.previous, getter)
+
+    def last_group(self, getter=None):
+        """
+        Returns true if this item is the end of a new group,
+        where groups mean that some attribute has changed. The getter
+        can be None (the item itself changes), an attribute name like
+        ``'.attr'``, a function, or a dict key or list index.
+        """
+        if self.last:
+            return True
+        return self._compare_group(self.item, self.__next__, getter)
+
+    def _compare_group(self, item, other, getter):
+        if getter is None:
+            return item != other
+        elif (isinstance(getter, basestring_)
+              and getter.startswith('.')):
+            getter = getter[1:]
+            if getter.endswith('()'):
+                getter = getter[:-2]
+                return getattr(item, getter)() != getattr(other, getter)()
+            else:
+                return getattr(item, getter) != getattr(other, getter)
+        elif hasattr(getter, '__call__'):
+            return getter(item) != getter(other)
+        else:
+            return item[getter] != other[getter]
diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/_tempita.cpython-38-x86_64-linux-gnu.so b/venv/lib/python3.8/site-packages/Cython/Tempita/_tempita.cpython-38-x86_64-linux-gnu.so
new file mode 100755
index 0000000..3cde3f3
Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tempita/_tempita.cpython-38-x86_64-linux-gnu.so differ
diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/_tempita.py b/venv/lib/python3.8/site-packages/Cython/Tempita/_tempita.py
new file mode 100644
index 0000000..22a7d23
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Tempita/_tempita.py
@@ -0,0 +1,1182 @@
+"""
+A small templating language
+
+This implements a small templating language.
This language implements
+if/elif/else, for/continue/break, expressions, and blocks of Python
+code.  The syntax is::
+
+    {{any expression (function calls etc)}}
+    {{any expression | filter}}
+    {{for x in y}}...{{endfor}}
+    {{if x}}x{{elif y}}y{{else}}z{{endif}}
+    {{py:x=1}}
+    {{py:
+        def foo(bar):
+            return 'baz'
+    }}
+    {{default var = default_value}}
+    {{# comment}}
+
+You use this with the ``Template`` class or the ``sub`` shortcut.
+The ``Template`` class takes the template string and the name of
+the template (for errors) and a default namespace.  Then (like
+``string.Template``) you can call the ``tmpl.substitute(**kw)``
+method to make a substitution (or ``tmpl.substitute(a_dict)``).
+
+``sub(content, **kw)`` substitutes the template immediately.  You
+can use ``__name='tmpl.html'`` to set the name of the template.
+
+If there are syntax errors ``TemplateError`` will be raised.
+"""
+
+from __future__ import absolute_import
+
+import re
+import sys
+import cgi
+try:
+    from urllib import quote as url_quote
+except ImportError:  # Py3
+    from urllib.parse import quote as url_quote
+import os
+import tokenize
+from io import StringIO
+
+from ._looper import looper
+from .compat3 import bytes, unicode_, basestring_, next, is_unicode, coerce_text
+
+__all__ = ['TemplateError', 'Template', 'sub', 'HTMLTemplate',
+           'sub_html', 'html', 'bunch']
+
+in_re = re.compile(r'\s+in\s+')
+var_re = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
+
+
+class TemplateError(Exception):
+    """Exception raised while parsing a template
+    """
+
+    def __init__(self, message, position, name=None):
+        Exception.__init__(self, message)
+        self.position = position
+        self.name = name
+
+    def __str__(self):
+        msg = ' '.join(self.args)
+        if self.position:
+            msg = '%s at line %s column %s' % (
+                msg, self.position[0], self.position[1])
+        if self.name:
+            msg += ' in %s' % self.name
+        return msg
+
+
+class _TemplateContinue(Exception):
+    pass
+
+
+class _TemplateBreak(Exception):
+    pass
+
+
+def get_file_template(name, from_template):
+    path = os.path.join(os.path.dirname(from_template.name), name)
+    return from_template.__class__.from_filename(
+        path, namespace=from_template.namespace,
+        get_template=from_template.get_template)
+
+
+class Template(object):
+
+    default_namespace = {
+        'start_braces': '{{',
+        'end_braces': '}}',
+        'looper': looper,
+    }
+
+    default_encoding = 'utf8'
+    default_inherit = None
+
+    def __init__(self, content, name=None, namespace=None, stacklevel=None,
+                 get_template=None, default_inherit=None, line_offset=0,
+                 delimeters=None):
+        self.content = content
+
+        # set delimeters
+        if delimeters is None:
+            delimeters = (self.default_namespace['start_braces'],
+                          self.default_namespace['end_braces'])
+        else:
+            #assert len(delimeters) == 2 and all([isinstance(delimeter, basestring)
+            #                                     for delimeter in delimeters])
+            self.default_namespace = self.__class__.default_namespace.copy()
+            self.default_namespace['start_braces'] = delimeters[0]
+            self.default_namespace['end_braces'] = delimeters[1]
+        self.delimeters = delimeters
+
+        self._unicode = is_unicode(content)
+        if name is None and stacklevel is not None:
+            try:
+                caller = sys._getframe(stacklevel)
+            except ValueError:
+                pass
+            else:
+                globals = caller.f_globals
+                lineno = caller.f_lineno
+                if '__file__' in globals:
+                    name = globals['__file__']
+                    if name.endswith('.pyc') or name.endswith('.pyo'):
+                        name = name[:-1]
+                elif '__name__' in globals:
+                    name = globals['__name__']
+                else:
+                    name = '<string>'
+                if lineno:
+                    name += ':%s' % lineno
+        self.name = name
+        self._parsed =
parse(content, name=name, line_offset=line_offset, delimeters=self.delimeters) + if namespace is None: + namespace = {} + self.namespace = namespace + self.get_template = get_template + if default_inherit is not None: + self.default_inherit = default_inherit + + def from_filename(cls, filename, namespace=None, encoding=None, + default_inherit=None, get_template=get_file_template): + f = open(filename, 'rb') + c = f.read() + f.close() + if encoding: + c = c.decode(encoding) + return cls(content=c, name=filename, namespace=namespace, + default_inherit=default_inherit, get_template=get_template) + + from_filename = classmethod(from_filename) + + def __repr__(self): + return '<%s %s name=%r>' % ( + self.__class__.__name__, + hex(id(self))[2:], self.name) + + def substitute(self, *args, **kw): + if args: + if kw: + raise TypeError( + "You can only give positional *or* keyword arguments") + if len(args) > 1: + raise TypeError( + "You can only give one positional argument") + if not hasattr(args[0], 'items'): + raise TypeError( + "If you pass in a single argument, you must pass in a dictionary-like object (with a .items() method); you gave %r" + % (args[0],)) + kw = args[0] + ns = kw + ns['__template_name__'] = self.name + if self.namespace: + ns.update(self.namespace) + result, defs, inherit = self._interpret(ns) + if not inherit: + inherit = self.default_inherit + if inherit: + result = self._interpret_inherit(result, defs, inherit, ns) + return result + + def _interpret(self, ns): + __traceback_hide__ = True + parts = [] + defs = {} + self._interpret_codes(self._parsed, ns, out=parts, defs=defs) + if '__inherit__' in defs: + inherit = defs.pop('__inherit__') + else: + inherit = None + return ''.join(parts), defs, inherit + + def _interpret_inherit(self, body, defs, inherit_template, ns): + __traceback_hide__ = True + if not self.get_template: + raise TemplateError( + 'You cannot use inheritance without passing in get_template', + position=None, name=self.name) + templ = self.get_template(inherit_template, self) + self_ = TemplateObject(self.name) + for name, value in defs.items(): + setattr(self_, name, value) + self_.body = body + ns = ns.copy() + ns['self'] = self_ + return templ.substitute(ns) + + def _interpret_codes(self, codes, ns, out, defs): + __traceback_hide__ = True + for item in codes: + if isinstance(item, basestring_): + out.append(item) + else: + self._interpret_code(item, ns, out, defs) + + def _interpret_code(self, code, ns, out, defs): + __traceback_hide__ = True + name, pos = code[0], code[1] + if name == 'py': + self._exec(code[2], ns, pos) + elif name == 'continue': + raise _TemplateContinue() + elif name == 'break': + raise _TemplateBreak() + elif name == 'for': + vars, expr, content = code[2], code[3], code[4] + expr = self._eval(expr, ns, pos) + self._interpret_for(vars, expr, content, ns, out, defs) + elif name == 'cond': + parts = code[2:] + self._interpret_if(parts, ns, out, defs) + elif name == 'expr': + parts = code[2].split('|') + base = self._eval(parts[0], ns, pos) + for part in parts[1:]: + func = self._eval(part, ns, pos) + base = func(base) + out.append(self._repr(base, pos)) + elif name == 'default': + var, expr = code[2], code[3] + if var not in ns: + result = self._eval(expr, ns, pos) + ns[var] = result + elif name == 'inherit': + expr = code[2] + value = self._eval(expr, ns, pos) + defs['__inherit__'] = value + elif name == 'def': + name = code[2] + signature = code[3] + parts = code[4] + ns[name] = defs[name] = TemplateDef(self, name, signature, 
body=parts, ns=ns, + pos=pos) + elif name == 'comment': + return + else: + assert 0, "Unknown code: %r" % name + + def _interpret_for(self, vars, expr, content, ns, out, defs): + __traceback_hide__ = True + for item in expr: + if len(vars) == 1: + ns[vars[0]] = item + else: + if len(vars) != len(item): + raise ValueError( + 'Need %i items to unpack (got %i items)' + % (len(vars), len(item))) + for name, value in zip(vars, item): + ns[name] = value + try: + self._interpret_codes(content, ns, out, defs) + except _TemplateContinue: + continue + except _TemplateBreak: + break + + def _interpret_if(self, parts, ns, out, defs): + __traceback_hide__ = True + # @@: if/else/else gets through + for part in parts: + assert not isinstance(part, basestring_) + name, pos = part[0], part[1] + if name == 'else': + result = True + else: + result = self._eval(part[2], ns, pos) + if result: + self._interpret_codes(part[3], ns, out, defs) + break + + def _eval(self, code, ns, pos): + __traceback_hide__ = True + try: + try: + value = eval(code, self.default_namespace, ns) + except SyntaxError as e: + raise SyntaxError( + 'invalid syntax in expression: %s' % code) + return value + except Exception as e: + if getattr(e, 'args', None): + arg0 = e.args[0] + else: + arg0 = coerce_text(e) + e.args = (self._add_line_info(arg0, pos),) + raise + + def _exec(self, code, ns, pos): + __traceback_hide__ = True + try: + exec(code, self.default_namespace, ns) + except Exception as e: + if e.args: + e.args = (self._add_line_info(e.args[0], pos),) + else: + e.args = (self._add_line_info(None, pos),) + raise + + def _repr(self, value, pos): + __traceback_hide__ = True + try: + if value is None: + return '' + if self._unicode: + try: + value = unicode_(value) + except UnicodeDecodeError: + value = bytes(value) + else: + if not isinstance(value, basestring_): + value = coerce_text(value) + if (is_unicode(value) + and self.default_encoding): + value = value.encode(self.default_encoding) + except Exception as e: + e.args = (self._add_line_info(e.args[0], pos),) + raise + else: + if self._unicode and isinstance(value, bytes): + if not self.default_encoding: + raise UnicodeDecodeError( + 'Cannot decode bytes value %r into unicode ' + '(no default_encoding provided)' % value) + try: + value = value.decode(self.default_encoding) + except UnicodeDecodeError as e: + raise UnicodeDecodeError( + e.encoding, + e.object, + e.start, + e.end, + e.reason + ' in string %r' % value) + elif not self._unicode and is_unicode(value): + if not self.default_encoding: + raise UnicodeEncodeError( + 'Cannot encode unicode value %r into bytes ' + '(no default_encoding provided)' % value) + value = value.encode(self.default_encoding) + return value + + def _add_line_info(self, msg, pos): + msg = "%s at line %s column %s" % ( + msg, pos[0], pos[1]) + if self.name: + msg += " in file %s" % self.name + return msg + + +def sub(content, delimeters=None, **kw): + name = kw.get('__name') + tmpl = Template(content, name=name, delimeters=delimeters) + return tmpl.substitute(kw) + + +def paste_script_template_renderer(content, vars, filename=None): + tmpl = Template(content, name=filename) + return tmpl.substitute(vars) + + +class bunch(dict): + + def __init__(self, **kw): + for name, value in kw.items(): + setattr(self, name, value) + + def __setattr__(self, name, value): + self[name] = value + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + raise AttributeError(name) + + def __getitem__(self, key): + if 'default' in self: + try: + 
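# A 'default' key, when present, backstops missing item lookups
+            # (sketch):
+            #
+            #   >>> b = bunch(default=1, x=2)
+            #   >>> b['x'], b['missing']
+            #   (2, 1)
+                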
return dict.__getitem__(self, key)
+            except KeyError:
+                return dict.__getitem__(self, 'default')
+        else:
+            return dict.__getitem__(self, key)
+
+    def __repr__(self):
+        return '<%s %s>' % (
+            self.__class__.__name__,
+            ' '.join(['%s=%r' % (k, v) for k, v in sorted(self.items())]))
+
+############################################################
+## HTML Templating
+############################################################
+
+
+class html(object):
+
+    def __init__(self, value):
+        self.value = value
+
+    def __str__(self):
+        return self.value
+
+    def __html__(self):
+        return self.value
+
+    def __repr__(self):
+        return '<%s %r>' % (
+            self.__class__.__name__, self.value)
+
+
+def html_quote(value, force=True):
+    if not force and hasattr(value, '__html__'):
+        return value.__html__()
+    if value is None:
+        return ''
+    if not isinstance(value, basestring_):
+        value = coerce_text(value)
+    if sys.version >= "3" and isinstance(value, bytes):
+        value = cgi.escape(value.decode('latin1'), 1)
+        value = value.encode('latin1')
+    else:
+        value = cgi.escape(value, 1)
+    if sys.version < "3":
+        if is_unicode(value):
+            value = value.encode('ascii', 'xmlcharrefreplace')
+    return value
+
+
+def url(v):
+    v = coerce_text(v)
+    if is_unicode(v):
+        v = v.encode('utf8')
+    return url_quote(v)
+
+
+def attr(**kw):
+    parts = []
+    for name, value in sorted(kw.items()):
+        if value is None:
+            continue
+        if name.endswith('_'):
+            name = name[:-1]
+        parts.append('%s="%s"' % (html_quote(name), html_quote(value)))
+    return html(' '.join(parts))
+
+
+class HTMLTemplate(Template):
+
+    default_namespace = Template.default_namespace.copy()
+    default_namespace.update(dict(
+        html=html,
+        attr=attr,
+        url=url,
+        html_quote=html_quote,
+    ))
+
+    def _repr(self, value, pos):
+        if hasattr(value, '__html__'):
+            value = value.__html__()
+            quote = False
+        else:
+            quote = True
+        plain = Template._repr(self, value, pos)
+        if quote:
+            return html_quote(plain)
+        else:
+            return plain
+
+
+def sub_html(content, **kw):
+    name = kw.get('__name')
+    tmpl = HTMLTemplate(content, name=name)
+    return tmpl.substitute(kw)
+
+
+class TemplateDef(object):
+    def __init__(self, template, func_name, func_signature,
+                 body, ns, pos, bound_self=None):
+        self._template = template
+        self._func_name = func_name
+        self._func_signature = func_signature
+        self._body = body
+        self._ns = ns
+        self._pos = pos
+        self._bound_self = bound_self
+
+    def __repr__(self):
+        return '<tempita function %s(%s) at %s:%s>' % (
+            self._func_name, self._func_signature,
+            self._template.name, self._pos)
+
+    def __str__(self):
+        return self()
+
+    def __call__(self, *args, **kw):
+        values = self._parse_signature(args, kw)
+        ns = self._ns.copy()
+        ns.update(values)
+        if self._bound_self is not None:
+            ns['self'] = self._bound_self
+        out = []
+        subdefs = {}
+        self._template._interpret_codes(self._body, ns, out, subdefs)
+        return ''.join(out)
+
+    def __get__(self, obj, type=None):
+        if obj is None:
+            return self
+        return self.__class__(
+            self._template, self._func_name, self._func_signature,
+            self._body, self._ns, self._pos, bound_self=obj)
+
+    def _parse_signature(self, args, kw):
+        values = {}
+        sig_args, var_args, var_kw, defaults = self._func_signature
+        extra_kw = {}
+        for name, value in kw.items():
+            if not var_kw and name not in sig_args:
+                raise TypeError(
+                    'Unexpected argument %s' % name)
+            if name in sig_args:
+                values[name] = value
+            else:
+                extra_kw[name] = value
+        args = list(args)
+        sig_args = list(sig_args)
+        while args:
+            while sig_args and sig_args[0] in values:
+                sig_args.pop(0)
+            if sig_args:
+                name 
= sig_args.pop(0) + values[name] = args.pop(0) + elif var_args: + values[var_args] = tuple(args) + break + else: + raise TypeError( + 'Extra position arguments: %s' + % ', '.join([repr(v) for v in args])) + for name, value_expr in defaults.items(): + if name not in values: + values[name] = self._template._eval( + value_expr, self._ns, self._pos) + for name in sig_args: + if name not in values: + raise TypeError( + 'Missing argument: %s' % name) + if var_kw: + values[var_kw] = extra_kw + return values + + +class TemplateObject(object): + + def __init__(self, name): + self.__name = name + self.get = TemplateObjectGetter(self) + + def __repr__(self): + return '<%s %s>' % (self.__class__.__name__, self.__name) + + +class TemplateObjectGetter(object): + + def __init__(self, template_obj): + self.__template_obj = template_obj + + def __getattr__(self, attr): + return getattr(self.__template_obj, attr, Empty) + + def __repr__(self): + return '<%s around %r>' % (self.__class__.__name__, self.__template_obj) + + +class _Empty(object): + def __call__(self, *args, **kw): + return self + + def __str__(self): + return '' + + def __repr__(self): + return 'Empty' + + def __unicode__(self): + return u'' + + def __iter__(self): + return iter(()) + + def __bool__(self): + return False + + if sys.version < "3": + __nonzero__ = __bool__ + +Empty = _Empty() +del _Empty + +############################################################ +## Lexing and Parsing +############################################################ + + +def lex(s, name=None, trim_whitespace=True, line_offset=0, delimeters=None): + """ + Lex a string into chunks: + + >>> lex('hey') + ['hey'] + >>> lex('hey {{you}}') + ['hey ', ('you', (1, 7))] + >>> lex('hey {{') + Traceback (most recent call last): + ... + TemplateError: No }} to finish last expression at line 1 column 7 + >>> lex('hey }}') + Traceback (most recent call last): + ... + TemplateError: }} outside expression at line 1 column 7 + >>> lex('hey {{ {{') + Traceback (most recent call last): + ... 
+ TemplateError: {{ inside expression at line 1 column 10 + + """ + if delimeters is None: + delimeters = ( Template.default_namespace['start_braces'], + Template.default_namespace['end_braces'] ) + in_expr = False + chunks = [] + last = 0 + last_pos = (line_offset + 1, 1) + + token_re = re.compile(r'%s|%s' % (re.escape(delimeters[0]), + re.escape(delimeters[1]))) + for match in token_re.finditer(s): + expr = match.group(0) + pos = find_position(s, match.end(), last, last_pos) + if expr == delimeters[0] and in_expr: + raise TemplateError('%s inside expression' % delimeters[0], + position=pos, + name=name) + elif expr == delimeters[1] and not in_expr: + raise TemplateError('%s outside expression' % delimeters[1], + position=pos, + name=name) + if expr == delimeters[0]: + part = s[last:match.start()] + if part: + chunks.append(part) + in_expr = True + else: + chunks.append((s[last:match.start()], last_pos)) + in_expr = False + last = match.end() + last_pos = pos + if in_expr: + raise TemplateError('No %s to finish last expression' % delimeters[1], + name=name, position=last_pos) + part = s[last:] + if part: + chunks.append(part) + if trim_whitespace: + chunks = trim_lex(chunks) + return chunks + +statement_re = re.compile(r'^(?:if |elif |for |def |inherit |default |py:)') +single_statements = ['else', 'endif', 'endfor', 'enddef', 'continue', 'break'] +trail_whitespace_re = re.compile(r'\n\r?[\t ]*$') +lead_whitespace_re = re.compile(r'^[\t ]*\n') + + +def trim_lex(tokens): + r""" + Takes a lexed set of tokens, and removes whitespace when there is + a directive on a line by itself: + + >>> tokens = lex('{{if x}}\nx\n{{endif}}\ny', trim_whitespace=False) + >>> tokens + [('if x', (1, 3)), '\nx\n', ('endif', (3, 3)), '\ny'] + >>> trim_lex(tokens) + [('if x', (1, 3)), 'x\n', ('endif', (3, 3)), 'y'] + """ + last_trim = None + for i, current in enumerate(tokens): + if isinstance(current, basestring_): + # we don't trim this + continue + item = current[0] + if not statement_re.search(item) and item not in single_statements: + continue + if not i: + prev = '' + else: + prev = tokens[i - 1] + if i + 1 >= len(tokens): + next_chunk = '' + else: + next_chunk = tokens[i + 1] + if (not isinstance(next_chunk, basestring_) + or not isinstance(prev, basestring_)): + continue + prev_ok = not prev or trail_whitespace_re.search(prev) + if i == 1 and not prev.strip(): + prev_ok = True + if last_trim is not None and last_trim + 2 == i and not prev.strip(): + prev_ok = 'last' + if (prev_ok + and (not next_chunk or lead_whitespace_re.search(next_chunk) + or (i == len(tokens) - 2 and not next_chunk.strip()))): + if prev: + if ((i == 1 and not prev.strip()) + or prev_ok == 'last'): + tokens[i - 1] = '' + else: + m = trail_whitespace_re.search(prev) + # +1 to leave the leading \n on: + prev = prev[:m.start() + 1] + tokens[i - 1] = prev + if next_chunk: + last_trim = i + if i == len(tokens) - 2 and not next_chunk.strip(): + tokens[i + 1] = '' + else: + m = lead_whitespace_re.search(next_chunk) + next_chunk = next_chunk[m.end():] + tokens[i + 1] = next_chunk + return tokens + + +def find_position(string, index, last_index, last_pos): + """Given a string and index, return (line, column)""" + lines = string.count('\n', last_index, index) + if lines > 0: + column = index - string.rfind('\n', last_index, index) + else: + column = last_pos[1] + (index - last_index) + return (last_pos[0] + lines, column) + + +def parse(s, name=None, line_offset=0, delimeters=None): + r""" + Parses a string into a kind of AST + + >>> 
parse('{{x}}') + [('expr', (1, 3), 'x')] + >>> parse('foo') + ['foo'] + >>> parse('{{if x}}test{{endif}}') + [('cond', (1, 3), ('if', (1, 3), 'x', ['test']))] + >>> parse('series->{{for x in y}}x={{x}}{{endfor}}') + ['series->', ('for', (1, 11), ('x',), 'y', ['x=', ('expr', (1, 27), 'x')])] + >>> parse('{{for x, y in z:}}{{continue}}{{endfor}}') + [('for', (1, 3), ('x', 'y'), 'z', [('continue', (1, 21))])] + >>> parse('{{py:x=1}}') + [('py', (1, 3), 'x=1')] + >>> parse('{{if x}}a{{elif y}}b{{else}}c{{endif}}') + [('cond', (1, 3), ('if', (1, 3), 'x', ['a']), ('elif', (1, 12), 'y', ['b']), ('else', (1, 23), None, ['c']))] + + Some exceptions:: + + >>> parse('{{continue}}') + Traceback (most recent call last): + ... + TemplateError: continue outside of for loop at line 1 column 3 + >>> parse('{{if x}}foo') + Traceback (most recent call last): + ... + TemplateError: No {{endif}} at line 1 column 3 + >>> parse('{{else}}') + Traceback (most recent call last): + ... + TemplateError: else outside of an if block at line 1 column 3 + >>> parse('{{if x}}{{for x in y}}{{endif}}{{endfor}}') + Traceback (most recent call last): + ... + TemplateError: Unexpected endif at line 1 column 25 + >>> parse('{{if}}{{endif}}') + Traceback (most recent call last): + ... + TemplateError: if with no expression at line 1 column 3 + >>> parse('{{for x y}}{{endfor}}') + Traceback (most recent call last): + ... + TemplateError: Bad for (no "in") in 'x y' at line 1 column 3 + >>> parse('{{py:x=1\ny=2}}') + Traceback (most recent call last): + ... + TemplateError: Multi-line py blocks must start with a newline at line 1 column 3 + """ + if delimeters is None: + delimeters = ( Template.default_namespace['start_braces'], + Template.default_namespace['end_braces'] ) + tokens = lex(s, name=name, line_offset=line_offset, delimeters=delimeters) + result = [] + while tokens: + next_chunk, tokens = parse_expr(tokens, name) + result.append(next_chunk) + return result + + +def parse_expr(tokens, name, context=()): + if isinstance(tokens[0], basestring_): + return tokens[0], tokens[1:] + expr, pos = tokens[0] + expr = expr.strip() + if expr.startswith('py:'): + expr = expr[3:].lstrip(' \t') + if expr.startswith('\n') or expr.startswith('\r'): + expr = expr.lstrip('\r\n') + if '\r' in expr: + expr = expr.replace('\r\n', '\n') + expr = expr.replace('\r', '') + expr += '\n' + else: + if '\n' in expr: + raise TemplateError( + 'Multi-line py blocks must start with a newline', + position=pos, name=name) + return ('py', pos, expr), tokens[1:] + elif expr in ('continue', 'break'): + if 'for' not in context: + raise TemplateError( + 'continue outside of for loop', + position=pos, name=name) + return (expr, pos), tokens[1:] + elif expr.startswith('if '): + return parse_cond(tokens, name, context) + elif (expr.startswith('elif ') + or expr == 'else'): + raise TemplateError( + '%s outside of an if block' % expr.split()[0], + position=pos, name=name) + elif expr in ('if', 'elif', 'for'): + raise TemplateError( + '%s with no expression' % expr, + position=pos, name=name) + elif expr in ('endif', 'endfor', 'enddef'): + raise TemplateError( + 'Unexpected %s' % expr, + position=pos, name=name) + elif expr.startswith('for '): + return parse_for(tokens, name, context) + elif expr.startswith('default '): + return parse_default(tokens, name, context) + elif expr.startswith('inherit '): + return parse_inherit(tokens, name, context) + elif expr.startswith('def '): + return parse_def(tokens, name, context) + elif expr.startswith('#'): + return ('comment', 
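+            # Two more node shapes in the spirit of the doctests above
+            # (see parse_default / parse_inherit below; sketch):
+            #
+            #   >>> parse('{{default x = 1}}')
+            #   [('default', (1, 3), 'x', '1')]
+            #   >>> parse('{{inherit "base.html"}}')
+            #   [('inherit', (1, 3), '"base.html"')]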
pos, tokens[0][0]), tokens[1:] + return ('expr', pos, tokens[0][0]), tokens[1:] + + +def parse_cond(tokens, name, context): + start = tokens[0][1] + pieces = [] + context = context + ('if',) + while 1: + if not tokens: + raise TemplateError( + 'Missing {{endif}}', + position=start, name=name) + if (isinstance(tokens[0], tuple) + and tokens[0][0] == 'endif'): + return ('cond', start) + tuple(pieces), tokens[1:] + next_chunk, tokens = parse_one_cond(tokens, name, context) + pieces.append(next_chunk) + + +def parse_one_cond(tokens, name, context): + (first, pos), tokens = tokens[0], tokens[1:] + content = [] + if first.endswith(':'): + first = first[:-1] + if first.startswith('if '): + part = ('if', pos, first[3:].lstrip(), content) + elif first.startswith('elif '): + part = ('elif', pos, first[5:].lstrip(), content) + elif first == 'else': + part = ('else', pos, None, content) + else: + assert 0, "Unexpected token %r at %s" % (first, pos) + while 1: + if not tokens: + raise TemplateError( + 'No {{endif}}', + position=pos, name=name) + if (isinstance(tokens[0], tuple) + and (tokens[0][0] == 'endif' + or tokens[0][0].startswith('elif ') + or tokens[0][0] == 'else')): + return part, tokens + next_chunk, tokens = parse_expr(tokens, name, context) + content.append(next_chunk) + + +def parse_for(tokens, name, context): + first, pos = tokens[0] + tokens = tokens[1:] + context = ('for',) + context + content = [] + assert first.startswith('for ') + if first.endswith(':'): + first = first[:-1] + first = first[3:].strip() + match = in_re.search(first) + if not match: + raise TemplateError( + 'Bad for (no "in") in %r' % first, + position=pos, name=name) + vars = first[:match.start()] + if '(' in vars: + raise TemplateError( + 'You cannot have () in the variable section of a for loop (%r)' + % vars, position=pos, name=name) + vars = tuple([ + v.strip() for v in first[:match.start()].split(',') + if v.strip()]) + expr = first[match.end():] + while 1: + if not tokens: + raise TemplateError( + 'No {{endfor}}', + position=pos, name=name) + if (isinstance(tokens[0], tuple) + and tokens[0][0] == 'endfor'): + return ('for', pos, vars, expr, content), tokens[1:] + next_chunk, tokens = parse_expr(tokens, name, context) + content.append(next_chunk) + + +def parse_default(tokens, name, context): + first, pos = tokens[0] + assert first.startswith('default ') + first = first.split(None, 1)[1] + parts = first.split('=', 1) + if len(parts) == 1: + raise TemplateError( + "Expression must be {{default var=value}}; no = found in %r" % first, + position=pos, name=name) + var = parts[0].strip() + if ',' in var: + raise TemplateError( + "{{default x, y = ...}} is not supported", + position=pos, name=name) + if not var_re.search(var): + raise TemplateError( + "Not a valid variable name for {{default}}: %r" + % var, position=pos, name=name) + expr = parts[1].strip() + return ('default', pos, var, expr), tokens[1:] + + +def parse_inherit(tokens, name, context): + first, pos = tokens[0] + assert first.startswith('inherit ') + expr = first.split(None, 1)[1] + return ('inherit', pos, expr), tokens[1:] + + +def parse_def(tokens, name, context): + first, start = tokens[0] + tokens = tokens[1:] + assert first.startswith('def ') + first = first.split(None, 1)[1] + if first.endswith(':'): + first = first[:-1] + if '(' not in first: + func_name = first + sig = ((), None, None, {}) + elif not first.endswith(')'): + raise TemplateError("Function definition doesn't end with ): %s" % first, + position=start, name=name) + else: + first = 
first[:-1] + func_name, sig_text = first.split('(', 1) + sig = parse_signature(sig_text, name, start) + context = context + ('def',) + content = [] + while 1: + if not tokens: + raise TemplateError( + 'Missing {{enddef}}', + position=start, name=name) + if (isinstance(tokens[0], tuple) + and tokens[0][0] == 'enddef'): + return ('def', start, func_name, sig, content), tokens[1:] + next_chunk, tokens = parse_expr(tokens, name, context) + content.append(next_chunk) + + +def parse_signature(sig_text, name, pos): + tokens = tokenize.generate_tokens(StringIO(sig_text).readline) + sig_args = [] + var_arg = None + var_kw = None + defaults = {} + + def get_token(pos=False): + try: + tok_type, tok_string, (srow, scol), (erow, ecol), line = next(tokens) + except StopIteration: + return tokenize.ENDMARKER, '' + if pos: + return tok_type, tok_string, (srow, scol), (erow, ecol) + else: + return tok_type, tok_string + while 1: + var_arg_type = None + tok_type, tok_string = get_token() + if tok_type == tokenize.ENDMARKER: + break + if tok_type == tokenize.OP and (tok_string == '*' or tok_string == '**'): + var_arg_type = tok_string + tok_type, tok_string = get_token() + if tok_type != tokenize.NAME: + raise TemplateError('Invalid signature: (%s)' % sig_text, + position=pos, name=name) + var_name = tok_string + tok_type, tok_string = get_token() + if tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','): + if var_arg_type == '*': + var_arg = var_name + elif var_arg_type == '**': + var_kw = var_name + else: + sig_args.append(var_name) + if tok_type == tokenize.ENDMARKER: + break + continue + if var_arg_type is not None: + raise TemplateError('Invalid signature: (%s)' % sig_text, + position=pos, name=name) + if tok_type == tokenize.OP and tok_string == '=': + nest_type = None + unnest_type = None + nest_count = 0 + start_pos = end_pos = None + parts = [] + while 1: + tok_type, tok_string, s, e = get_token(True) + if start_pos is None: + start_pos = s + end_pos = e + if tok_type == tokenize.ENDMARKER and nest_count: + raise TemplateError('Invalid signature: (%s)' % sig_text, + position=pos, name=name) + if (not nest_count and + (tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','))): + default_expr = isolate_expression(sig_text, start_pos, end_pos) + defaults[var_name] = default_expr + sig_args.append(var_name) + break + parts.append((tok_type, tok_string)) + if nest_count and tok_type == tokenize.OP and tok_string == nest_type: + nest_count += 1 + elif nest_count and tok_type == tokenize.OP and tok_string == unnest_type: + nest_count -= 1 + if not nest_count: + nest_type = unnest_type = None + elif not nest_count and tok_type == tokenize.OP and tok_string in ('(', '[', '{'): + nest_type = tok_string + nest_count = 1 + unnest_type = {'(': ')', '[': ']', '{': '}'}[nest_type] + return sig_args, var_arg, var_kw, defaults + + +def isolate_expression(string, start_pos, end_pos): + srow, scol = start_pos + srow -= 1 + erow, ecol = end_pos + erow -= 1 + lines = string.splitlines(True) + if srow == erow: + return lines[srow][scol:ecol] + parts = [lines[srow][scol:]] + parts.extend(lines[srow+1:erow]) + if erow < len(lines): + # It'll sometimes give (end_row_past_finish, 0) + parts.append(lines[erow][:ecol]) + return ''.join(parts) + +_fill_command_usage = """\ +%prog [OPTIONS] TEMPLATE arg=value + +Use py:arg=value to set a Python value; otherwise all values are +strings. 
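+
+For example (an illustrative invocation):
+
+  %prog page.tmpl title=Hello py:items=[1,2,3]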
+""" + + +def fill_command(args=None): + import sys + import optparse + import pkg_resources + import os + if args is None: + args = sys.argv[1:] + dist = pkg_resources.get_distribution('Paste') + parser = optparse.OptionParser( + version=coerce_text(dist), + usage=_fill_command_usage) + parser.add_option( + '-o', '--output', + dest='output', + metavar="FILENAME", + help="File to write output to (default stdout)") + parser.add_option( + '--html', + dest='use_html', + action='store_true', + help="Use HTML style filling (including automatic HTML quoting)") + parser.add_option( + '--env', + dest='use_env', + action='store_true', + help="Put the environment in as top-level variables") + options, args = parser.parse_args(args) + if len(args) < 1: + print('You must give a template filename') + sys.exit(2) + template_name = args[0] + args = args[1:] + vars = {} + if options.use_env: + vars.update(os.environ) + for value in args: + if '=' not in value: + print('Bad argument: %r' % value) + sys.exit(2) + name, value = value.split('=', 1) + if name.startswith('py:'): + name = name[:3] + value = eval(value) + vars[name] = value + if template_name == '-': + template_content = sys.stdin.read() + template_name = '' + else: + f = open(template_name, 'rb') + template_content = f.read() + f.close() + if options.use_html: + TemplateClass = HTMLTemplate + else: + TemplateClass = Template + template = TemplateClass(template_content, name=template_name) + result = template.substitute(vars) + if options.output: + f = open(options.output, 'wb') + f.write(result) + f.close() + else: + sys.stdout.write(result) + +if __name__ == '__main__': + fill_command() diff --git a/venv/lib/python3.8/site-packages/Cython/Tempita/compat3.py b/venv/lib/python3.8/site-packages/Cython/Tempita/compat3.py new file mode 100644 index 0000000..9905530 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tempita/compat3.py @@ -0,0 +1,47 @@ +import sys + +__all__ = ['b', 'basestring_', 'bytes', 'unicode_', 'next', 'is_unicode'] + +if sys.version < "3": + b = bytes = str + basestring_ = basestring + unicode_ = unicode +else: + + def b(s): + if isinstance(s, str): + return s.encode('latin1') + return bytes(s) + basestring_ = (bytes, str) + bytes = bytes + unicode_ = str +text = str + +if sys.version < "3": + + def next(obj): + return obj.next() +else: + next = next + +if sys.version < "3": + + def is_unicode(obj): + return isinstance(obj, unicode) +else: + + def is_unicode(obj): + return isinstance(obj, str) + + +def coerce_text(v): + if not isinstance(v, basestring_): + if sys.version < "3": + attr = '__unicode__' + else: + attr = '__str__' + if hasattr(v, attr): + return unicode(v) + else: + return bytes(v) + return v diff --git a/venv/lib/python3.8/site-packages/Cython/TestUtils.py b/venv/lib/python3.8/site-packages/Cython/TestUtils.py new file mode 100644 index 0000000..9d6eb67 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/TestUtils.py @@ -0,0 +1,217 @@ +from __future__ import absolute_import + +import os +import unittest +import tempfile + +from .Compiler import Errors +from .CodeWriter import CodeWriter +from .Compiler.TreeFragment import TreeFragment, strip_common_indent +from .Compiler.Visitor import TreeVisitor, VisitorTransform +from .Compiler import TreePath + + +class NodeTypeWriter(TreeVisitor): + def __init__(self): + super(NodeTypeWriter, self).__init__() + self._indents = 0 + self.result = [] + + def visit_Node(self, node): + if not self.access_path: + name = u"(root)" + else: + tip = 
self.access_path[-1] + if tip[2] is not None: + name = u"%s[%d]" % tip[1:3] + else: + name = tip[1] + + self.result.append(u" " * self._indents + + u"%s: %s" % (name, node.__class__.__name__)) + self._indents += 1 + self.visitchildren(node) + self._indents -= 1 + + +def treetypes(root): + """Returns a string representing the tree by class names. + There's a leading and trailing whitespace so that it can be + compared by simple string comparison while still making test + cases look ok.""" + w = NodeTypeWriter() + w.visit(root) + return u"\n".join([u""] + w.result + [u""]) + + +class CythonTest(unittest.TestCase): + + def setUp(self): + self.listing_file = Errors.listing_file + self.echo_file = Errors.echo_file + Errors.listing_file = Errors.echo_file = None + + def tearDown(self): + Errors.listing_file = self.listing_file + Errors.echo_file = self.echo_file + + def assertLines(self, expected, result): + "Checks that the given strings or lists of strings are equal line by line" + if not isinstance(expected, list): + expected = expected.split(u"\n") + if not isinstance(result, list): + result = result.split(u"\n") + for idx, (expected_line, result_line) in enumerate(zip(expected, result)): + self.assertEqual(expected_line, result_line, + "Line %d:\nExp: %s\nGot: %s" % (idx, expected_line, result_line)) + self.assertEqual(len(expected), len(result), + "Unmatched lines. Got:\n%s\nExpected:\n%s" % ("\n".join(expected), u"\n".join(result))) + + def codeToLines(self, tree): + writer = CodeWriter() + writer.write(tree) + return writer.result.lines + + def codeToString(self, tree): + return "\n".join(self.codeToLines(tree)) + + def assertCode(self, expected, result_tree): + result_lines = self.codeToLines(result_tree) + + expected_lines = strip_common_indent(expected.split("\n")) + + for idx, (line, expected_line) in enumerate(zip(result_lines, expected_lines)): + self.assertEqual(expected_line, line, + "Line %d:\nGot: %s\nExp: %s" % (idx, line, expected_line)) + self.assertEqual(len(result_lines), len(expected_lines), + "Unmatched lines. Got:\n%s\nExpected:\n%s" % ("\n".join(result_lines), expected)) + + def assertNodeExists(self, path, result_tree): + self.assertNotEqual(TreePath.find_first(result_tree, path), None, + "Path '%s' not found in result tree" % path) + + def fragment(self, code, pxds=None, pipeline=None): + "Simply create a tree fragment using the name of the test-case in parse errors." + if pxds is None: + pxds = {} + if pipeline is None: + pipeline = [] + name = self.id() + if name.startswith("__main__."): + name = name[len("__main__."):] + name = name.replace(".", "_") + return TreeFragment(code, name, pxds, pipeline=pipeline) + + def treetypes(self, root): + return treetypes(root) + + def should_fail(self, func, exc_type=Exception): + """Calls "func" and fails if it doesn't raise the right exception + (any exception by default). Also returns the exception in question. + """ + try: + func() + self.fail("Expected an exception of type %r" % exc_type) + except exc_type as e: + self.assertTrue(isinstance(e, exc_type)) + return e + + def should_not_fail(self, func): + """Calls func and succeeds if and only if no exception is raised + (i.e. converts exception raising into a failed testcase). Returns + the return value of func.""" + try: + return func() + except Exception as exc: + self.fail(str(exc)) + + +class TransformTest(CythonTest): + """ + Utility base class for transform unit tests. 
It is based around constructing + test trees (either explicitly or by parsing a Cython code string); running + the transform, serialize it using a customized Cython serializer (with + special markup for nodes that cannot be represented in Cython), + and do a string-comparison line-by-line of the result. + + To create a test case: + - Call run_pipeline. The pipeline should at least contain the transform you + are testing; pyx should be either a string (passed to the parser to + create a post-parse tree) or a node representing input to pipeline. + The result will be a transformed result. + + - Check that the tree is correct. If wanted, assertCode can be used, which + takes a code string as expected, and a ModuleNode in result_tree + (it serializes the ModuleNode to a string and compares line-by-line). + + All code strings are first stripped for whitespace lines and then common + indentation. + + Plans: One could have a pxd dictionary parameter to run_pipeline. + """ + + def run_pipeline(self, pipeline, pyx, pxds=None): + if pxds is None: + pxds = {} + tree = self.fragment(pyx, pxds).root + # Run pipeline + for T in pipeline: + tree = T(tree) + return tree + + +class TreeAssertVisitor(VisitorTransform): + # actually, a TreeVisitor would be enough, but this needs to run + # as part of the compiler pipeline + + def visit_CompilerDirectivesNode(self, node): + directives = node.directives + if 'test_assert_path_exists' in directives: + for path in directives['test_assert_path_exists']: + if TreePath.find_first(node, path) is None: + Errors.error( + node.pos, + "Expected path '%s' not found in result tree" % path) + if 'test_fail_if_path_exists' in directives: + for path in directives['test_fail_if_path_exists']: + if TreePath.find_first(node, path) is not None: + Errors.error( + node.pos, + "Unexpected path '%s' found in result tree" % path) + self.visitchildren(node) + return node + + visit_Node = VisitorTransform.recurse_to_children + + +def unpack_source_tree(tree_file, dir=None): + if dir is None: + dir = tempfile.mkdtemp() + header = [] + cur_file = None + f = open(tree_file) + try: + lines = f.readlines() + finally: + f.close() + del f + try: + for line in lines: + if line[:5] == '#####': + filename = line.strip().strip('#').strip().replace('/', os.path.sep) + path = os.path.join(dir, filename) + if not os.path.exists(os.path.dirname(path)): + os.makedirs(os.path.dirname(path)) + if cur_file is not None: + f, cur_file = cur_file, None + f.close() + cur_file = open(path, 'w') + elif cur_file is not None: + cur_file.write(line) + elif line.strip() and not line.lstrip().startswith('#'): + if line.strip() not in ('"""', "'''"): + header.append(line) + finally: + if cur_file is not None: + cur_file.close() + return dir, ''.join(header) diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/TestCodeWriter.py b/venv/lib/python3.8/site-packages/Cython/Tests/TestCodeWriter.py new file mode 100644 index 0000000..42e457d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tests/TestCodeWriter.py @@ -0,0 +1,82 @@ +from Cython.TestUtils import CythonTest + +class TestCodeWriter(CythonTest): + # CythonTest uses the CodeWriter heavily, so do some checking by + # roundtripping Cython code through the test framework. + + # Note that this test is dependent upon the normal Cython parser + # to generate the input trees to the CodeWriter. This save *a lot* + # of time; better to spend that time writing other tests than perfecting + # this one... 
+ + # Whitespace is very significant in this process: + # - always newline on new block (!) + # - indent 4 spaces + # - 1 space around every operator + + def t(self, codestr): + self.assertCode(codestr, self.fragment(codestr).root) + + def test_print(self): + self.t(u""" + print x, y + print x + y ** 2 + print x, y, z, + """) + + def test_if(self): + self.t(u"if x:\n pass") + + def test_ifelifelse(self): + self.t(u""" + if x: + pass + elif y: + pass + elif z + 34 ** 34 - 2: + pass + else: + pass + """) + + def test_def(self): + self.t(u""" + def f(x, y, z): + pass + def f(x = 34, y = 54, z): + pass + """) + + def test_longness_and_signedness(self): + self.t(u"def f(unsigned long long long long long int y):\n pass") + + def test_signed_short(self): + self.t(u"def f(signed short int y):\n pass") + + def test_typed_args(self): + self.t(u"def f(int x, unsigned long int y):\n pass") + + def test_cdef_var(self): + self.t(u""" + cdef int hello + cdef int hello = 4, x = 3, y, z + """) + + def test_for_loop(self): + self.t(u""" + for x, y, z in f(g(h(34) * 2) + 23): + print x, y, z + else: + print 43 + """) + + def test_inplace_assignment(self): + self.t(u"x += 43") + + def test_attribute(self): + self.t(u"a.x") + +if __name__ == "__main__": + import unittest + unittest.main() + diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/TestCythonUtils.py b/venv/lib/python3.8/site-packages/Cython/Tests/TestCythonUtils.py new file mode 100644 index 0000000..2641900 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tests/TestCythonUtils.py @@ -0,0 +1,11 @@ +import unittest + +from ..Utils import build_hex_version + +class TestCythonUtils(unittest.TestCase): + def test_build_hex_version(self): + self.assertEqual('0x001D00A1', build_hex_version('0.29a1')) + self.assertEqual('0x001D00A1', build_hex_version('0.29a1')) + self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4')) + self.assertEqual('0x001D00F0', build_hex_version('0.29')) + self.assertEqual('0x040000F0', build_hex_version('4.0')) diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/TestJediTyper.py b/venv/lib/python3.8/site-packages/Cython/Tests/TestJediTyper.py new file mode 100644 index 0000000..253adef --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tests/TestJediTyper.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- +# tag: jedi + +from __future__ import absolute_import + +import sys +import os.path + +from textwrap import dedent +from contextlib import contextmanager +from tempfile import NamedTemporaryFile + +from Cython.Compiler.ParseTreeTransforms import NormalizeTree, InterpretCompilerDirectives +from Cython.Compiler import Main, Symtab, Visitor +from Cython.TestUtils import TransformTest + +TOOLS_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'Tools')) + + +@contextmanager +def _tempfile(code): + code = dedent(code) + if not isinstance(code, bytes): + code = code.encode('utf8') + + with NamedTemporaryFile(suffix='.py') as f: + f.write(code) + f.seek(0) + yield f + + +def _test_typing(code, inject=False): + sys.path.insert(0, TOOLS_DIR) + try: + import jedityper + finally: + sys.path.remove(TOOLS_DIR) + lines = [] + with _tempfile(code) as f: + types = jedityper.analyse(f.name) + if inject: + lines = jedityper.inject_types(f.name, types) + return types, lines + + +class DeclarationsFinder(Visitor.VisitorTransform): + directives = None + + visit_Node = Visitor.VisitorTransform.recurse_to_children + + def visit_CompilerDirectivesNode(self, node): + if not self.directives: + 
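# lazily create the collection list; DeclarationsFinder just records
+            # every CompilerDirectivesNode it passes through (TestTypeInjection
+            # below inspects the collected nodes)
+            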
self.directives = [] + self.directives.append(node) + self.visitchildren(node) + return node + + +class TestJediTyper(TransformTest): + def _test(self, code): + return _test_typing(code)[0] + + def test_typing_global_int_loop(self): + code = '''\ + for i in range(10): + a = i + 1 + ''' + types = self._test(code) + self.assertIn((None, (1, 0)), types) + variables = types.pop((None, (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables) + + def test_typing_function_int_loop(self): + code = '''\ + def func(x): + for i in range(x): + a = i + 1 + return a + ''' + types = self._test(code) + self.assertIn(('func', (1, 0)), types) + variables = types.pop(('func', (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables) + + def test_conflicting_types_in_function(self): + code = '''\ + def func(a, b): + print(a) + a = 1 + b += a + a = 'abc' + return a, str(b) + + print(func(1.5, 2)) + ''' + types = self._test(code) + self.assertIn(('func', (1, 0)), types) + variables = types.pop(('func', (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['float', 'int', 'str']), 'b': set(['int'])}, variables) + + def _test_typing_function_char_loop(self): + code = '''\ + def func(x): + l = [] + for c in x: + l.append(c) + return l + + print(func('abcdefg')) + ''' + types = self._test(code) + self.assertIn(('func', (1, 0)), types) + variables = types.pop(('func', (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['int']), 'i': set(['int'])}, variables) + + def test_typing_global_list(self): + code = '''\ + a = [x for x in range(10)] + b = list(range(10)) + c = a + b + d = [0]*10 + ''' + types = self._test(code) + self.assertIn((None, (1, 0)), types) + variables = types.pop((None, (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['list']), 'b': set(['list']), 'c': set(['list']), 'd': set(['list'])}, variables) + + def test_typing_function_list(self): + code = '''\ + def func(x): + a = [[], []] + b = [0]* 10 + a + c = a[0] + + print(func([0]*100)) + ''' + types = self._test(code) + self.assertIn(('func', (1, 0)), types) + variables = types.pop(('func', (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['list']), 'b': set(['list']), 'c': set(['list']), 'x': set(['list'])}, variables) + + def test_typing_global_dict(self): + code = '''\ + a = dict() + b = {i: i**2 for i in range(10)} + c = a + ''' + types = self._test(code) + self.assertIn((None, (1, 0)), types) + variables = types.pop((None, (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['dict']), 'b': set(['dict']), 'c': set(['dict'])}, variables) + + def test_typing_function_dict(self): + code = '''\ + def func(x): + a = dict() + b = {i: i**2 for i in range(10)} + c = x + + print(func({1:2, 'x':7})) + ''' + types = self._test(code) + self.assertIn(('func', (1, 0)), types) + variables = types.pop(('func', (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['dict']), 'b': set(['dict']), 'c': set(['dict']), 'x': set(['dict'])}, variables) + + + def test_typing_global_set(self): + code = '''\ + a = set() + # b = {i for i in range(10)} # jedi does not support set comprehension yet + c = a + d = {1,2,3} + e = a | b + ''' + types = self._test(code) + self.assertIn((None, (1, 0)), types) + variables = types.pop((None, (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['set']), 'c': set(['set']), 'd': set(['set']), 'e': set(['set'])}, variables) + + def 
test_typing_function_set(self): + code = '''\ + def func(x): + a = set() + # b = {i for i in range(10)} # jedi does not support set comprehension yet + c = a + d = a | b + + print(func({1,2,3})) + ''' + types = self._test(code) + self.assertIn(('func', (1, 0)), types) + variables = types.pop(('func', (1, 0))) + self.assertFalse(types) + self.assertEqual({'a': set(['set']), 'c': set(['set']), 'd': set(['set']), 'x': set(['set'])}, variables) + + +class TestTypeInjection(TestJediTyper): + """ + Subtype of TestJediTyper that additionally tests type injection and compilation. + """ + def setUp(self): + super(TestTypeInjection, self).setUp() + compilation_options = Main.CompilationOptions(Main.default_options) + ctx = compilation_options.create_context() + transform = InterpretCompilerDirectives(ctx, ctx.compiler_directives) + transform.module_scope = Symtab.ModuleScope('__main__', None, ctx) + self.declarations_finder = DeclarationsFinder() + self.pipeline = [NormalizeTree(None), transform, self.declarations_finder] + + def _test(self, code): + types, lines = _test_typing(code, inject=True) + tree = self.run_pipeline(self.pipeline, ''.join(lines)) + directives = self.declarations_finder.directives + # TODO: validate directives + return types diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/TestStringIOTree.py b/venv/lib/python3.8/site-packages/Cython/Tests/TestStringIOTree.py new file mode 100644 index 0000000..a15f2cd --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tests/TestStringIOTree.py @@ -0,0 +1,67 @@ +import unittest + +from Cython import StringIOTree as stringtree + +code = """ +cdef int spam # line 1 + +cdef ham(): + a = 1 + b = 2 + c = 3 + d = 4 + +def eggs(): + pass + +cpdef bacon(): + print spam + print 'scotch' + print 'tea?' + print 'or coffee?' 
# line 16 +""" + +linemap = dict(enumerate(code.splitlines())) + +class TestStringIOTree(unittest.TestCase): + + def setUp(self): + self.tree = stringtree.StringIOTree() + + def test_markers(self): + assert not self.tree.allmarkers() + + def test_insertion(self): + self.write_lines((1, 2, 3)) + line_4_to_6_insertion_point = self.tree.insertion_point() + self.write_lines((7, 8)) + line_9_to_13_insertion_point = self.tree.insertion_point() + self.write_lines((14, 15, 16)) + + line_4_insertion_point = line_4_to_6_insertion_point.insertion_point() + self.write_lines((5, 6), tree=line_4_to_6_insertion_point) + + line_9_to_12_insertion_point = ( + line_9_to_13_insertion_point.insertion_point()) + self.write_line(13, tree=line_9_to_13_insertion_point) + + self.write_line(4, tree=line_4_insertion_point) + self.write_line(9, tree=line_9_to_12_insertion_point) + line_10_insertion_point = line_9_to_12_insertion_point.insertion_point() + self.write_line(11, tree=line_9_to_12_insertion_point) + self.write_line(10, tree=line_10_insertion_point) + self.write_line(12, tree=line_9_to_12_insertion_point) + + self.assertEqual(self.tree.allmarkers(), list(range(1, 17))) + self.assertEqual(code.strip(), self.tree.getvalue().strip()) + + + def write_lines(self, linenos, tree=None): + for lineno in linenos: + self.write_line(lineno, tree=tree) + + def write_line(self, lineno, tree=None): + if tree is None: + tree = self.tree + tree.markers.append(lineno) + tree.write(linemap[lineno] + '\n') diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/__init__.py b/venv/lib/python3.8/site-packages/Cython/Tests/__init__.py new file mode 100644 index 0000000..fa81ada --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tests/__init__.py @@ -0,0 +1 @@ +# empty file diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestCodeWriter.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestCodeWriter.cpython-38.pyc new file mode 100644 index 0000000..68b90f9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestCodeWriter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestCythonUtils.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestCythonUtils.cpython-38.pyc new file mode 100644 index 0000000..7f2191c Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestCythonUtils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestJediTyper.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestJediTyper.cpython-38.pyc new file mode 100644 index 0000000..e497c72 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestJediTyper.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestStringIOTree.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestStringIOTree.cpython-38.pyc new file mode 100644 index 0000000..e47c888 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/TestStringIOTree.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..95f94fd Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/__init__.cpython-38.pyc 
differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/xmlrunner.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/xmlrunner.cpython-38.pyc new file mode 100644 index 0000000..b7b7b3f Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Tests/__pycache__/xmlrunner.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/Tests/xmlrunner.py b/venv/lib/python3.8/site-packages/Cython/Tests/xmlrunner.py new file mode 100644 index 0000000..665f3c2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Tests/xmlrunner.py @@ -0,0 +1,397 @@ +# -*- coding: utf-8 -*- + +"""unittest-xml-reporting is a PyUnit-based TestRunner that can export test +results to XML files that can be consumed by a wide range of tools, such as +build systems, IDEs and Continuous Integration servers. + +This module provides the XMLTestRunner class, which is heavily based on the +default TextTestRunner. This makes the XMLTestRunner very simple to use. + +The script below, adapted from the unittest documentation, shows how to use +XMLTestRunner in a very simple way. In fact, the only difference between this +script and the original one is the last line: + +import random +import unittest +import xmlrunner + +class TestSequenceFunctions(unittest.TestCase): + def setUp(self): + self.seq = range(10) + + def test_shuffle(self): + # make sure the shuffled sequence does not lose any elements + random.shuffle(self.seq) + self.seq.sort() + self.assertEqual(self.seq, range(10)) + + def test_choice(self): + element = random.choice(self.seq) + self.assert_(element in self.seq) + + def test_sample(self): + self.assertRaises(ValueError, random.sample, self.seq, 20) + for element in random.sample(self.seq, 5): + self.assert_(element in self.seq) + +if __name__ == '__main__': + unittest.main(testRunner=xmlrunner.XMLTestRunner(output='test-reports')) +""" + +from __future__ import absolute_import + +import os +import sys +import time +from unittest import TestResult, _TextTestResult, TextTestRunner +import xml.dom.minidom +try: + from StringIO import StringIO +except ImportError: + from io import StringIO # doesn't accept 'str' in Py2 + + +class XMLDocument(xml.dom.minidom.Document): + def createCDATAOrText(self, data): + if ']]>' in data: + return self.createTextNode(data) + return self.createCDATASection(data) + + +class _TestInfo(object): + """This class is used to keep useful information about the execution of a + test method. + """ + + # Possible test outcomes + (SUCCESS, FAILURE, ERROR) = range(3) + + def __init__(self, test_result, test_method, outcome=SUCCESS, err=None): + "Create a new instance of _TestInfo." + self.test_result = test_result + self.test_method = test_method + self.outcome = outcome + self.err = err + self.stdout = test_result.stdout and test_result.stdout.getvalue().strip() or '' + self.stderr = test_result.stdout and test_result.stderr.getvalue().strip() or '' + + def get_elapsed_time(self): + """Return the time that shows how long the test method took to + execute. + """ + return self.test_result.stop_time - self.test_result.start_time + + def get_description(self): + "Return a text representation of the test method." + return self.test_result.getDescription(self.test_method) + + def get_error_info(self): + """Return a text representation of an exception thrown by a test + method. 
+ """ + if not self.err: + return '' + return self.test_result._exc_info_to_string( + self.err, self.test_method) + + +class _XMLTestResult(_TextTestResult): + """A test result class that can express test results in a XML report. + + Used by XMLTestRunner. + """ + def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1, + elapsed_times=True): + "Create a new instance of _XMLTestResult." + _TextTestResult.__init__(self, stream, descriptions, verbosity) + self.successes = [] + self.callback = None + self.elapsed_times = elapsed_times + self.output_patched = False + + def _prepare_callback(self, test_info, target_list, verbose_str, + short_str): + """Append a _TestInfo to the given target list and sets a callback + method to be called by stopTest method. + """ + target_list.append(test_info) + def callback(): + """This callback prints the test method outcome to the stream, + as well as the elapsed time. + """ + + # Ignore the elapsed times for a more reliable unit testing + if not self.elapsed_times: + self.start_time = self.stop_time = 0 + + if self.showAll: + self.stream.writeln('(%.3fs) %s' % \ + (test_info.get_elapsed_time(), verbose_str)) + elif self.dots: + self.stream.write(short_str) + self.callback = callback + + def _patch_standard_output(self): + """Replace the stdout and stderr streams with string-based streams + in order to capture the tests' output. + """ + if not self.output_patched: + (self.old_stdout, self.old_stderr) = (sys.stdout, sys.stderr) + self.output_patched = True + (sys.stdout, sys.stderr) = (self.stdout, self.stderr) = \ + (StringIO(), StringIO()) + + def _restore_standard_output(self): + "Restore the stdout and stderr streams." + (sys.stdout, sys.stderr) = (self.old_stdout, self.old_stderr) + self.output_patched = False + + def startTest(self, test): + "Called before execute each test method." + self._patch_standard_output() + self.start_time = time.time() + TestResult.startTest(self, test) + + if self.showAll: + self.stream.write(' ' + self.getDescription(test)) + self.stream.write(" ... ") + + def stopTest(self, test): + "Called after execute each test method." + self._restore_standard_output() + _TextTestResult.stopTest(self, test) + self.stop_time = time.time() + + if self.callback and callable(self.callback): + self.callback() + self.callback = None + + def addSuccess(self, test): + "Called when a test executes successfully." + self._prepare_callback(_TestInfo(self, test), + self.successes, 'OK', '.') + + def addFailure(self, test, err): + "Called when a test method fails." + self._prepare_callback(_TestInfo(self, test, _TestInfo.FAILURE, err), + self.failures, 'FAIL', 'F') + + def addError(self, test, err): + "Called when a test method raises an error." + self._prepare_callback(_TestInfo(self, test, _TestInfo.ERROR, err), + self.errors, 'ERROR', 'E') + + def printErrorList(self, flavour, errors): + "Write some information about the FAIL or ERROR to the stream." 
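+        # Each entry renders roughly as (timing illustrative):
+        #
+        #   ======================================================================
+        #   FAIL [0.003s]: test_choice (__main__.TestSequenceFunctions)
+        #   ----------------------------------------------------------------------
+        #   ... captured traceback ...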
+ for test_info in errors: + if isinstance(test_info, tuple): + test_info, exc_info = test_info + + try: + t = test_info.get_elapsed_time() + except AttributeError: + t = 0 + try: + descr = test_info.get_description() + except AttributeError: + try: + descr = test_info.getDescription() + except AttributeError: + descr = str(test_info) + try: + err_info = test_info.get_error_info() + except AttributeError: + err_info = str(test_info) + + self.stream.writeln(self.separator1) + self.stream.writeln('%s [%.3fs]: %s' % (flavour, t, descr)) + self.stream.writeln(self.separator2) + self.stream.writeln('%s' % err_info) + + def _get_info_by_testcase(self): + """This method organizes test results by TestCase module. This + information is used during the report generation, where a XML report + will be generated for each TestCase. + """ + tests_by_testcase = {} + + for tests in (self.successes, self.failures, self.errors): + for test_info in tests: + if not isinstance(test_info, _TestInfo): + print("Unexpected test result type: %r" % (test_info,)) + continue + testcase = type(test_info.test_method) + + # Ignore module name if it is '__main__' + module = testcase.__module__ + '.' + if module == '__main__.': + module = '' + testcase_name = module + testcase.__name__ + + if testcase_name not in tests_by_testcase: + tests_by_testcase[testcase_name] = [] + tests_by_testcase[testcase_name].append(test_info) + + return tests_by_testcase + + def _report_testsuite(suite_name, tests, xml_document): + "Appends the testsuite section to the XML document." + testsuite = xml_document.createElement('testsuite') + xml_document.appendChild(testsuite) + + testsuite.setAttribute('name', str(suite_name)) + testsuite.setAttribute('tests', str(len(tests))) + + testsuite.setAttribute('time', '%.3f' % + sum([e.get_elapsed_time() for e in tests])) + + failures = len([1 for e in tests if e.outcome == _TestInfo.FAILURE]) + testsuite.setAttribute('failures', str(failures)) + + errors = len([1 for e in tests if e.outcome == _TestInfo.ERROR]) + testsuite.setAttribute('errors', str(errors)) + + return testsuite + + _report_testsuite = staticmethod(_report_testsuite) + + def _report_testcase(suite_name, test_result, xml_testsuite, xml_document): + "Appends a testcase section to the XML document." + testcase = xml_document.createElement('testcase') + xml_testsuite.appendChild(testcase) + + testcase.setAttribute('classname', str(suite_name)) + testcase.setAttribute('name', test_result.test_method.shortDescription() + or getattr(test_result.test_method, '_testMethodName', + str(test_result.test_method))) + testcase.setAttribute('time', '%.3f' % test_result.get_elapsed_time()) + + if (test_result.outcome != _TestInfo.SUCCESS): + elem_name = ('failure', 'error')[test_result.outcome-1] + failure = xml_document.createElement(elem_name) + testcase.appendChild(failure) + + failure.setAttribute('type', str(test_result.err[0].__name__)) + failure.setAttribute('message', str(test_result.err[1])) + + error_info = test_result.get_error_info() + failureText = xml_document.createCDATAOrText(error_info) + failure.appendChild(failureText) + + _report_testcase = staticmethod(_report_testcase) + + def _report_output(test_runner, xml_testsuite, xml_document, stdout, stderr): + "Appends the system-out and system-err sections to the XML document." 
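+        # The resulting per-suite document has this shape (sketch; attribute
+        # values illustrative):
+        #
+        #   <testsuite name="pkg.TestCase" tests="3" time="0.012"
+        #              failures="1" errors="0">
+        #     <testcase classname="pkg.TestCase" name="test_x" time="0.004"/>
+        #     <system-out><![CDATA[...]]></system-out>
+        #     <system-err><![CDATA[...]]></system-err>
+        #   </testsuite>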
+ systemout = xml_document.createElement('system-out') + xml_testsuite.appendChild(systemout) + + systemout_text = xml_document.createCDATAOrText(stdout) + systemout.appendChild(systemout_text) + + systemerr = xml_document.createElement('system-err') + xml_testsuite.appendChild(systemerr) + + systemerr_text = xml_document.createCDATAOrText(stderr) + systemerr.appendChild(systemerr_text) + + _report_output = staticmethod(_report_output) + + def generate_reports(self, test_runner): + "Generates the XML reports to a given XMLTestRunner object." + all_results = self._get_info_by_testcase() + + if type(test_runner.output) == str and not \ + os.path.exists(test_runner.output): + os.makedirs(test_runner.output) + + for suite, tests in all_results.items(): + doc = XMLDocument() + + # Build the XML file + testsuite = _XMLTestResult._report_testsuite(suite, tests, doc) + stdout, stderr = [], [] + for test in tests: + _XMLTestResult._report_testcase(suite, test, testsuite, doc) + if test.stdout: + stdout.extend(['*****************', test.get_description(), test.stdout]) + if test.stderr: + stderr.extend(['*****************', test.get_description(), test.stderr]) + _XMLTestResult._report_output(test_runner, testsuite, doc, + '\n'.join(stdout), '\n'.join(stderr)) + xml_content = doc.toprettyxml(indent='\t') + + if type(test_runner.output) is str: + report_file = open('%s%sTEST-%s.xml' % \ + (test_runner.output, os.sep, suite), 'w') + try: + report_file.write(xml_content) + finally: + report_file.close() + else: + # Assume that test_runner.output is a stream + test_runner.output.write(xml_content) + + +class XMLTestRunner(TextTestRunner): + """A test runner class that outputs the results in JUnit like XML files. + """ + def __init__(self, output='.', stream=None, descriptions=True, verbose=False, elapsed_times=True): + "Create a new instance of XMLTestRunner." + if stream is None: + stream = sys.stderr + verbosity = (1, 2)[verbose] + TextTestRunner.__init__(self, stream, descriptions, verbosity) + self.output = output + self.elapsed_times = elapsed_times + + def _make_result(self): + """Create the TestResult object which will be used to store + information about the executed tests. + """ + return _XMLTestResult(self.stream, self.descriptions, \ + self.verbosity, self.elapsed_times) + + def run(self, test): + "Run the given test case or test suite." 
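+        # Typical invocation (sketch, mirroring the module docstring):
+        #
+        #   suite = unittest.TestLoader().loadTestsFromTestCase(SomeTest)
+        #   XMLTestRunner(output='test-reports').run(suite)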
+ # Prepare the test execution + result = self._make_result() + + # Print a nice header + self.stream.writeln() + self.stream.writeln('Running tests...') + self.stream.writeln(result.separator2) + + # Execute tests + start_time = time.time() + test(result) + stop_time = time.time() + time_taken = stop_time - start_time + + # Generate reports + self.stream.writeln() + self.stream.writeln('Generating XML reports...') + result.generate_reports(self) + + # Print results + result.printErrors() + self.stream.writeln(result.separator2) + run = result.testsRun + self.stream.writeln("Ran %d test%s in %.3fs" % + (run, run != 1 and "s" or "", time_taken)) + self.stream.writeln() + + # Error traces + if not result.wasSuccessful(): + self.stream.write("FAILED (") + failed, errored = (len(result.failures), len(result.errors)) + if failed: + self.stream.write("failures=%d" % failed) + if errored: + if failed: + self.stream.write(", ") + self.stream.write("errors=%d" % errored) + self.stream.writeln(")") + else: + self.stream.writeln("OK") + + return result diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/AsyncGen.c b/venv/lib/python3.8/site-packages/Cython/Utility/AsyncGen.c new file mode 100644 index 0000000..4e95218 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/AsyncGen.c @@ -0,0 +1,1112 @@ +// This is copied from genobject.c in CPython 3.6. +// Try to keep it in sync by doing this from time to time: +// sed -e 's|__pyx_||ig' Cython/Utility/AsyncGen.c | diff -udw - cpython/Objects/genobject.c | less + +//////////////////// AsyncGenerator.proto //////////////////// +//@requires: Coroutine.c::Coroutine + +#define __Pyx_AsyncGen_USED +typedef struct { + __pyx_CoroutineObject coro; + PyObject *ag_finalizer; + int ag_hooks_inited; + int ag_closed; +} __pyx_PyAsyncGenObject; + +static PyTypeObject *__pyx__PyAsyncGenWrappedValueType = 0; +static PyTypeObject *__pyx__PyAsyncGenASendType = 0; +static PyTypeObject *__pyx__PyAsyncGenAThrowType = 0; +static PyTypeObject *__pyx_AsyncGenType = 0; + +#define __Pyx_AsyncGen_CheckExact(obj) (Py_TYPE(obj) == __pyx_AsyncGenType) +#define __pyx_PyAsyncGenASend_CheckExact(o) \ + (Py_TYPE(o) == __pyx__PyAsyncGenASendType) +#define __pyx_PyAsyncGenAThrow_CheckExact(o) \ + (Py_TYPE(o) == __pyx__PyAsyncGenAThrowType) + +static PyObject *__Pyx_async_gen_anext(PyObject *o); +static CYTHON_INLINE PyObject *__Pyx_async_gen_asend_iternext(PyObject *o); +static PyObject *__Pyx_async_gen_asend_send(PyObject *o, PyObject *arg); +static PyObject *__Pyx_async_gen_asend_close(PyObject *o, PyObject *args); +static PyObject *__Pyx_async_gen_athrow_close(PyObject *o, PyObject *args); + +static PyObject *__Pyx__PyAsyncGenValueWrapperNew(PyObject *val); + + +static __pyx_CoroutineObject *__Pyx_AsyncGen_New( + __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name) { + __pyx_PyAsyncGenObject *gen = PyObject_GC_New(__pyx_PyAsyncGenObject, __pyx_AsyncGenType); + if (unlikely(!gen)) + return NULL; + gen->ag_finalizer = NULL; + gen->ag_closed = 0; + gen->ag_hooks_inited = 0; + return __Pyx__Coroutine_NewInit((__pyx_CoroutineObject*)gen, body, code, closure, name, qualname, module_name); +} + +static int __pyx_AsyncGen_init(void); +static void __Pyx_PyAsyncGen_Fini(void); + +//////////////////// AsyncGenerator.cleanup //////////////////// + +__Pyx_PyAsyncGen_Fini(); + +//////////////////// AsyncGeneratorInitFinalizer //////////////////// + +// this is separated out because it needs more adaptation 
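+// (CPython only grew the per-thread async_gen_firstiter/async_gen_finalizer
+// hooks in 3.6, so the pre-3.6 fallback below is effectively a no-op stub.)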
+ +#if PY_VERSION_HEX < 0x030600B0 +static int __Pyx_async_gen_init_hooks(__pyx_PyAsyncGenObject *o) { +#if 0 + // TODO: implement finalizer support in older Python versions + PyThreadState *tstate; + PyObject *finalizer; + PyObject *firstiter; +#endif + + if (likely(o->ag_hooks_inited)) { + return 0; + } + + o->ag_hooks_inited = 1; + +#if 0 + tstate = __Pyx_PyThreadState_Current; + + finalizer = tstate->async_gen_finalizer; + if (finalizer) { + Py_INCREF(finalizer); + o->ag_finalizer = finalizer; + } + + firstiter = tstate->async_gen_firstiter; + if (firstiter) { + PyObject *res; + + Py_INCREF(firstiter); + res = __Pyx_PyObject_CallOneArg(firstiter, (PyObject*)o); + Py_DECREF(firstiter); + if (res == NULL) { + return 1; + } + Py_DECREF(res); + } +#endif + + return 0; +} +#endif + + +//////////////////// AsyncGenerator //////////////////// +//@requires: AsyncGeneratorInitFinalizer +//@requires: Coroutine.c::Coroutine +//@requires: Coroutine.c::ReturnWithStopIteration +//@requires: ObjectHandling.c::PyObjectCall2Args +//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict + +PyDoc_STRVAR(__Pyx_async_gen_send_doc, +"send(arg) -> send 'arg' into generator,\n\ +return next yielded value or raise StopIteration."); + +PyDoc_STRVAR(__Pyx_async_gen_close_doc, +"close() -> raise GeneratorExit inside generator."); + +PyDoc_STRVAR(__Pyx_async_gen_throw_doc, +"throw(typ[,val[,tb]]) -> raise exception in generator,\n\ +return next yielded value or raise StopIteration."); + +PyDoc_STRVAR(__Pyx_async_gen_await_doc, +"__await__() -> return a representation that can be passed into the 'await' expression."); + +// COPY STARTS HERE: + +static PyObject *__Pyx_async_gen_asend_new(__pyx_PyAsyncGenObject *, PyObject *); +static PyObject *__Pyx_async_gen_athrow_new(__pyx_PyAsyncGenObject *, PyObject *); + +static const char *__Pyx_NON_INIT_CORO_MSG = "can't send non-None value to a just-started coroutine"; +static const char *__Pyx_ASYNC_GEN_IGNORED_EXIT_MSG = "async generator ignored GeneratorExit"; + +typedef enum { + __PYX_AWAITABLE_STATE_INIT, /* new awaitable, has not yet been iterated */ + __PYX_AWAITABLE_STATE_ITER, /* being iterated */ + __PYX_AWAITABLE_STATE_CLOSED, /* closed */ +} __pyx_AwaitableState; + +typedef struct { + PyObject_HEAD + __pyx_PyAsyncGenObject *ags_gen; + + /* Can be NULL, when in the __anext__() mode (equivalent of "asend(None)") */ + PyObject *ags_sendval; + + __pyx_AwaitableState ags_state; +} __pyx_PyAsyncGenASend; + + +typedef struct { + PyObject_HEAD + __pyx_PyAsyncGenObject *agt_gen; + + /* Can be NULL, when in the "aclose()" mode (equivalent of "athrow(GeneratorExit)") */ + PyObject *agt_args; + + __pyx_AwaitableState agt_state; +} __pyx_PyAsyncGenAThrow; + + +typedef struct { + PyObject_HEAD + PyObject *agw_val; +} __pyx__PyAsyncGenWrappedValue; + + +#ifndef _PyAsyncGen_MAXFREELIST +#define _PyAsyncGen_MAXFREELIST 80 +#endif + +// Freelists boost performance 6-10%; they also reduce memory +// fragmentation, as _PyAsyncGenWrappedValue and PyAsyncGenASend +// are short-living objects that are instantiated for every +// __anext__ call. 
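+//
+// The freelist is a simple LIFO cache of dead objects: the dealloc functions
+// push an object instead of freeing it while there is room, and the *_new
+// functions pop one and revive it with _Py_NewReference() before falling
+// back to PyObject_GC_New().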
+
+static __pyx__PyAsyncGenWrappedValue *__Pyx_ag_value_freelist[_PyAsyncGen_MAXFREELIST];
+static int __Pyx_ag_value_freelist_free = 0;
+
+static __pyx_PyAsyncGenASend *__Pyx_ag_asend_freelist[_PyAsyncGen_MAXFREELIST];
+static int __Pyx_ag_asend_freelist_free = 0;
+
+#define __pyx__PyAsyncGenWrappedValue_CheckExact(o) \
+    (Py_TYPE(o) == __pyx__PyAsyncGenWrappedValueType)
+
+
+static int
+__Pyx_async_gen_traverse(__pyx_PyAsyncGenObject *gen, visitproc visit, void *arg)
+{
+    Py_VISIT(gen->ag_finalizer);
+    return __Pyx_Coroutine_traverse((__pyx_CoroutineObject*)gen, visit, arg);
+}
+
+
+static PyObject *
+__Pyx_async_gen_repr(__pyx_CoroutineObject *o)
+{
+    // avoid NULL pointer dereference for qualname during garbage collection
+    return PyUnicode_FromFormat("<async_generator object %S at %p>",
+                                o->gi_qualname ? o->gi_qualname : Py_None, o);
+}
+
+
+#if PY_VERSION_HEX >= 0x030600B0
+static int
+__Pyx_async_gen_init_hooks(__pyx_PyAsyncGenObject *o)
+{
+    PyThreadState *tstate;
+    PyObject *finalizer;
+    PyObject *firstiter;
+
+    if (o->ag_hooks_inited) {
+        return 0;
+    }
+
+    o->ag_hooks_inited = 1;
+
+    tstate = __Pyx_PyThreadState_Current;
+
+    finalizer = tstate->async_gen_finalizer;
+    if (finalizer) {
+        Py_INCREF(finalizer);
+        o->ag_finalizer = finalizer;
+    }
+
+    firstiter = tstate->async_gen_firstiter;
+    if (firstiter) {
+        PyObject *res;
+#if CYTHON_UNPACK_METHODS
+        PyObject *self;
+#endif
+
+        Py_INCREF(firstiter);
+        // at least asyncio stores methods here => optimise the call
+#if CYTHON_UNPACK_METHODS
+        if (likely(PyMethod_Check(firstiter)) && likely((self = PyMethod_GET_SELF(firstiter)) != NULL)) {
+            PyObject *function = PyMethod_GET_FUNCTION(firstiter);
+            res = __Pyx_PyObject_Call2Args(function, self, (PyObject*)o);
+        } else
+#endif
+        res = __Pyx_PyObject_CallOneArg(firstiter, (PyObject*)o);
+
+        Py_DECREF(firstiter);
+        if (unlikely(res == NULL)) {
+            return 1;
+        }
+        Py_DECREF(res);
+    }
+
+    return 0;
+}
+#endif
+
+
+static PyObject *
+__Pyx_async_gen_anext(PyObject *g)
+{
+    __pyx_PyAsyncGenObject *o = (__pyx_PyAsyncGenObject*) g;
+    if (__Pyx_async_gen_init_hooks(o)) {
+        return NULL;
+    }
+    return __Pyx_async_gen_asend_new(o, NULL);
+}
+
+static PyObject *
+__Pyx_async_gen_anext_method(PyObject *g, CYTHON_UNUSED PyObject *arg) {
+    return __Pyx_async_gen_anext(g);
+}
+
+
+static PyObject *
+__Pyx_async_gen_asend(__pyx_PyAsyncGenObject *o, PyObject *arg)
+{
+    if (__Pyx_async_gen_init_hooks(o)) {
+        return NULL;
+    }
+    return __Pyx_async_gen_asend_new(o, arg);
+}
+
+
+static PyObject *
+__Pyx_async_gen_aclose(__pyx_PyAsyncGenObject *o, CYTHON_UNUSED PyObject *arg)
+{
+    if (__Pyx_async_gen_init_hooks(o)) {
+        return NULL;
+    }
+    return __Pyx_async_gen_athrow_new(o, NULL);
+}
+
+
+static PyObject *
+__Pyx_async_gen_athrow(__pyx_PyAsyncGenObject *o, PyObject *args)
+{
+    if (__Pyx_async_gen_init_hooks(o)) {
+        return NULL;
+    }
+    return __Pyx_async_gen_athrow_new(o, args);
+}
+
+
+static PyObject *
+__Pyx_async_gen_self_method(PyObject *g, CYTHON_UNUSED PyObject *arg) {
+    return __Pyx_NewRef(g);
+}
+
+
+static PyGetSetDef __Pyx_async_gen_getsetlist[] = {
+    {(char*) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name,
+     (char*) PyDoc_STR("name of the async generator"), 0},
+    {(char*) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname,
+     (char*) PyDoc_STR("qualified name of the async generator"), 0},
+    //REMOVED: {(char*) "ag_await", (getter)coro_get_cr_await, NULL,
+    //REMOVED:     (char*) PyDoc_STR("object being awaited on, or None")},
+    {0, 0, 0, 0, 0} /* Sentinel */
+};
+
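+// ag_running and the re-added ag_await map directly onto struct fields of
+// __pyx_CoroutineObject, so they are exposed through the PyMemberDef table
+// below rather than through getter functions.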
+static PyMemberDef __Pyx_async_gen_memberlist[] = { + //REMOVED: {(char*) "ag_frame", T_OBJECT, offsetof(__pyx_PyAsyncGenObject, ag_frame), READONLY}, + {(char*) "ag_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL}, + //REMOVED: {(char*) "ag_code", T_OBJECT, offsetof(__pyx_PyAsyncGenObject, ag_code), READONLY}, + //ADDED: "ag_await" + {(char*) "ag_await", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY, + (char*) PyDoc_STR("object being awaited on, or None")}, + {0, 0, 0, 0, 0} /* Sentinel */ +}; + +PyDoc_STRVAR(__Pyx_async_aclose_doc, +"aclose() -> raise GeneratorExit inside generator."); + +PyDoc_STRVAR(__Pyx_async_asend_doc, +"asend(v) -> send 'v' in generator."); + +PyDoc_STRVAR(__Pyx_async_athrow_doc, +"athrow(typ[,val[,tb]]) -> raise exception in generator."); + +PyDoc_STRVAR(__Pyx_async_aiter_doc, +"__aiter__(v) -> return an asynchronous iterator."); + +PyDoc_STRVAR(__Pyx_async_anext_doc, +"__anext__(v) -> continue asynchronous iteration and return the next element."); + +static PyMethodDef __Pyx_async_gen_methods[] = { + {"asend", (PyCFunction)__Pyx_async_gen_asend, METH_O, __Pyx_async_asend_doc}, + {"athrow",(PyCFunction)__Pyx_async_gen_athrow, METH_VARARGS, __Pyx_async_athrow_doc}, + {"aclose", (PyCFunction)__Pyx_async_gen_aclose, METH_NOARGS, __Pyx_async_aclose_doc}, + {"__aiter__", (PyCFunction)__Pyx_async_gen_self_method, METH_NOARGS, __Pyx_async_aiter_doc}, + {"__anext__", (PyCFunction)__Pyx_async_gen_anext_method, METH_NOARGS, __Pyx_async_anext_doc}, + {0, 0, 0, 0} /* Sentinel */ +}; + + +#if CYTHON_USE_ASYNC_SLOTS +static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_as_async = { + 0, /* am_await */ + PyObject_SelfIter, /* am_aiter */ + (unaryfunc)__Pyx_async_gen_anext /* am_anext */ +}; +#endif + +static PyTypeObject __pyx_AsyncGenType_type = { + PyVarObject_HEAD_INIT(0, 0) + "async_generator", /* tp_name */ + sizeof(__pyx_PyAsyncGenObject), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)__Pyx_Coroutine_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ +#if CYTHON_USE_ASYNC_SLOTS + &__Pyx_async_gen_as_async, /* tp_as_async */ +#else + 0, /*tp_reserved*/ +#endif + (reprfunc)__Pyx_async_gen_repr, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_HAVE_FINALIZE, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)__Pyx_async_gen_traverse, /* tp_traverse */ + 0, /* tp_clear */ +#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1 + // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare + __Pyx_Coroutine_compare, /*tp_richcompare*/ +#else + 0, /*tp_richcompare*/ +#endif + offsetof(__pyx_CoroutineObject, gi_weakreflist), /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + __Pyx_async_gen_methods, /* tp_methods */ + __Pyx_async_gen_memberlist, /* tp_members */ + __Pyx_async_gen_getsetlist, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, /* tp_new */ + 0, /* tp_free */ + 0, /* tp_is_gc */ + 0, /* tp_bases */ + 0, /* tp_mro */ + 0, /* tp_cache */ + 0, /* tp_subclasses */ + 0, /* tp_weaklist */ +#if CYTHON_USE_TP_FINALIZE + 0, /*tp_del*/ +#else + 
__Pyx_Coroutine_del, /*tp_del*/ +#endif + 0, /* tp_version_tag */ +#if CYTHON_USE_TP_FINALIZE + __Pyx_Coroutine_del, /* tp_finalize */ +#elif PY_VERSION_HEX >= 0x030400a1 + 0, /* tp_finalize */ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + + +static int +__Pyx_PyAsyncGen_ClearFreeLists(void) +{ + int ret = __Pyx_ag_value_freelist_free + __Pyx_ag_asend_freelist_free; + + while (__Pyx_ag_value_freelist_free) { + __pyx__PyAsyncGenWrappedValue *o; + o = __Pyx_ag_value_freelist[--__Pyx_ag_value_freelist_free]; + assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o)); + PyObject_GC_Del(o); + } + + while (__Pyx_ag_asend_freelist_free) { + __pyx_PyAsyncGenASend *o; + o = __Pyx_ag_asend_freelist[--__Pyx_ag_asend_freelist_free]; + assert(Py_TYPE(o) == __pyx__PyAsyncGenASendType); + PyObject_GC_Del(o); + } + + return ret; +} + +static void +__Pyx_PyAsyncGen_Fini(void) +{ + __Pyx_PyAsyncGen_ClearFreeLists(); +} + + +static PyObject * +__Pyx_async_gen_unwrap_value(__pyx_PyAsyncGenObject *gen, PyObject *result) +{ + if (result == NULL) { + PyObject *exc_type = PyErr_Occurred(); + if (!exc_type) { + PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration); + gen->ag_closed = 1; + } else if (__Pyx_PyErr_GivenExceptionMatches2(exc_type, __Pyx_PyExc_StopAsyncIteration, PyExc_GeneratorExit)) { + gen->ag_closed = 1; + } + + return NULL; + } + + if (__pyx__PyAsyncGenWrappedValue_CheckExact(result)) { + /* async yield */ + __Pyx_ReturnWithStopIteration(((__pyx__PyAsyncGenWrappedValue*)result)->agw_val); + Py_DECREF(result); + return NULL; + } + + return result; +} + + +/* ---------- Async Generator ASend Awaitable ------------ */ + + +static void +__Pyx_async_gen_asend_dealloc(__pyx_PyAsyncGenASend *o) +{ + PyObject_GC_UnTrack((PyObject *)o); + Py_CLEAR(o->ags_gen); + Py_CLEAR(o->ags_sendval); + if (__Pyx_ag_asend_freelist_free < _PyAsyncGen_MAXFREELIST) { + assert(__pyx_PyAsyncGenASend_CheckExact(o)); + __Pyx_ag_asend_freelist[__Pyx_ag_asend_freelist_free++] = o; + } else { + PyObject_GC_Del(o); + } +} + +static int +__Pyx_async_gen_asend_traverse(__pyx_PyAsyncGenASend *o, visitproc visit, void *arg) +{ + Py_VISIT(o->ags_gen); + Py_VISIT(o->ags_sendval); + return 0; +} + + +static PyObject * +__Pyx_async_gen_asend_send(PyObject *g, PyObject *arg) +{ + __pyx_PyAsyncGenASend *o = (__pyx_PyAsyncGenASend*) g; + PyObject *result; + + if (unlikely(o->ags_state == __PYX_AWAITABLE_STATE_CLOSED)) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } + + if (o->ags_state == __PYX_AWAITABLE_STATE_INIT) { + if (arg == NULL || arg == Py_None) { + arg = o->ags_sendval ? 
o->ags_sendval : Py_None; + } + o->ags_state = __PYX_AWAITABLE_STATE_ITER; + } + + result = __Pyx_Coroutine_Send((PyObject*)o->ags_gen, arg); + result = __Pyx_async_gen_unwrap_value(o->ags_gen, result); + + if (result == NULL) { + o->ags_state = __PYX_AWAITABLE_STATE_CLOSED; + } + + return result; +} + + +static CYTHON_INLINE PyObject * +__Pyx_async_gen_asend_iternext(PyObject *o) +{ + return __Pyx_async_gen_asend_send(o, Py_None); +} + + +static PyObject * +__Pyx_async_gen_asend_throw(__pyx_PyAsyncGenASend *o, PyObject *args) +{ + PyObject *result; + + if (unlikely(o->ags_state == __PYX_AWAITABLE_STATE_CLOSED)) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } + + result = __Pyx_Coroutine_Throw((PyObject*)o->ags_gen, args); + result = __Pyx_async_gen_unwrap_value(o->ags_gen, result); + + if (result == NULL) { + o->ags_state = __PYX_AWAITABLE_STATE_CLOSED; + } + + return result; +} + + +static PyObject * +__Pyx_async_gen_asend_close(PyObject *g, CYTHON_UNUSED PyObject *args) +{ + __pyx_PyAsyncGenASend *o = (__pyx_PyAsyncGenASend*) g; + o->ags_state = __PYX_AWAITABLE_STATE_CLOSED; + Py_RETURN_NONE; +} + + +static PyMethodDef __Pyx_async_gen_asend_methods[] = { + {"send", (PyCFunction)__Pyx_async_gen_asend_send, METH_O, __Pyx_async_gen_send_doc}, + {"throw", (PyCFunction)__Pyx_async_gen_asend_throw, METH_VARARGS, __Pyx_async_gen_throw_doc}, + {"close", (PyCFunction)__Pyx_async_gen_asend_close, METH_NOARGS, __Pyx_async_gen_close_doc}, + {"__await__", (PyCFunction)__Pyx_async_gen_self_method, METH_NOARGS, __Pyx_async_gen_await_doc}, + {0, 0, 0, 0} /* Sentinel */ +}; + + +#if CYTHON_USE_ASYNC_SLOTS +static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_asend_as_async = { + PyObject_SelfIter, /* am_await */ + 0, /* am_aiter */ + 0 /* am_anext */ +}; +#endif + + +static PyTypeObject __pyx__PyAsyncGenASendType_type = { + PyVarObject_HEAD_INIT(0, 0) + "async_generator_asend", /* tp_name */ + sizeof(__pyx_PyAsyncGenASend), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)__Pyx_async_gen_asend_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ +#if CYTHON_USE_ASYNC_SLOTS + &__Pyx_async_gen_asend_as_async, /* tp_as_async */ +#else + 0, /*tp_reserved*/ +#endif + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)__Pyx_async_gen_asend_traverse, /* tp_traverse */ + 0, /* tp_clear */ +#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1 + // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare + __Pyx_Coroutine_compare, /*tp_richcompare*/ +#else + 0, /*tp_richcompare*/ +#endif + 0, /* tp_weaklistoffset */ + PyObject_SelfIter, /* tp_iter */ + (iternextfunc)__Pyx_async_gen_asend_iternext, /* tp_iternext */ + __Pyx_async_gen_asend_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, /* tp_new */ + 0, /* tp_free */ + 0, /* tp_is_gc */ + 0, /* tp_bases */ + 0, /* tp_mro */ + 0, /* tp_cache */ + 0, /* tp_subclasses */ + 0, /* tp_weaklist */ + 0, /* tp_del */ + 0, /* tp_version_tag */ +#if PY_VERSION_HEX >= 0x030400a1 + 0, /* tp_finalize 
*/ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + + +static PyObject * +__Pyx_async_gen_asend_new(__pyx_PyAsyncGenObject *gen, PyObject *sendval) +{ + __pyx_PyAsyncGenASend *o; + if (__Pyx_ag_asend_freelist_free) { + __Pyx_ag_asend_freelist_free--; + o = __Pyx_ag_asend_freelist[__Pyx_ag_asend_freelist_free]; + _Py_NewReference((PyObject *)o); + } else { + o = PyObject_GC_New(__pyx_PyAsyncGenASend, __pyx__PyAsyncGenASendType); + if (o == NULL) { + return NULL; + } + } + + Py_INCREF(gen); + o->ags_gen = gen; + + Py_XINCREF(sendval); + o->ags_sendval = sendval; + + o->ags_state = __PYX_AWAITABLE_STATE_INIT; + + PyObject_GC_Track((PyObject*)o); + return (PyObject*)o; +} + + +/* ---------- Async Generator Value Wrapper ------------ */ + + +static void +__Pyx_async_gen_wrapped_val_dealloc(__pyx__PyAsyncGenWrappedValue *o) +{ + PyObject_GC_UnTrack((PyObject *)o); + Py_CLEAR(o->agw_val); + if (__Pyx_ag_value_freelist_free < _PyAsyncGen_MAXFREELIST) { + assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o)); + __Pyx_ag_value_freelist[__Pyx_ag_value_freelist_free++] = o; + } else { + PyObject_GC_Del(o); + } +} + + +static int +__Pyx_async_gen_wrapped_val_traverse(__pyx__PyAsyncGenWrappedValue *o, + visitproc visit, void *arg) +{ + Py_VISIT(o->agw_val); + return 0; +} + + +static PyTypeObject __pyx__PyAsyncGenWrappedValueType_type = { + PyVarObject_HEAD_INIT(0, 0) + "async_generator_wrapped_value", /* tp_name */ + sizeof(__pyx__PyAsyncGenWrappedValue), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)__Pyx_async_gen_wrapped_val_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_as_async */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)__Pyx_async_gen_wrapped_val_traverse, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + 0, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, /* tp_new */ + 0, /* tp_free */ + 0, /* tp_is_gc */ + 0, /* tp_bases */ + 0, /* tp_mro */ + 0, /* tp_cache */ + 0, /* tp_subclasses */ + 0, /* tp_weaklist */ + 0, /* tp_del */ + 0, /* tp_version_tag */ +#if PY_VERSION_HEX >= 0x030400a1 + 0, /* tp_finalize */ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + + +static PyObject * +__Pyx__PyAsyncGenValueWrapperNew(PyObject *val) +{ + // NOTE: steals a reference to val ! + __pyx__PyAsyncGenWrappedValue *o; + assert(val); + + if (__Pyx_ag_value_freelist_free) { + __Pyx_ag_value_freelist_free--; + o = __Pyx_ag_value_freelist[__Pyx_ag_value_freelist_free]; + assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o)); + _Py_NewReference((PyObject*)o); + } else { + o = PyObject_GC_New(__pyx__PyAsyncGenWrappedValue, __pyx__PyAsyncGenWrappedValueType); + if (unlikely(!o)) { + Py_DECREF(val); + return NULL; + } + } + o->agw_val = val; + // no Py_INCREF(val) - steals reference! 
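+    // (a caller that still needs val afterwards must own a second reference)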
+ PyObject_GC_Track((PyObject*)o); + return (PyObject*)o; +} + + +/* ---------- Async Generator AThrow awaitable ------------ */ + + +static void +__Pyx_async_gen_athrow_dealloc(__pyx_PyAsyncGenAThrow *o) +{ + PyObject_GC_UnTrack((PyObject *)o); + Py_CLEAR(o->agt_gen); + Py_CLEAR(o->agt_args); + PyObject_GC_Del(o); +} + + +static int +__Pyx_async_gen_athrow_traverse(__pyx_PyAsyncGenAThrow *o, visitproc visit, void *arg) +{ + Py_VISIT(o->agt_gen); + Py_VISIT(o->agt_args); + return 0; +} + + +static PyObject * +__Pyx_async_gen_athrow_send(__pyx_PyAsyncGenAThrow *o, PyObject *arg) +{ + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*)o->agt_gen; + PyObject *retval; + + if (o->agt_state == __PYX_AWAITABLE_STATE_CLOSED) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } + + if (o->agt_state == __PYX_AWAITABLE_STATE_INIT) { + if (o->agt_gen->ag_closed) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } + + if (arg != Py_None) { + PyErr_SetString(PyExc_RuntimeError, __Pyx_NON_INIT_CORO_MSG); + return NULL; + } + + o->agt_state = __PYX_AWAITABLE_STATE_ITER; + + if (o->agt_args == NULL) { + /* aclose() mode */ + o->agt_gen->ag_closed = 1; + + retval = __Pyx__Coroutine_Throw((PyObject*)gen, + /* Do not close generator when + PyExc_GeneratorExit is passed */ + PyExc_GeneratorExit, NULL, NULL, NULL, 0); + + if (retval && __pyx__PyAsyncGenWrappedValue_CheckExact(retval)) { + Py_DECREF(retval); + goto yield_close; + } + } else { + PyObject *typ; + PyObject *tb = NULL; + PyObject *val = NULL; + + if (!PyArg_UnpackTuple(o->agt_args, "athrow", 1, 3, + &typ, &val, &tb)) { + return NULL; + } + + retval = __Pyx__Coroutine_Throw((PyObject*)gen, + /* Do not close generator when PyExc_GeneratorExit is passed */ + typ, val, tb, o->agt_args, 0); + retval = __Pyx_async_gen_unwrap_value(o->agt_gen, retval); + } + if (retval == NULL) { + goto check_error; + } + return retval; + } + + assert (o->agt_state == __PYX_AWAITABLE_STATE_ITER); + + retval = __Pyx_Coroutine_Send((PyObject *)gen, arg); + if (o->agt_args) { + return __Pyx_async_gen_unwrap_value(o->agt_gen, retval); + } else { + /* aclose() mode */ + if (retval) { + if (__pyx__PyAsyncGenWrappedValue_CheckExact(retval)) { + Py_DECREF(retval); + goto yield_close; + } + else { + return retval; + } + } + else { + goto check_error; + } + } + +yield_close: + PyErr_SetString( + PyExc_RuntimeError, __Pyx_ASYNC_GEN_IGNORED_EXIT_MSG); + return NULL; + +check_error: + if (PyErr_ExceptionMatches(__Pyx_PyExc_StopAsyncIteration)) { + o->agt_state = __PYX_AWAITABLE_STATE_CLOSED; + if (o->agt_args == NULL) { + // when aclose() is called we don't want to propagate + // StopAsyncIteration; just raise StopIteration, signalling + // that 'aclose()' is done. 
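+            // (this matches the genobject.c code this file was copied from)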
+ PyErr_Clear(); + PyErr_SetNone(PyExc_StopIteration); + } + } + else if (PyErr_ExceptionMatches(PyExc_GeneratorExit)) { + o->agt_state = __PYX_AWAITABLE_STATE_CLOSED; + PyErr_Clear(); /* ignore these errors */ + PyErr_SetNone(PyExc_StopIteration); + } + return NULL; +} + + +static PyObject * +__Pyx_async_gen_athrow_throw(__pyx_PyAsyncGenAThrow *o, PyObject *args) +{ + PyObject *retval; + + if (o->agt_state == __PYX_AWAITABLE_STATE_INIT) { + PyErr_SetString(PyExc_RuntimeError, __Pyx_NON_INIT_CORO_MSG); + return NULL; + } + + if (o->agt_state == __PYX_AWAITABLE_STATE_CLOSED) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } + + retval = __Pyx_Coroutine_Throw((PyObject*)o->agt_gen, args); + if (o->agt_args) { + return __Pyx_async_gen_unwrap_value(o->agt_gen, retval); + } else { + /* aclose() mode */ + if (retval && __pyx__PyAsyncGenWrappedValue_CheckExact(retval)) { + Py_DECREF(retval); + PyErr_SetString(PyExc_RuntimeError, __Pyx_ASYNC_GEN_IGNORED_EXIT_MSG); + return NULL; + } + return retval; + } +} + + +static PyObject * +__Pyx_async_gen_athrow_iternext(__pyx_PyAsyncGenAThrow *o) +{ + return __Pyx_async_gen_athrow_send(o, Py_None); +} + + +static PyObject * +__Pyx_async_gen_athrow_close(PyObject *g, CYTHON_UNUSED PyObject *args) +{ + __pyx_PyAsyncGenAThrow *o = (__pyx_PyAsyncGenAThrow*) g; + o->agt_state = __PYX_AWAITABLE_STATE_CLOSED; + Py_RETURN_NONE; +} + + +static PyMethodDef __Pyx_async_gen_athrow_methods[] = { + {"send", (PyCFunction)__Pyx_async_gen_athrow_send, METH_O, __Pyx_async_gen_send_doc}, + {"throw", (PyCFunction)__Pyx_async_gen_athrow_throw, METH_VARARGS, __Pyx_async_gen_throw_doc}, + {"close", (PyCFunction)__Pyx_async_gen_athrow_close, METH_NOARGS, __Pyx_async_gen_close_doc}, + {"__await__", (PyCFunction)__Pyx_async_gen_self_method, METH_NOARGS, __Pyx_async_gen_await_doc}, + {0, 0, 0, 0} /* Sentinel */ +}; + + +#if CYTHON_USE_ASYNC_SLOTS +static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_athrow_as_async = { + PyObject_SelfIter, /* am_await */ + 0, /* am_aiter */ + 0 /* am_anext */ +}; +#endif + + +static PyTypeObject __pyx__PyAsyncGenAThrowType_type = { + PyVarObject_HEAD_INIT(0, 0) + "async_generator_athrow", /* tp_name */ + sizeof(__pyx_PyAsyncGenAThrow), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)__Pyx_async_gen_athrow_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ +#if CYTHON_USE_ASYNC_SLOTS + &__Pyx_async_gen_athrow_as_async, /* tp_as_async */ +#else + 0, /*tp_reserved*/ +#endif + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)__Pyx_async_gen_athrow_traverse, /* tp_traverse */ + 0, /* tp_clear */ +#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1 + // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare + __Pyx_Coroutine_compare, /*tp_richcompare*/ +#else + 0, /*tp_richcompare*/ +#endif + 0, /* tp_weaklistoffset */ + PyObject_SelfIter, /* tp_iter */ + (iternextfunc)__Pyx_async_gen_athrow_iternext, /* tp_iternext */ + __Pyx_async_gen_athrow_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, 
/* tp_new */ + 0, /* tp_free */ + 0, /* tp_is_gc */ + 0, /* tp_bases */ + 0, /* tp_mro */ + 0, /* tp_cache */ + 0, /* tp_subclasses */ + 0, /* tp_weaklist */ + 0, /* tp_del */ + 0, /* tp_version_tag */ +#if PY_VERSION_HEX >= 0x030400a1 + 0, /* tp_finalize */ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + + +static PyObject * +__Pyx_async_gen_athrow_new(__pyx_PyAsyncGenObject *gen, PyObject *args) +{ + __pyx_PyAsyncGenAThrow *o; + o = PyObject_GC_New(__pyx_PyAsyncGenAThrow, __pyx__PyAsyncGenAThrowType); + if (o == NULL) { + return NULL; + } + o->agt_gen = gen; + o->agt_args = args; + o->agt_state = __PYX_AWAITABLE_STATE_INIT; + Py_INCREF(gen); + Py_XINCREF(args); + PyObject_GC_Track((PyObject*)o); + return (PyObject*)o; +} + + +/* ---------- global type sharing ------------ */ + +static int __pyx_AsyncGen_init(void) { + // on Windows, C-API functions can't be used in slots statically + __pyx_AsyncGenType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + __pyx__PyAsyncGenWrappedValueType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + __pyx__PyAsyncGenAThrowType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + __pyx__PyAsyncGenASendType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + + __pyx_AsyncGenType = __Pyx_FetchCommonType(&__pyx_AsyncGenType_type); + if (unlikely(!__pyx_AsyncGenType)) + return -1; + + __pyx__PyAsyncGenAThrowType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenAThrowType_type); + if (unlikely(!__pyx__PyAsyncGenAThrowType)) + return -1; + + __pyx__PyAsyncGenWrappedValueType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenWrappedValueType_type); + if (unlikely(!__pyx__PyAsyncGenWrappedValueType)) + return -1; + + __pyx__PyAsyncGenASendType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenASendType_type); + if (unlikely(!__pyx__PyAsyncGenASendType)) + return -1; + + return 0; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Buffer.c b/venv/lib/python3.8/site-packages/Cython/Utility/Buffer.c new file mode 100644 index 0000000..3c7105f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Buffer.c @@ -0,0 +1,921 @@ +/////////////// BufferStructDeclare.proto /////////////// + +/* structs for buffer access */ + +typedef struct { + Py_ssize_t shape, strides, suboffsets; +} __Pyx_Buf_DimInfo; + +typedef struct { + size_t refcount; + Py_buffer pybuffer; +} __Pyx_Buffer; + +typedef struct { + __Pyx_Buffer *rcbuffer; + char *data; + __Pyx_Buf_DimInfo diminfo[{{max_dims}}]; +} __Pyx_LocalBuf_ND; + +/////////////// BufferIndexError.proto /////////////// +static void __Pyx_RaiseBufferIndexError(int axis); /*proto*/ + +/////////////// BufferIndexError /////////////// +static void __Pyx_RaiseBufferIndexError(int axis) { + PyErr_Format(PyExc_IndexError, + "Out of bounds on buffer access (axis %d)", axis); +} + +/////////////// BufferIndexErrorNogil.proto /////////////// +//@requires: BufferIndexError + +static void __Pyx_RaiseBufferIndexErrorNogil(int axis); /*proto*/ + +/////////////// BufferIndexErrorNogil /////////////// +static void __Pyx_RaiseBufferIndexErrorNogil(int axis) { + #ifdef WITH_THREAD + PyGILState_STATE gilstate = PyGILState_Ensure(); + #endif + __Pyx_RaiseBufferIndexError(axis); + #ifdef WITH_THREAD + PyGILState_Release(gilstate); + #endif +} + +/////////////// BufferFallbackError.proto /////////////// +static void __Pyx_RaiseBufferFallbackError(void); /*proto*/ + +/////////////// BufferFallbackError 
/////////////// +static void __Pyx_RaiseBufferFallbackError(void) { + PyErr_SetString(PyExc_ValueError, + "Buffer acquisition failed on assignment; and then reacquiring the old buffer failed too!"); +} + +/////////////// BufferFormatStructs.proto /////////////// +//@proto_block: utility_code_proto_before_types + +#define IS_UNSIGNED(type) (((type) -1) > 0) + +/* Run-time type information about structs used with buffers */ +struct __Pyx_StructField_; + +#define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0) + +typedef struct { + const char* name; /* for error messages only */ + struct __Pyx_StructField_* fields; + size_t size; /* sizeof(type) */ + size_t arraysize[8]; /* length of array in each dimension */ + int ndim; + char typegroup; /* _R_eal, _C_omplex, Signed _I_nt, _U_nsigned int, _S_truct, _P_ointer, _O_bject, c_H_ar */ + char is_unsigned; + int flags; +} __Pyx_TypeInfo; + +typedef struct __Pyx_StructField_ { + __Pyx_TypeInfo* type; + const char* name; + size_t offset; +} __Pyx_StructField; + +typedef struct { + __Pyx_StructField* field; + size_t parent_offset; +} __Pyx_BufFmt_StackElem; + +typedef struct { + __Pyx_StructField root; + __Pyx_BufFmt_StackElem* head; + size_t fmt_offset; + size_t new_count, enc_count; + size_t struct_alignment; + int is_complex; + char enc_type; + char new_packmode; + char enc_packmode; + char is_valid_array; +} __Pyx_BufFmt_Context; + + +/////////////// GetAndReleaseBuffer.proto /////////////// + +#if PY_MAJOR_VERSION < 3 + static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags); + static void __Pyx_ReleaseBuffer(Py_buffer *view); +#else + #define __Pyx_GetBuffer PyObject_GetBuffer + #define __Pyx_ReleaseBuffer PyBuffer_Release +#endif + +/////////////// GetAndReleaseBuffer /////////////// + +#if PY_MAJOR_VERSION < 3 +static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) { + if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags); + + {{for type_ptr, getbuffer, releasebuffer in types}} + {{if getbuffer}} + if (__Pyx_TypeCheck(obj, {{type_ptr}})) return {{getbuffer}}(obj, view, flags); + {{endif}} + {{endfor}} + + PyErr_Format(PyExc_TypeError, "'%.200s' does not have the buffer interface", Py_TYPE(obj)->tp_name); + return -1; +} + +static void __Pyx_ReleaseBuffer(Py_buffer *view) { + PyObject *obj = view->obj; + if (!obj) return; + + if (PyObject_CheckBuffer(obj)) { + PyBuffer_Release(view); + return; + } + + if ((0)) {} + {{for type_ptr, getbuffer, releasebuffer in types}} + {{if releasebuffer}} + else if (__Pyx_TypeCheck(obj, {{type_ptr}})) {{releasebuffer}}(obj, view); + {{endif}} + {{endfor}} + + view->obj = NULL; + Py_DECREF(obj); +} + +#endif /* PY_MAJOR_VERSION < 3 */ + + +/////////////// BufferGetAndValidate.proto /////////////// + +#define __Pyx_GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack) \ + ((obj == Py_None || obj == NULL) ? 
\ + (__Pyx_ZeroBuffer(buf), 0) : \ + __Pyx__GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack)) + +static int __Pyx__GetBufferAndValidate(Py_buffer* buf, PyObject* obj, + __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack); +static void __Pyx_ZeroBuffer(Py_buffer* buf); +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info);/*proto*/ + +static Py_ssize_t __Pyx_minusones[] = { {{ ", ".join(["-1"] * max_dims) }} }; +static Py_ssize_t __Pyx_zeros[] = { {{ ", ".join(["0"] * max_dims) }} }; + + +/////////////// BufferGetAndValidate /////////////// +//@requires: BufferFormatCheck + +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) { + if (unlikely(info->buf == NULL)) return; + if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL; + __Pyx_ReleaseBuffer(info); +} + +static void __Pyx_ZeroBuffer(Py_buffer* buf) { + buf->buf = NULL; + buf->obj = NULL; + buf->strides = __Pyx_zeros; + buf->shape = __Pyx_zeros; + buf->suboffsets = __Pyx_minusones; +} + +static int __Pyx__GetBufferAndValidate( + Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, + int nd, int cast, __Pyx_BufFmt_StackElem* stack) +{ + buf->buf = NULL; + if (unlikely(__Pyx_GetBuffer(obj, buf, flags) == -1)) { + __Pyx_ZeroBuffer(buf); + return -1; + } + // From this point on, we have acquired the buffer and must release it on errors. + if (unlikely(buf->ndim != nd)) { + PyErr_Format(PyExc_ValueError, + "Buffer has wrong number of dimensions (expected %d, got %d)", + nd, buf->ndim); + goto fail; + } + if (!cast) { + __Pyx_BufFmt_Context ctx; + __Pyx_BufFmt_Init(&ctx, stack, dtype); + if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail; + } + if (unlikely((size_t)buf->itemsize != dtype->size)) { + PyErr_Format(PyExc_ValueError, + "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)", + buf->itemsize, (buf->itemsize > 1) ? "s" : "", + dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : ""); + goto fail; + } + if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones; + return 0; +fail:; + __Pyx_SafeReleaseBuffer(buf); + return -1; +} + + +/////////////// BufferFormatCheck.proto /////////////// + +// Buffer format string checking +// +// Buffer type checking. Utility code for checking that acquired +// buffers match our assumptions. We only need to check ndim and +// the format string; the access mode/flags is checked by the +// exporter. See: +// +// http://docs.python.org/3/library/struct.html +// http://legacy.python.org/dev/peps/pep-3118/#additions-to-the-struct-string-syntax +// +// The alignment code is copied from _struct.c in Python. 
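+//
+// Illustrative example (not taken from this file): a C struct
+// { int x; double y; } exported with PEP 3118 field names has the format
+// string "T{i:x:d:y:}"; the 'T', ':' and '}' cases in
+// __Pyx_BufFmt_CheckString() below walk exactly this syntax.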
+ +static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts); +static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, + __Pyx_BufFmt_StackElem* stack, + __Pyx_TypeInfo* type); /*proto*/ + +/////////////// BufferFormatCheck /////////////// +//@requires: ModuleSetupCode.c::IsLittleEndian +//@requires: BufferFormatStructs + +static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, + __Pyx_BufFmt_StackElem* stack, + __Pyx_TypeInfo* type) { + stack[0].field = &ctx->root; + stack[0].parent_offset = 0; + ctx->root.type = type; + ctx->root.name = "buffer dtype"; + ctx->root.offset = 0; + ctx->head = stack; + ctx->head->field = &ctx->root; + ctx->fmt_offset = 0; + ctx->head->parent_offset = 0; + ctx->new_packmode = '@'; + ctx->enc_packmode = '@'; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->is_complex = 0; + ctx->is_valid_array = 0; + ctx->struct_alignment = 0; + while (type->typegroup == 'S') { + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = 0; + type = type->fields->type; + } +} + +static int __Pyx_BufFmt_ParseNumber(const char** ts) { + int count; + const char* t = *ts; + if (*t < '0' || *t > '9') { + return -1; + } else { + count = *t++ - '0'; + while (*t >= '0' && *t <= '9') { + count *= 10; + count += *t++ - '0'; + } + } + *ts = t; + return count; +} + +static int __Pyx_BufFmt_ExpectNumber(const char **ts) { + int number = __Pyx_BufFmt_ParseNumber(ts); + if (number == -1) /* First char was not a digit */ + PyErr_Format(PyExc_ValueError,\ + "Does not understand character buffer dtype format string ('%c')", **ts); + return number; +} + + +static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) { + PyErr_Format(PyExc_ValueError, + "Unexpected format string character: '%c'", ch); +} + +static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) { + switch (ch) { + case '?': return "'bool'"; + case 'c': return "'char'"; + case 'b': return "'signed char'"; + case 'B': return "'unsigned char'"; + case 'h': return "'short'"; + case 'H': return "'unsigned short'"; + case 'i': return "'int'"; + case 'I': return "'unsigned int'"; + case 'l': return "'long'"; + case 'L': return "'unsigned long'"; + case 'q': return "'long long'"; + case 'Q': return "'unsigned long long'"; + case 'f': return (is_complex ? "'complex float'" : "'float'"); + case 'd': return (is_complex ? "'complex double'" : "'double'"); + case 'g': return (is_complex ? "'complex long double'" : "'long double'"); + case 'T': return "a struct"; + case 'O': return "Python object"; + case 'P': return "a pointer"; + case 's': case 'p': return "a string"; + case 0: return "end"; + default: return "unparseable format string"; + } +} + +static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return 2; + case 'i': case 'I': case 'l': case 'L': return 4; + case 'q': case 'Q': return 8; + case 'f': return (is_complex ? 8 : 4); + case 'd': return (is_complex ? 
16 : 8);
+        case 'g': {
+            PyErr_SetString(PyExc_ValueError, "Python does not define a standard format string size for long double ('g').");
+            return 0;
+        }
+        case 'O': case 'P': return sizeof(void*);
+        default:
+            __Pyx_BufFmt_RaiseUnexpectedChar(ch);
+            return 0;
+    }
+}
+
+static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) {
+    switch (ch) {
+        case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1;
+        case 'h': case 'H': return sizeof(short);
+        case 'i': case 'I': return sizeof(int);
+        case 'l': case 'L': return sizeof(long);
+        #ifdef HAVE_LONG_LONG
+        case 'q': case 'Q': return sizeof(PY_LONG_LONG);
+        #endif
+        case 'f': return sizeof(float) * (is_complex ? 2 : 1);
+        case 'd': return sizeof(double) * (is_complex ? 2 : 1);
+        case 'g': return sizeof(long double) * (is_complex ? 2 : 1);
+        case 'O': case 'P': return sizeof(void*);
+        default: {
+            __Pyx_BufFmt_RaiseUnexpectedChar(ch);
+            return 0;
+        }
+    }
+}
+
+typedef struct { char c; short x; } __Pyx_st_short;
+typedef struct { char c; int x; } __Pyx_st_int;
+typedef struct { char c; long x; } __Pyx_st_long;
+typedef struct { char c; float x; } __Pyx_st_float;
+typedef struct { char c; double x; } __Pyx_st_double;
+typedef struct { char c; long double x; } __Pyx_st_longdouble;
+typedef struct { char c; void *x; } __Pyx_st_void_p;
+#ifdef HAVE_LONG_LONG
+typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong;
+#endif
+
+static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) {
+    switch (ch) {
+        case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1;
+        case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short);
+        case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int);
+        case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long);
+#ifdef HAVE_LONG_LONG
+        case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG);
+#endif
+        case 'f': return sizeof(__Pyx_st_float) - sizeof(float);
+        case 'd': return sizeof(__Pyx_st_double) - sizeof(double);
+        case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double);
+        case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*);
+        default:
+            __Pyx_BufFmt_RaiseUnexpectedChar(ch);
+            return 0;
+    }
+}
+
+/* These are for computing the padding at the end of the struct to align
+   on the first member of the struct. This will probably be the same as above,
+   but we don't have any guarantees.
+ */ +typedef struct { short x; char c; } __Pyx_pad_short; +typedef struct { int x; char c; } __Pyx_pad_int; +typedef struct { long x; char c; } __Pyx_pad_long; +typedef struct { float x; char c; } __Pyx_pad_float; +typedef struct { double x; char c; } __Pyx_pad_double; +typedef struct { long double x; char c; } __Pyx_pad_longdouble; +typedef struct { void *x; char c; } __Pyx_pad_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong; +#endif + +static size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_pad_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_pad_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_pad_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_pad_float) - sizeof(float); + case 'd': return sizeof(__Pyx_pad_double) - sizeof(double); + case 'g': return sizeof(__Pyx_pad_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_pad_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} + +static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) { + switch (ch) { + case 'c': + return 'H'; + case 'b': case 'h': case 'i': + case 'l': case 'q': case 's': case 'p': + return 'I'; + case '?': case 'B': case 'H': case 'I': case 'L': case 'Q': + return 'U'; + case 'f': case 'd': case 'g': + return (is_complex ? 'C' : 'R'); + case 'O': + return 'O'; + case 'P': + return 'P'; + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} + + +static void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) { + if (ctx->head == NULL || ctx->head->field == &ctx->root) { + const char* expected; + const char* quote; + if (ctx->head == NULL) { + expected = "end"; + quote = ""; + } else { + expected = ctx->head->field->type->name; + quote = "'"; + } + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected %s%s%s but got %s", + quote, expected, quote, + __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex)); + } else { + __Pyx_StructField* field = ctx->head->field; + __Pyx_StructField* parent = (ctx->head - 1)->field; + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'", + field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex), + parent->type->name, field->name); + } +} + +static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) { + char group; + size_t size, offset, arraysize = 1; + + /* printf("processing... 
%s\n", ctx->head->field->type->name); */ + + if (ctx->enc_type == 0) return 0; + + /* Validate array size */ + if (ctx->head->field->type->arraysize[0]) { + int i, ndim = 0; + + /* handle strings ('s' and 'p') */ + if (ctx->enc_type == 's' || ctx->enc_type == 'p') { + ctx->is_valid_array = ctx->head->field->type->ndim == 1; + ndim = 1; + if (ctx->enc_count != ctx->head->field->type->arraysize[0]) { + PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %zu", + ctx->head->field->type->arraysize[0], ctx->enc_count); + return -1; + } + } + + if (!ctx->is_valid_array) { + PyErr_Format(PyExc_ValueError, "Expected %d dimensions, got %d", + ctx->head->field->type->ndim, ndim); + return -1; + } + for (i = 0; i < ctx->head->field->type->ndim; i++) { + arraysize *= ctx->head->field->type->arraysize[i]; + } + ctx->is_valid_array = 0; + ctx->enc_count = 1; + } + + group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, ctx->is_complex); + do { + __Pyx_StructField* field = ctx->head->field; + __Pyx_TypeInfo* type = field->type; + + if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') { + size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex); + } else { + size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex); + } + + if (ctx->enc_packmode == '@') { + size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex); + size_t align_mod_offset; + if (align_at == 0) return -1; + align_mod_offset = ctx->fmt_offset % align_at; + if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset; + + if (ctx->struct_alignment == 0) + ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type, + ctx->is_complex); + } + + if (type->size != size || type->typegroup != group) { + if (type->typegroup == 'C' && type->fields != NULL) { + /* special case -- treat as struct rather than complex number */ + size_t parent_offset = ctx->head->parent_offset + field->offset; + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = parent_offset; + continue; + } + + if ((type->typegroup == 'H' || group == 'H') && type->size == size) { + /* special case -- chars don't care about sign */ + } else { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + } + + offset = ctx->head->parent_offset + field->offset; + if (ctx->fmt_offset != offset) { + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch; next field is at offset %" CYTHON_FORMAT_SSIZE_T "d but %" CYTHON_FORMAT_SSIZE_T "d expected", + (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset); + return -1; + } + + ctx->fmt_offset += size; + if (arraysize) + ctx->fmt_offset += (arraysize - 1) * size; + + --ctx->enc_count; /* Consume from buffer string */ + + /* Done checking, move to next field, pushing or popping struct stack if needed */ + while (1) { + if (field == &ctx->root) { + ctx->head = NULL; + if (ctx->enc_count != 0) { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + break; /* breaks both loops as ctx->enc_count == 0 */ + } + ctx->head->field = ++field; + if (field->type == NULL) { + --ctx->head; + field = ctx->head->field; + continue; + } else if (field->type->typegroup == 'S') { + size_t parent_offset = ctx->head->parent_offset + field->offset; + if (field->type->fields->type == NULL) continue; /* empty struct */ + field = field->type->fields; + ++ctx->head; + ctx->head->field = field; + ctx->head->parent_offset = parent_offset; + break; + } else { + break; + } + } + } while (ctx->enc_count); + ctx->enc_type = 0; + ctx->is_complex = 0; + return 0; 
+}
+
+/* Parse an array in the format string (e.g. (1,2,3)) */
+static PyObject *
+__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp)
+{
+    const char *ts = *tsp;
+    int i = 0, number, ndim;
+
+    ++ts;
+    if (ctx->new_count != 1) {
+        PyErr_SetString(PyExc_ValueError,
+                        "Cannot handle repeated arrays in format string");
+        return NULL;
+    }
+
+    /* Process the previous element */
+    if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
+
+    // store ndim now, as field advanced by __Pyx_BufFmt_ProcessTypeChunk call
+    ndim = ctx->head->field->type->ndim;
+
+    /* Parse all numbers in the format string */
+    while (*ts && *ts != ')') {
+        // ignore space characters (not using isspace() due to C/C++ problem on MacOS-X)
+        switch (*ts) {
+            case ' ': case '\f': case '\r': case '\n': case '\t': case '\v':
+                ++ts;  /* advance past the whitespace; a bare 'continue' here would loop forever */
+                continue;
+            default: break; /* not a 'break' in the loop */
+        }
+
+        number = __Pyx_BufFmt_ExpectNumber(&ts);
+        if (number == -1) return NULL;
+
+        if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i])
+            return PyErr_Format(PyExc_ValueError,
+                                "Expected a dimension of size %zu, got %d",
+                                ctx->head->field->type->arraysize[i], number);
+
+        if (*ts != ',' && *ts != ')')
+            return PyErr_Format(PyExc_ValueError,
+                                "Expected a comma in format string, got '%c'", *ts);
+
+        if (*ts == ',') ts++;
+        i++;
+    }
+
+    if (i != ndim)
+        return PyErr_Format(PyExc_ValueError, "Expected %d dimension(s), got %d",
+                            ctx->head->field->type->ndim, i);
+
+    if (!*ts) {
+        PyErr_SetString(PyExc_ValueError,
+                        "Unexpected end of format string, expected ')'");
+        return NULL;
+    }
+
+    ctx->is_valid_array = 1;
+    ctx->new_count = 1;
+    *tsp = ++ts;
+    return Py_None;
+}
+
+static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) {
+    int got_Z = 0;
+
+    while (1) {
+        /* puts(ts); */
+        switch(*ts) {
+            case 0:
+                if (ctx->enc_type != 0 && ctx->head == NULL) {
+                    __Pyx_BufFmt_RaiseExpected(ctx);
+                    return NULL;
+                }
+                if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
+                if (ctx->head != NULL) {
+                    __Pyx_BufFmt_RaiseExpected(ctx);
+                    return NULL;
+                }
+                return ts;
+            case ' ':
+            case '\r':
+            case '\n':
+                ++ts;
+                break;
+            case '<':
+                if (!__Pyx_Is_Little_Endian()) {
+                    PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler");
+                    return NULL;
+                }
+                ctx->new_packmode = '=';
+                ++ts;
+                break;
+            case '>':
+            case '!':
+                if (__Pyx_Is_Little_Endian()) {
+                    PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler");
+                    return NULL;
+                }
+                ctx->new_packmode = '=';
+                ++ts;
+                break;
+            case '=':
+            case '@':
+            case '^':
+                ctx->new_packmode = *ts++;
+                break;
+            case 'T': /* substruct */
+                {
+                    const char* ts_after_sub;
+                    size_t i, struct_count = ctx->new_count;
+                    size_t struct_alignment = ctx->struct_alignment;
+                    ctx->new_count = 1;
+                    ++ts;
+                    if (*ts != '{') {
+                        PyErr_SetString(PyExc_ValueError, "Buffer acquisition: Expected '{' after 'T'");
+                        return NULL;
+                    }
+                    if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
+                    ctx->enc_type = 0; /* Erase processed last struct element */
+                    ctx->enc_count = 0;
+                    ctx->struct_alignment = 0;
+                    ++ts;
+                    ts_after_sub = ts;
+                    for (i = 0; i != struct_count; ++i) {
+                        ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts);
+                        if (!ts_after_sub) return NULL;
+                    }
+                    ts = ts_after_sub;
+                    if (struct_alignment) ctx->struct_alignment = struct_alignment;
+                }
+                break;
+            case '}': /* end of substruct; either repeat or move on */
+                {
+                    size_t alignment = ctx->struct_alignment;
+                    ++ts;
+                    if
(__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + if (alignment && ctx->fmt_offset % alignment) { + /* Pad struct on size of the first member */ + ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment); + } + } + return ts; + case 'x': + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->fmt_offset += ctx->new_count; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->enc_packmode = ctx->new_packmode; + ++ts; + break; + case 'Z': + got_Z = 1; + ++ts; + if (*ts != 'f' && *ts != 'd' && *ts != 'g') { + __Pyx_BufFmt_RaiseUnexpectedChar('Z'); + return NULL; + } + CYTHON_FALLTHROUGH; + case '?': case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': + case 'l': case 'L': case 'q': case 'Q': + case 'f': case 'd': case 'g': + case 'O': case 'p': + if ((ctx->enc_type == *ts) && (got_Z == ctx->is_complex) && + (ctx->enc_packmode == ctx->new_packmode) && (!ctx->is_valid_array)) { + /* Continue pooling same type */ + ctx->enc_count += ctx->new_count; + ctx->new_count = 1; + got_Z = 0; + ++ts; + break; + } + CYTHON_FALLTHROUGH; + case 's': + /* 's' or new type (cannot be added to current pool) */ + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_count = ctx->new_count; + ctx->enc_packmode = ctx->new_packmode; + ctx->enc_type = *ts; + ctx->is_complex = got_Z; + ++ts; + ctx->new_count = 1; + got_Z = 0; + break; + case ':': + ++ts; + while(*ts != ':') ++ts; + ++ts; + break; + case '(': + if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL; + break; + default: + { + int number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + ctx->new_count = (size_t)number; + } + } + } +} + +/////////////// TypeInfoCompare.proto /////////////// +static int __pyx_typeinfo_cmp(__Pyx_TypeInfo *a, __Pyx_TypeInfo *b); + +/////////////// TypeInfoCompare /////////////// +//@requires: BufferFormatStructs + +// See if two dtypes are equal +static int +__pyx_typeinfo_cmp(__Pyx_TypeInfo *a, __Pyx_TypeInfo *b) +{ + int i; + + if (!a || !b) + return 0; + + if (a == b) + return 1; + + if (a->size != b->size || a->typegroup != b->typegroup || + a->is_unsigned != b->is_unsigned || a->ndim != b->ndim) { + if (a->typegroup == 'H' || b->typegroup == 'H') { + /* Special case for chars */ + return a->size == b->size; + } else { + return 0; + } + } + + if (a->ndim) { + /* Verify multidimensional C arrays */ + for (i = 0; i < a->ndim; i++) + if (a->arraysize[i] != b->arraysize[i]) + return 0; + } + + if (a->typegroup == 'S') { + /* Check for packed struct */ + if (a->flags != b->flags) + return 0; + + /* compare all struct fields */ + if (a->fields || b->fields) { + /* Check if both have fields */ + if (!(a->fields && b->fields)) + return 0; + + /* compare */ + for (i = 0; a->fields[i].type && b->fields[i].type; i++) { + __Pyx_StructField *field_a = a->fields + i; + __Pyx_StructField *field_b = b->fields + i; + + if (field_a->offset != field_b->offset || + !__pyx_typeinfo_cmp(field_a->type, field_b->type)) + return 0; + } + + /* If all fields are processed, we have a match */ + return !a->fields[i].type && !b->fields[i].type; + } + } + + return 1; +} + + +/////////////// TypeInfoToFormat.proto /////////////// +struct __pyx_typeinfo_string { + char string[3]; +}; +static struct __pyx_typeinfo_string __Pyx_TypeInfoToFormat(__Pyx_TypeInfo *type); + +/////////////// TypeInfoToFormat /////////////// +//@requires: BufferFormatStructs + +// See also 
MemoryView.pyx:BufferFormatFromTypeInfo + +static struct __pyx_typeinfo_string __Pyx_TypeInfoToFormat(__Pyx_TypeInfo *type) { + struct __pyx_typeinfo_string result = { {0} }; + char *buf = (char *) result.string; + size_t size = type->size; + + switch (type->typegroup) { + case 'H': + *buf = 'c'; + break; + case 'I': + case 'U': + if (size == 1) + *buf = (type->is_unsigned) ? 'B' : 'b'; + else if (size == 2) + *buf = (type->is_unsigned) ? 'H' : 'h'; + else if (size == 4) + *buf = (type->is_unsigned) ? 'I' : 'i'; + else if (size == 8) + *buf = (type->is_unsigned) ? 'Q' : 'q'; + break; + case 'P': + *buf = 'P'; + break; + case 'C': + { + __Pyx_TypeInfo complex_type = *type; + complex_type.typegroup = 'R'; + complex_type.size /= 2; + + *buf++ = 'Z'; + *buf = __Pyx_TypeInfoToFormat(&complex_type).string[0]; + break; + } + case 'R': + if (size == 4) + *buf = 'f'; + else if (size == 8) + *buf = 'd'; + else + *buf = 'g'; + break; + } + + return result; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Builtins.c b/venv/lib/python3.8/site-packages/Cython/Utility/Builtins.c new file mode 100644 index 0000000..27a5512 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Builtins.c @@ -0,0 +1,538 @@ +/* + * Special implementations of built-in functions and methods. + * + * Optional optimisations for builtins are in Optimize.c. + * + * General object operations and protocols are in ObjectHandling.c. + */ + +//////////////////// Globals.proto //////////////////// + +static PyObject* __Pyx_Globals(void); /*proto*/ + +//////////////////// Globals //////////////////// +//@substitute: naming +//@requires: ObjectHandling.c::GetAttr + +// This is a stub implementation until we have something more complete. +// Currently, we only handle the most common case of a read-only dict +// of Python names. Supporting cdef names in the module and write +// access requires a rewrite as a dedicated class. 
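+// For orientation, a rough Python-level sketch of what the helper below
+// does (names are illustrative only):
+//
+//   def _pyx_globals():
+//       g = module.__dict__
+//       for name in reversed(dir(module)):
+//           if name not in g:
+//               g[name] = getattr(module, name)
+//       return g   # the module dict itself, with a new reference
+//
+// i.e. any name reported by dir() that is missing from the module dict is
+// copied in via getattr() before the dict is returned.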
+ +static PyObject* __Pyx_Globals(void) { + Py_ssize_t i; + PyObject *names; + PyObject *globals = $moddict_cname; + Py_INCREF(globals); + names = PyObject_Dir($module_cname); + if (!names) + goto bad; + for (i = PyList_GET_SIZE(names)-1; i >= 0; i--) { +#if CYTHON_COMPILING_IN_PYPY + PyObject* name = PySequence_ITEM(names, i); + if (!name) + goto bad; +#else + PyObject* name = PyList_GET_ITEM(names, i); +#endif + if (!PyDict_Contains(globals, name)) { + PyObject* value = __Pyx_GetAttr($module_cname, name); + if (!value) { +#if CYTHON_COMPILING_IN_PYPY + Py_DECREF(name); +#endif + goto bad; + } + if (PyDict_SetItem(globals, name, value) < 0) { +#if CYTHON_COMPILING_IN_PYPY + Py_DECREF(name); +#endif + Py_DECREF(value); + goto bad; + } + } +#if CYTHON_COMPILING_IN_PYPY + Py_DECREF(name); +#endif + } + Py_DECREF(names); + return globals; +bad: + Py_XDECREF(names); + Py_XDECREF(globals); + return NULL; +} + +//////////////////// PyExecGlobals.proto //////////////////// + +static PyObject* __Pyx_PyExecGlobals(PyObject*); + +//////////////////// PyExecGlobals //////////////////// +//@requires: Globals +//@requires: PyExec + +static PyObject* __Pyx_PyExecGlobals(PyObject* code) { + PyObject* result; + PyObject* globals = __Pyx_Globals(); + if (unlikely(!globals)) + return NULL; + result = __Pyx_PyExec2(code, globals); + Py_DECREF(globals); + return result; +} + +//////////////////// PyExec.proto //////////////////// + +static PyObject* __Pyx_PyExec3(PyObject*, PyObject*, PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyExec2(PyObject*, PyObject*); + +//////////////////// PyExec //////////////////// +//@substitute: naming + +static CYTHON_INLINE PyObject* __Pyx_PyExec2(PyObject* o, PyObject* globals) { + return __Pyx_PyExec3(o, globals, NULL); +} + +static PyObject* __Pyx_PyExec3(PyObject* o, PyObject* globals, PyObject* locals) { + PyObject* result; + PyObject* s = 0; + char *code = 0; + + if (!globals || globals == Py_None) { + globals = $moddict_cname; + } else if (!PyDict_Check(globals)) { + PyErr_Format(PyExc_TypeError, "exec() arg 2 must be a dict, not %.200s", + Py_TYPE(globals)->tp_name); + goto bad; + } + if (!locals || locals == Py_None) { + locals = globals; + } + + if (__Pyx_PyDict_GetItemStr(globals, PYIDENT("__builtins__")) == NULL) { + if (PyDict_SetItem(globals, PYIDENT("__builtins__"), PyEval_GetBuiltins()) < 0) + goto bad; + } + + if (PyCode_Check(o)) { + if (__Pyx_PyCode_HasFreeVars((PyCodeObject *)o)) { + PyErr_SetString(PyExc_TypeError, + "code object passed to exec() may not contain free variables"); + goto bad; + } + #if CYTHON_COMPILING_IN_PYPY || PY_VERSION_HEX < 0x030200B1 + result = PyEval_EvalCode((PyCodeObject *)o, globals, locals); + #else + result = PyEval_EvalCode(o, globals, locals); + #endif + } else { + PyCompilerFlags cf; + cf.cf_flags = 0; + if (PyUnicode_Check(o)) { + cf.cf_flags = PyCF_SOURCE_IS_UTF8; + s = PyUnicode_AsUTF8String(o); + if (!s) goto bad; + o = s; + #if PY_MAJOR_VERSION >= 3 + } else if (!PyBytes_Check(o)) { + #else + } else if (!PyString_Check(o)) { + #endif + PyErr_Format(PyExc_TypeError, + "exec: arg 1 must be string, bytes or code object, got %.200s", + Py_TYPE(o)->tp_name); + goto bad; + } + #if PY_MAJOR_VERSION >= 3 + code = PyBytes_AS_STRING(o); + #else + code = PyString_AS_STRING(o); + #endif + if (PyEval_MergeCompilerFlags(&cf)) { + result = PyRun_StringFlags(code, Py_file_input, globals, locals, &cf); + } else { + result = PyRun_String(code, Py_file_input, globals, locals); + } + Py_XDECREF(s); + } + + return result; +bad: + 
Py_XDECREF(s); + return 0; +} + +//////////////////// GetAttr3.proto //////////////////// + +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /*proto*/ + +//////////////////// GetAttr3 //////////////////// +//@requires: ObjectHandling.c::GetAttr +//@requires: Exceptions.c::PyThreadStateGet +//@requires: Exceptions.c::PyErrFetchRestore +//@requires: Exceptions.c::PyErrExceptionMatches + +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} + +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r = __Pyx_GetAttr(o, n); + return (likely(r)) ? r : __Pyx_GetAttr3Default(d); +} + +//////////////////// HasAttr.proto //////////////////// + +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); /*proto*/ + +//////////////////// HasAttr //////////////////// +//@requires: ObjectHandling.c::GetAttr + +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (unlikely(!r)) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +//////////////////// Intern.proto //////////////////// + +static PyObject* __Pyx_Intern(PyObject* s); /* proto */ + +//////////////////// Intern //////////////////// + +static PyObject* __Pyx_Intern(PyObject* s) { + if (!(likely(PyString_CheckExact(s)))) { + PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(s)->tp_name); + return 0; + } + Py_INCREF(s); + #if PY_MAJOR_VERSION >= 3 + PyUnicode_InternInPlace(&s); + #else + PyString_InternInPlace(&s); + #endif + return s; +} + +//////////////////// abs_longlong.proto //////////////////// + +static CYTHON_INLINE PY_LONG_LONG __Pyx_abs_longlong(PY_LONG_LONG x) { +#if defined (__cplusplus) && __cplusplus >= 201103L + return std::abs(x); +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + return llabs(x); +#elif defined (_MSC_VER) + // abs() is defined for long, but 64-bits type on MSVC is long long. + // Use MS-specific _abs64() instead, which returns the original (negative) value for abs(-MAX-1) + return _abs64(x); +#elif defined (__GNUC__) + // gcc or clang on 64 bit windows. + return __builtin_llabs(x); +#else + if (sizeof(PY_LONG_LONG) <= sizeof(Py_ssize_t)) + return __Pyx_sst_abs(x); + return (x<0) ? -x : x; +#endif +} + + +//////////////////// py_abs.proto //////////////////// + +#if CYTHON_USE_PYLONG_INTERNALS +static PyObject *__Pyx_PyLong_AbsNeg(PyObject *num);/*proto*/ + +#define __Pyx_PyNumber_Absolute(x) \ + ((likely(PyLong_CheckExact(x))) ? \ + (likely(Py_SIZE(x) >= 0) ? 
(Py_INCREF(x), (x)) : __Pyx_PyLong_AbsNeg(x)) : \ + PyNumber_Absolute(x)) + +#else +#define __Pyx_PyNumber_Absolute(x) PyNumber_Absolute(x) +#endif + +//////////////////// py_abs //////////////////// + +#if CYTHON_USE_PYLONG_INTERNALS +static PyObject *__Pyx_PyLong_AbsNeg(PyObject *n) { + if (likely(Py_SIZE(n) == -1)) { + // digits are unsigned + return PyLong_FromLong(((PyLongObject*)n)->ob_digit[0]); + } +#if CYTHON_COMPILING_IN_CPYTHON + { + PyObject *copy = _PyLong_Copy((PyLongObject*)n); + if (likely(copy)) { + Py_SIZE(copy) = -(Py_SIZE(copy)); + } + return copy; + } +#else + return PyNumber_Negative(n); +#endif +} +#endif + + +//////////////////// pow2.proto //////////////////// + +#define __Pyx_PyNumber_Power2(a, b) PyNumber_Power(a, b, Py_None) + + +//////////////////// object_ord.proto //////////////////// +//@requires: TypeConversion.c::UnicodeAsUCS4 + +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyObject_Ord(c) \ + (likely(PyUnicode_Check(c)) ? (long)__Pyx_PyUnicode_AsPy_UCS4(c) : __Pyx__PyObject_Ord(c)) +#else +#define __Pyx_PyObject_Ord(c) __Pyx__PyObject_Ord(c) +#endif +static long __Pyx__PyObject_Ord(PyObject* c); /*proto*/ + +//////////////////// object_ord //////////////////// + +static long __Pyx__PyObject_Ord(PyObject* c) { + Py_ssize_t size; + if (PyBytes_Check(c)) { + size = PyBytes_GET_SIZE(c); + if (likely(size == 1)) { + return (unsigned char) PyBytes_AS_STRING(c)[0]; + } +#if PY_MAJOR_VERSION < 3 + } else if (PyUnicode_Check(c)) { + return (long)__Pyx_PyUnicode_AsPy_UCS4(c); +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + } else if (PyByteArray_Check(c)) { + size = PyByteArray_GET_SIZE(c); + if (likely(size == 1)) { + return (unsigned char) PyByteArray_AS_STRING(c)[0]; + } +#endif + } else { + // FIXME: support character buffers - but CPython doesn't support them either + PyErr_Format(PyExc_TypeError, + "ord() expected string of length 1, but %.200s found", c->ob_type->tp_name); + return (long)(Py_UCS4)-1; + } + PyErr_Format(PyExc_TypeError, + "ord() expected a character, but string of length %zd found", size); + return (long)(Py_UCS4)-1; +} + + +//////////////////// py_dict_keys.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_Keys(PyObject* d); /*proto*/ + +//////////////////// py_dict_keys //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_Keys(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "keys", d); + else + return PyDict_Keys(d); +} + +//////////////////// py_dict_values.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d); /*proto*/ + +//////////////////// py_dict_values //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "values", d); + else + return PyDict_Values(d); +} + +//////////////////// py_dict_items.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_Items(PyObject* d); /*proto*/ + +//////////////////// py_dict_items //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_Items(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "items", d); + else + return PyDict_Items(d); +} + +//////////////////// py_dict_iterkeys.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_IterKeys(PyObject* d); /*proto*/ + +//////////////////// py_dict_iterkeys //////////////////// + +static 
CYTHON_INLINE PyObject* __Pyx_PyDict_IterKeys(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "keys", d); + else + return CALL_UNBOUND_METHOD(PyDict_Type, "iterkeys", d); +} + +//////////////////// py_dict_itervalues.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_IterValues(PyObject* d); /*proto*/ + +//////////////////// py_dict_itervalues //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_IterValues(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "values", d); + else + return CALL_UNBOUND_METHOD(PyDict_Type, "itervalues", d); +} + +//////////////////// py_dict_iteritems.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_IterItems(PyObject* d); /*proto*/ + +//////////////////// py_dict_iteritems //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_IterItems(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "items", d); + else + return CALL_UNBOUND_METHOD(PyDict_Type, "iteritems", d); +} + +//////////////////// py_dict_viewkeys.proto //////////////////// + +#if PY_VERSION_HEX < 0x02070000 +#error This module uses dict views, which require Python 2.7 or later +#endif +static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewKeys(PyObject* d); /*proto*/ + +//////////////////// py_dict_viewkeys //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewKeys(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "keys", d); + else + return CALL_UNBOUND_METHOD(PyDict_Type, "viewkeys", d); +} + +//////////////////// py_dict_viewvalues.proto //////////////////// + +#if PY_VERSION_HEX < 0x02070000 +#error This module uses dict views, which require Python 2.7 or later +#endif +static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewValues(PyObject* d); /*proto*/ + +//////////////////// py_dict_viewvalues //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewValues(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "values", d); + else + return CALL_UNBOUND_METHOD(PyDict_Type, "viewvalues", d); +} + +//////////////////// py_dict_viewitems.proto //////////////////// + +#if PY_VERSION_HEX < 0x02070000 +#error This module uses dict views, which require Python 2.7 or later +#endif +static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewItems(PyObject* d); /*proto*/ + +//////////////////// py_dict_viewitems //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewItems(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return CALL_UNBOUND_METHOD(PyDict_Type, "items", d); + else + return CALL_UNBOUND_METHOD(PyDict_Type, "viewitems", d); +} + + +//////////////////// pyfrozenset_new.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it); + +//////////////////// pyfrozenset_new //////////////////// +//@substitute: naming + +static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it) { + if (it) { + PyObject* result; +#if CYTHON_COMPILING_IN_PYPY + // PyPy currently lacks PyFrozenSet_CheckExact() and PyFrozenSet_New() + PyObject* args; + args = PyTuple_Pack(1, it); + if (unlikely(!args)) + return NULL; + result = PyObject_Call((PyObject*)&PyFrozenSet_Type, args, NULL); + Py_DECREF(args); + return result; +#else + if (PyFrozenSet_CheckExact(it)) { + Py_INCREF(it); + return it; + } + result = PyFrozenSet_New(it); + if (unlikely(!result)) + return NULL; + if (likely(PySet_GET_SIZE(result))) + 
return result; + // empty frozenset is a singleton + // seems wasteful, but CPython does the same + Py_DECREF(result); +#endif + } +#if CYTHON_USE_TYPE_SLOTS + return PyFrozenSet_Type.tp_new(&PyFrozenSet_Type, $empty_tuple, NULL); +#else + return PyObject_Call((PyObject*)&PyFrozenSet_Type, $empty_tuple, NULL); +#endif +} + + +//////////////////// PySet_Update.proto //////////////////// + +static CYTHON_INLINE int __Pyx_PySet_Update(PyObject* set, PyObject* it); /*proto*/ + +//////////////////// PySet_Update //////////////////// + +static CYTHON_INLINE int __Pyx_PySet_Update(PyObject* set, PyObject* it) { + PyObject *retval; + #if CYTHON_USE_TYPE_SLOTS && !CYTHON_COMPILING_IN_PYPY + if (PyAnySet_Check(it)) { + if (PySet_GET_SIZE(it) == 0) + return 0; + // fast and safe case: CPython will update our result set and return it + retval = PySet_Type.tp_as_number->nb_inplace_or(set, it); + if (likely(retval == set)) { + Py_DECREF(retval); + return 0; + } + if (unlikely(!retval)) + return -1; + // unusual result, fall through to set.update() call below + Py_DECREF(retval); + } + #endif + retval = CALL_UNBOUND_METHOD(PySet_Type, "update", set, it); + if (unlikely(!retval)) return -1; + Py_DECREF(retval); + return 0; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/CConvert.pyx b/venv/lib/python3.8/site-packages/Cython/Utility/CConvert.pyx new file mode 100644 index 0000000..5969f6a --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/CConvert.pyx @@ -0,0 +1,132 @@ +#################### FromPyStructUtility #################### + +cdef extern from *: + ctypedef struct PyTypeObject: + char* tp_name + PyTypeObject *Py_TYPE(obj) + bint PyMapping_Check(obj) + object PyErr_Format(exc, const char *format, ...) + +@cname("{{funcname}}") +cdef {{struct_type}} {{funcname}}(obj) except *: + cdef {{struct_type}} result + if not PyMapping_Check(obj): + PyErr_Format(TypeError, b"Expected %.16s, got %.200s", b"a mapping", Py_TYPE(obj).tp_name) + + {{for member in var_entries:}} + try: + value = obj['{{member.name}}'] + except KeyError: + raise ValueError("No value specified for struct attribute '{{member.name}}'") + result.{{member.cname}} = value + {{endfor}} + return result + + +#################### FromPyUnionUtility #################### + +cdef extern from *: + ctypedef struct PyTypeObject: + char* tp_name + PyTypeObject *Py_TYPE(obj) + bint PyMapping_Check(obj) + object PyErr_Format(exc, const char *format, ...) 
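+# Illustration (hypothetical C union, not part of the template): for
+#
+#     union Value { long i; double f; };
+#
+# the converter generated below accepts a mapping that sets exactly one
+# member, e.g. ``value_from_py({'i': 42})``.  Passing both ``i`` and ``f``
+# raises ValueError ("More than one union attribute passed"), as does
+# passing an empty mapping.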
+ +@cname("{{funcname}}") +cdef {{struct_type}} {{funcname}}(obj) except *: + cdef {{struct_type}} result + cdef Py_ssize_t length + if not PyMapping_Check(obj): + PyErr_Format(TypeError, b"Expected %.16s, got %.200s", b"a mapping", Py_TYPE(obj).tp_name) + + last_found = None + length = len(obj) + if length: + {{for member in var_entries:}} + if '{{member.name}}' in obj: + if last_found is not None: + raise ValueError("More than one union attribute passed: '%s' and '%s'" % (last_found, '{{member.name}}')) + last_found = '{{member.name}}' + result.{{member.cname}} = obj['{{member.name}}'] + length -= 1 + if not length: + return result + {{endfor}} + if last_found is None: + raise ValueError("No value specified for any of the union attributes (%s)" % + '{{", ".join(member.name for member in var_entries)}}') + return result + + +#################### cfunc.to_py #################### + +@cname("{{cname}}") +cdef object {{cname}}({{return_type.ctype}} (*f)({{ ', '.join(arg.type_cname for arg in args) }}) {{except_clause}}): + def wrap({{ ', '.join('{arg.ctype} {arg.name}'.format(arg=arg) for arg in args) }}): + """wrap({{', '.join(('{arg.name}: {arg.type_displayname}'.format(arg=arg) if arg.type_displayname else arg.name) for arg in args)}}){{if return_type.type_displayname}} -> {{return_type.type_displayname}}{{endif}}""" + {{'' if return_type.type.is_void else 'return '}}f({{ ', '.join(arg.name for arg in args) }}) + return wrap + + +#################### carray.from_py #################### + +cdef extern from *: + object PyErr_Format(exc, const char *format, ...) + +@cname("{{cname}}") +cdef int {{cname}}(object o, {{base_type}} *v, Py_ssize_t length) except -1: + cdef Py_ssize_t i = length + try: + i = len(o) + except (TypeError, OverflowError): + pass + if i == length: + for i, item in enumerate(o): + if i >= length: + break + v[i] = item + else: + i += 1 # convert index to length + if i == length: + return 0 + + PyErr_Format( + IndexError, + ("too many values found during array assignment, expected %zd" + if i >= length else + "not enough values found during array assignment, expected %zd, got %zd"), + length, i) + + +#################### carray.to_py #################### + +cdef extern from *: + void Py_INCREF(object o) + tuple PyTuple_New(Py_ssize_t size) + list PyList_New(Py_ssize_t size) + void PyTuple_SET_ITEM(object p, Py_ssize_t pos, object o) + void PyList_SET_ITEM(object p, Py_ssize_t pos, object o) + + +@cname("{{cname}}") +cdef inline list {{cname}}({{base_type}} *v, Py_ssize_t length): + cdef size_t i + cdef object value + l = PyList_New(length) + for i in range(length): + value = v[i] + Py_INCREF(value) + PyList_SET_ITEM(l, i, value) + return l + + +@cname("{{to_tuple_cname}}") +cdef inline tuple {{to_tuple_cname}}({{base_type}} *v, Py_ssize_t length): + cdef size_t i + cdef object value + t = PyTuple_New(length) + for i in range(length): + value = v[i] + Py_INCREF(value) + PyTuple_SET_ITEM(t, i, value) + return t diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/CMath.c b/venv/lib/python3.8/site-packages/Cython/Utility/CMath.c new file mode 100644 index 0000000..2cd2223 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/CMath.c @@ -0,0 +1,95 @@ + +/////////////// CDivisionWarning.proto /////////////// + +static int __Pyx_cdivision_warning(const char *, int); /* proto */ + +/////////////// CDivisionWarning /////////////// + +static int __Pyx_cdivision_warning(const char *filename, int lineno) { +#if CYTHON_COMPILING_IN_PYPY + // avoid compiler 
warnings + filename++; lineno++; + return PyErr_Warn(PyExc_RuntimeWarning, + "division with oppositely signed operands, C and Python semantics differ"); +#else + return PyErr_WarnExplicit(PyExc_RuntimeWarning, + "division with oppositely signed operands, C and Python semantics differ", + filename, + lineno, + __Pyx_MODULE_NAME, + NULL); +#endif +} + + +/////////////// DivInt.proto /////////////// + +static CYTHON_INLINE %(type)s __Pyx_div_%(type_name)s(%(type)s, %(type)s); /* proto */ + +/////////////// DivInt /////////////// + +static CYTHON_INLINE %(type)s __Pyx_div_%(type_name)s(%(type)s a, %(type)s b) { + %(type)s q = a / b; + %(type)s r = a - q*b; + q -= ((r != 0) & ((r ^ b) < 0)); + return q; +} + + +/////////////// ModInt.proto /////////////// + +static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s, %(type)s); /* proto */ + +/////////////// ModInt /////////////// + +static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s a, %(type)s b) { + %(type)s r = a %% b; + r += ((r != 0) & ((r ^ b) < 0)) * b; + return r; +} + + +/////////////// ModFloat.proto /////////////// + +static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s, %(type)s); /* proto */ + +/////////////// ModFloat /////////////// + +static CYTHON_INLINE %(type)s __Pyx_mod_%(type_name)s(%(type)s a, %(type)s b) { + %(type)s r = fmod%(math_h_modifier)s(a, b); + r += ((r != 0) & ((r < 0) ^ (b < 0))) * b; + return r; +} + + +/////////////// IntPow.proto /////////////// + +static CYTHON_INLINE %(type)s %(func_name)s(%(type)s, %(type)s); /* proto */ + +/////////////// IntPow /////////////// + +static CYTHON_INLINE %(type)s %(func_name)s(%(type)s b, %(type)s e) { + %(type)s t = b; + switch (e) { + case 3: + t *= b; + CYTHON_FALLTHROUGH; + case 2: + t *= b; + CYTHON_FALLTHROUGH; + case 1: + return t; + case 0: + return 1; + } + #if %(signed)s + if (unlikely(e<0)) return 0; + #endif + t = 1; + while (likely(e)) { + t *= (b * (e&1)) | ((~e)&1); /* 1 or b */ + b *= b; + e >>= 1; + } + return t; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Capsule.c b/venv/lib/python3.8/site-packages/Cython/Utility/Capsule.c new file mode 100644 index 0000000..cc4fe0d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Capsule.c @@ -0,0 +1,20 @@ +//////////////// Capsule.proto //////////////// + +/* Todo: wrap the rest of the functionality in similar functions */ +static CYTHON_INLINE PyObject *__pyx_capsule_create(void *p, const char *sig); + +//////////////// Capsule //////////////// + +static CYTHON_INLINE PyObject * +__pyx_capsule_create(void *p, CYTHON_UNUSED const char *sig) +{ + PyObject *cobj; + +#if PY_VERSION_HEX >= 0x02070000 + cobj = PyCapsule_New(p, sig, NULL); +#else + cobj = PyCObject_FromVoidPtr(p, NULL); +#endif + + return cobj; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/CommonStructures.c b/venv/lib/python3.8/site-packages/Cython/Utility/CommonStructures.c new file mode 100644 index 0000000..c7945fe --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/CommonStructures.c @@ -0,0 +1,86 @@ +/////////////// FetchCommonType.proto /////////////// + +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); + +/////////////// FetchCommonType /////////////// + +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { + PyObject* fake_module; + PyTypeObject* cached_type = NULL; + + fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI); + if (!fake_module) return NULL; + Py_INCREF(fake_module); + + cached_type = (PyTypeObject*) 
PyObject_GetAttrString(fake_module, type->tp_name); + if (cached_type) { + if (!PyType_Check((PyObject*)cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", + type->tp_name); + goto bad; + } + if (cached_type->tp_basicsize != type->tp_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + type->tp_name); + goto bad; + } + } else { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; + } + +done: + Py_DECREF(fake_module); + // NOTE: always returns owned reference, or NULL on error + return cached_type; + +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} + + +/////////////// FetchCommonPointer.proto /////////////// + +static void* __Pyx_FetchCommonPointer(void* pointer, const char* name); + +/////////////// FetchCommonPointer /////////////// + + +static void* __Pyx_FetchCommonPointer(void* pointer, const char* name) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject* fake_module = NULL; + PyObject* capsule = NULL; + void* value = NULL; + + fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI); + if (!fake_module) return NULL; + Py_INCREF(fake_module); + + capsule = PyObject_GetAttrString(fake_module, name); + if (!capsule) { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + capsule = PyCapsule_New(pointer, name, NULL); + if (!capsule) goto bad; + if (PyObject_SetAttrString(fake_module, name, capsule) < 0) + goto bad; + } + value = PyCapsule_GetPointer(capsule, name); + +bad: + Py_XDECREF(capsule); + Py_DECREF(fake_module); + return value; +#else + return pointer; +#endif +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Complex.c b/venv/lib/python3.8/site-packages/Cython/Utility/Complex.c new file mode 100644 index 0000000..28062a0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Complex.c @@ -0,0 +1,291 @@ +/////////////// Header.proto /////////////// +//@proto_block: h_code + +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include + #else + #include + #endif +#endif + +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +/////////////// RealImag.proto /////////////// + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #define __Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif + +#if defined(__cplusplus) && CYTHON_CCOMPLEX \ + && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103) + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + + +/////////////// Declarations.proto /////////////// +//@proto_block: complex_type_declarations + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef 
::std::complex< {{real_type}} > {{type_name}}; + #else + typedef {{real_type}} _Complex {{type_name}}; + #endif +#else + typedef struct { {{real_type}} real, imag; } {{type_name}}; +#endif + +static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}}, {{real_type}}); + +/////////////// Declarations /////////////// + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) { + return ::std::complex< {{real_type}} >(x, y); + } + #else + static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) { + return x + y*({{type}})_Complex_I; + } + #endif +#else + static CYTHON_INLINE {{type}} {{type_name}}_from_parts({{real_type}} x, {{real_type}} y) { + {{type}} z; + z.real = x; + z.imag = y; + return z; + } +#endif + + +/////////////// ToPy.proto /////////////// + +#define __pyx_PyComplex_FromComplex(z) \ + PyComplex_FromDoubles((double)__Pyx_CREAL(z), \ + (double)__Pyx_CIMAG(z)) + + +/////////////// FromPy.proto /////////////// + +static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject*); + +/////////////// FromPy /////////////// + +static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject* o) { + Py_complex cval; +#if !CYTHON_COMPILING_IN_PYPY + if (PyComplex_CheckExact(o)) + cval = ((PyComplexObject *)o)->cval; + else +#endif + cval = PyComplex_AsCComplex(o); + return {{type_name}}_from_parts( + ({{real_type}})cval.real, + ({{real_type}})cval.imag); +} + + +/////////////// Arithmetic.proto /////////////// + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq{{func_suffix}}(a, b) ((a)==(b)) + #define __Pyx_c_sum{{func_suffix}}(a, b) ((a)+(b)) + #define __Pyx_c_diff{{func_suffix}}(a, b) ((a)-(b)) + #define __Pyx_c_prod{{func_suffix}}(a, b) ((a)*(b)) + #define __Pyx_c_quot{{func_suffix}}(a, b) ((a)/(b)) + #define __Pyx_c_neg{{func_suffix}}(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero{{func_suffix}}(z) ((z)==({{real_type}})0) + #define __Pyx_c_conj{{func_suffix}}(z) (::std::conj(z)) + #if {{is_float}} + #define __Pyx_c_abs{{func_suffix}}(z) (::std::abs(z)) + #define __Pyx_c_pow{{func_suffix}}(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero{{func_suffix}}(z) ((z)==0) + #define __Pyx_c_conj{{func_suffix}}(z) (conj{{m}}(z)) + #if {{is_float}} + #define __Pyx_c_abs{{func_suffix}}(z) (cabs{{m}}(z)) + #define __Pyx_c_pow{{func_suffix}}(a, b) (cpow{{m}}(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq{{func_suffix}}({{type}}, {{type}}); + static CYTHON_INLINE {{type}} __Pyx_c_sum{{func_suffix}}({{type}}, {{type}}); + static CYTHON_INLINE {{type}} __Pyx_c_diff{{func_suffix}}({{type}}, {{type}}); + static CYTHON_INLINE {{type}} __Pyx_c_prod{{func_suffix}}({{type}}, {{type}}); + static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}}, {{type}}); + static CYTHON_INLINE {{type}} __Pyx_c_neg{{func_suffix}}({{type}}); + static CYTHON_INLINE int __Pyx_c_is_zero{{func_suffix}}({{type}}); + static CYTHON_INLINE {{type}} __Pyx_c_conj{{func_suffix}}({{type}}); + #if {{is_float}} + static CYTHON_INLINE {{real_type}} __Pyx_c_abs{{func_suffix}}({{type}}); + static CYTHON_INLINE {{type}} __Pyx_c_pow{{func_suffix}}({{type}}, {{type}}); + #endif +#endif + +/////////////// Arithmetic /////////////// + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq{{func_suffix}}({{type}} a, {{type}} b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE {{type}} __Pyx_c_sum{{func_suffix}}({{type}} a, {{type}} b) { + {{type}} 
z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE {{type}} __Pyx_c_diff{{func_suffix}}({{type}} a, {{type}} b) { + {{type}} z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE {{type}} __Pyx_c_prod{{func_suffix}}({{type}} a, {{type}} b) { + {{type}} z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + + #if {{is_float}} + static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}} a, {{type}} b) { + if (b.imag == 0) { + return {{type_name}}_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabs{{m}}(b.real) >= fabs{{m}}(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return {{type_name}}_from_parts(a.real / b.real, a.imag / b.imag); + } else { + {{real_type}} r = b.imag / b.real; + {{real_type}} s = ({{real_type}})(1.0) / (b.real + b.imag * r); + return {{type_name}}_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + {{real_type}} r = b.real / b.imag; + {{real_type}} s = ({{real_type}})(1.0) / (b.imag + b.real * r); + return {{type_name}}_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE {{type}} __Pyx_c_quot{{func_suffix}}({{type}} a, {{type}} b) { + if (b.imag == 0) { + return {{type_name}}_from_parts(a.real / b.real, a.imag / b.real); + } else { + {{real_type}} denom = b.real * b.real + b.imag * b.imag; + return {{type_name}}_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + (a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + + static CYTHON_INLINE {{type}} __Pyx_c_neg{{func_suffix}}({{type}} a) { + {{type}} z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero{{func_suffix}}({{type}} a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE {{type}} __Pyx_c_conj{{func_suffix}}({{type}} a) { + {{type}} z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if {{is_float}} + static CYTHON_INLINE {{real_type}} __Pyx_c_abs{{func_suffix}}({{type}} z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt{{m}}(z.real*z.real + z.imag*z.imag); + #else + return hypot{{m}}(z.real, z.imag); + #endif + } + static CYTHON_INLINE {{type}} __Pyx_c_pow{{func_suffix}}({{type}} a, {{type}} b) { + {{type}} z; + {{real_type}} r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + {{real_type}} denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + return __Pyx_c_prod{{func_suffix}}(a, a); + case 3: + z = __Pyx_c_prod{{func_suffix}}(a, a); + return __Pyx_c_prod{{func_suffix}}(z, a); + case 4: + z = __Pyx_c_prod{{func_suffix}}(a, a); + return __Pyx_c_prod{{func_suffix}}(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = pow{{m}}(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2{{m}}(0.0, -1.0); + } + } else { + r = __Pyx_c_abs{{func_suffix}}(a); + theta = atan2{{m}}(a.imag, a.real); + } + lnr = log{{m}}(r); + z_r = exp{{m}}(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos{{m}}(z_theta); + z.imag = z_r * sin{{m}}(z_theta); + return z; + } + #endif 
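+// Note on __Pyx_c_pow above: writing the base in polar form a = r*e^(i*theta)
+// with exponent b = c + d*i gives
+//     a**b = exp((c + d*i) * (ln r + i*theta)),
+// hence |a**b| = exp(c*ln r - d*theta) and arg(a**b) = c*theta + d*ln r,
+// which is exactly what the z_r / z_theta computation evaluates.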
+#endif diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Coroutine.c b/venv/lib/python3.8/site-packages/Cython/Utility/Coroutine.c new file mode 100644 index 0000000..abdf4f3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Coroutine.c @@ -0,0 +1,2312 @@ +//////////////////// GeneratorYieldFrom.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_CoroutineObject *gen, PyObject *source); + +//////////////////// GeneratorYieldFrom //////////////////// +//@requires: Generator + +static void __PyxPyIter_CheckErrorAndDecref(PyObject *source) { + PyErr_Format(PyExc_TypeError, + "iter() returned non-iterator of type '%.100s'", + Py_TYPE(source)->tp_name); + Py_DECREF(source); +} + +static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_CoroutineObject *gen, PyObject *source) { + PyObject *source_gen, *retval; +#ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(source)) { + // TODO: this should only happen for types.coroutine()ed generators, but we can't determine that here + Py_INCREF(source); + source_gen = source; + retval = __Pyx_Generator_Next(source); + } else +#endif + { +#if CYTHON_USE_TYPE_SLOTS + if (likely(Py_TYPE(source)->tp_iter)) { + source_gen = Py_TYPE(source)->tp_iter(source); + if (unlikely(!source_gen)) + return NULL; + if (unlikely(!PyIter_Check(source_gen))) { + __PyxPyIter_CheckErrorAndDecref(source_gen); + return NULL; + } + } else + // CPython also allows non-iterable sequences to be iterated over +#endif + { + source_gen = PyObject_GetIter(source); + if (unlikely(!source_gen)) + return NULL; + } + // source_gen is now the iterator, make the first next() call +#if CYTHON_USE_TYPE_SLOTS + retval = Py_TYPE(source_gen)->tp_iternext(source_gen); +#else + retval = PyIter_Next(source_gen); +#endif + } + if (likely(retval)) { + gen->yieldfrom = source_gen; + return retval; + } + Py_DECREF(source_gen); + return NULL; +} + + +//////////////////// CoroutineYieldFrom.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source); + +//////////////////// CoroutineYieldFrom //////////////////// +//@requires: Coroutine +//@requires: GetAwaitIter + +static PyObject* __Pyx__Coroutine_Yield_From_Generic(__pyx_CoroutineObject *gen, PyObject *source) { + PyObject *retval; + PyObject *source_gen = __Pyx__Coroutine_GetAwaitableIter(source); + if (unlikely(!source_gen)) { + return NULL; + } + // source_gen is now the iterator, make the first next() call + if (__Pyx_Coroutine_Check(source_gen)) { + retval = __Pyx_Generator_Next(source_gen); + } else { +#if CYTHON_USE_TYPE_SLOTS + retval = Py_TYPE(source_gen)->tp_iternext(source_gen); +#else + retval = PyIter_Next(source_gen); +#endif + } + if (retval) { + gen->yieldfrom = source_gen; + return retval; + } + Py_DECREF(source_gen); + return NULL; +} + +static CYTHON_INLINE PyObject* __Pyx_Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source) { + PyObject *retval; + if (__Pyx_Coroutine_Check(source)) { + if (unlikely(((__pyx_CoroutineObject*)source)->yieldfrom)) { + PyErr_SetString( + PyExc_RuntimeError, + "coroutine is being awaited already"); + return NULL; + } + retval = __Pyx_Generator_Next(source); +#ifdef __Pyx_AsyncGen_USED + // inlined "__pyx_PyAsyncGenASend" handling to avoid the series of generic calls + } else if (__pyx_PyAsyncGenASend_CheckExact(source)) { + retval = __Pyx_async_gen_asend_iternext(source); +#endif + } else { + return 
__Pyx__Coroutine_Yield_From_Generic(gen, source); + } + if (retval) { + Py_INCREF(source); + gen->yieldfrom = source; + } + return retval; +} + + +//////////////////// GetAwaitIter.proto //////////////////// + +static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAwaitableIter(PyObject *o); /*proto*/ +static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *o); /*proto*/ + +//////////////////// GetAwaitIter //////////////////// +//@requires: ObjectHandling.c::PyObjectGetMethod +//@requires: ObjectHandling.c::PyObjectCallNoArg +//@requires: ObjectHandling.c::PyObjectCallOneArg + +static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAwaitableIter(PyObject *o) { +#ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(o)) { + return __Pyx_NewRef(o); + } +#endif + return __Pyx__Coroutine_GetAwaitableIter(o); +} + + +static void __Pyx_Coroutine_AwaitableIterError(PyObject *source) { +#if PY_VERSION_HEX >= 0x030600B3 || defined(_PyErr_FormatFromCause) + _PyErr_FormatFromCause( + PyExc_TypeError, + "'async for' received an invalid object " + "from __anext__: %.100s", + Py_TYPE(source)->tp_name); +#elif PY_MAJOR_VERSION >= 3 + PyObject *exc, *val, *val2, *tb; + assert(PyErr_Occurred()); + PyErr_Fetch(&exc, &val, &tb); + PyErr_NormalizeException(&exc, &val, &tb); + if (tb != NULL) { + PyException_SetTraceback(val, tb); + Py_DECREF(tb); + } + Py_DECREF(exc); + assert(!PyErr_Occurred()); + PyErr_Format( + PyExc_TypeError, + "'async for' received an invalid object " + "from __anext__: %.100s", + Py_TYPE(source)->tp_name); + + PyErr_Fetch(&exc, &val2, &tb); + PyErr_NormalizeException(&exc, &val2, &tb); + Py_INCREF(val); + PyException_SetCause(val2, val); + PyException_SetContext(val2, val); + PyErr_Restore(exc, val2, tb); +#else + // since Py2 does not have exception chaining, it's better to avoid shadowing exceptions there + source++; +#endif +} + +// adapted from genobject.c in Py3.5 +static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *obj) { + PyObject *res; +#if CYTHON_USE_ASYNC_SLOTS + __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj); + if (likely(am && am->am_await)) { + res = (*am->am_await)(obj); + } else +#endif +#if PY_VERSION_HEX >= 0x030500B2 || defined(PyCoro_CheckExact) + if (PyCoro_CheckExact(obj)) { + return __Pyx_NewRef(obj); + } else +#endif +#if CYTHON_COMPILING_IN_CPYTHON && defined(CO_ITERABLE_COROUTINE) + if (PyGen_CheckExact(obj) && ((PyGenObject*)obj)->gi_code && ((PyCodeObject *)((PyGenObject*)obj)->gi_code)->co_flags & CO_ITERABLE_COROUTINE) { + // Python generator marked with "@types.coroutine" decorator + return __Pyx_NewRef(obj); + } else +#endif + { + PyObject *method = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, PYIDENT("__await__"), &method); + if (likely(is_method)) { + res = __Pyx_PyObject_CallOneArg(method, obj); + } else if (likely(method)) { + res = __Pyx_PyObject_CallNoArg(method); + } else + goto slot_error; + Py_DECREF(method); + } + if (unlikely(!res)) { + // surprisingly, CPython replaces the exception here... 
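+        // ("replaces" = the error coming out of __await__ is swapped for a
+        // TypeError, with the original exception attached as __cause__ /
+        // __context__ where the running Python supports chaining; see
+        // __Pyx_Coroutine_AwaitableIterError above)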
+ __Pyx_Coroutine_AwaitableIterError(obj); + goto bad; + } + if (unlikely(!PyIter_Check(res))) { + PyErr_Format(PyExc_TypeError, + "__await__() returned non-iterator of type '%.100s'", + Py_TYPE(res)->tp_name); + Py_CLEAR(res); + } else { + int is_coroutine = 0; + #ifdef __Pyx_Coroutine_USED + is_coroutine |= __Pyx_Coroutine_Check(res); + #endif + #if PY_VERSION_HEX >= 0x030500B2 || defined(PyCoro_CheckExact) + is_coroutine |= PyCoro_CheckExact(res); + #endif + if (unlikely(is_coroutine)) { + /* __await__ must return an *iterator*, not + a coroutine or another awaitable (see PEP 492) */ + PyErr_SetString(PyExc_TypeError, + "__await__() returned a coroutine"); + Py_CLEAR(res); + } + } + return res; +slot_error: + PyErr_Format(PyExc_TypeError, + "object %.100s can't be used in 'await' expression", + Py_TYPE(obj)->tp_name); +bad: + return NULL; +} + + +//////////////////// AsyncIter.proto //////////////////// + +static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAsyncIter(PyObject *o); /*proto*/ +static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *o); /*proto*/ + +//////////////////// AsyncIter //////////////////// +//@requires: GetAwaitIter +//@requires: ObjectHandling.c::PyObjectCallMethod0 + +static PyObject *__Pyx_Coroutine_GetAsyncIter_Generic(PyObject *obj) { +#if PY_VERSION_HEX < 0x030500B1 + { + PyObject *iter = __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__")); + if (likely(iter)) + return iter; + // FIXME: for the sake of a nicely conforming exception message, assume any AttributeError meant '__aiter__' + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + return NULL; + } +#else + // avoid C warning about 'unused function' + if ((0)) (void) __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__")); +#endif + + PyErr_Format(PyExc_TypeError, "'async for' requires an object with __aiter__ method, got %.100s", + Py_TYPE(obj)->tp_name); + return NULL; +} + + +static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAsyncIter(PyObject *obj) { +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(obj)) { + return __Pyx_NewRef(obj); + } +#endif +#if CYTHON_USE_ASYNC_SLOTS + { + __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj); + if (likely(am && am->am_aiter)) { + return (*am->am_aiter)(obj); + } + } +#endif + return __Pyx_Coroutine_GetAsyncIter_Generic(obj); +} + + +static PyObject *__Pyx__Coroutine_AsyncIterNext(PyObject *obj) { +#if PY_VERSION_HEX < 0x030500B1 + { + PyObject *value = __Pyx_PyObject_CallMethod0(obj, PYIDENT("__anext__")); + if (likely(value)) + return value; + } + // FIXME: for the sake of a nicely conforming exception message, assume any AttributeError meant '__anext__' + if (PyErr_ExceptionMatches(PyExc_AttributeError)) +#endif + PyErr_Format(PyExc_TypeError, "'async for' requires an object with __anext__ method, got %.100s", + Py_TYPE(obj)->tp_name); + return NULL; +} + + +static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *obj) { +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(obj)) { + return __Pyx_async_gen_anext(obj); + } +#endif +#if CYTHON_USE_ASYNC_SLOTS + { + __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj); + if (likely(am && am->am_anext)) { + return (*am->am_anext)(obj); + } + } +#endif + return __Pyx__Coroutine_AsyncIterNext(obj); +} + + +//////////////////// pep479.proto //////////////////// + +static void __Pyx_Generator_Replace_StopIteration(int in_async_gen); /*proto*/ + +//////////////////// pep479 //////////////////// +//@requires: Exceptions.c::GetException + +static void 
__Pyx_Generator_Replace_StopIteration(CYTHON_UNUSED int in_async_gen) { + PyObject *exc, *val, *tb, *cur_exc; + __Pyx_PyThreadState_declare + #ifdef __Pyx_StopAsyncIteration_USED + int is_async_stopiteration = 0; + #endif + + cur_exc = PyErr_Occurred(); + if (likely(!__Pyx_PyErr_GivenExceptionMatches(cur_exc, PyExc_StopIteration))) { + #ifdef __Pyx_StopAsyncIteration_USED + if (in_async_gen && unlikely(__Pyx_PyErr_GivenExceptionMatches(cur_exc, __Pyx_PyExc_StopAsyncIteration))) { + is_async_stopiteration = 1; + } else + #endif + return; + } + + __Pyx_PyThreadState_assign + // Chain exceptions by moving Stop(Async)Iteration to exc_info before creating the RuntimeError. + // In Py2.x, no chaining happens, but the exception still stays visible in exc_info. + __Pyx_GetException(&exc, &val, &tb); + Py_XDECREF(exc); + Py_XDECREF(val); + Py_XDECREF(tb); + PyErr_SetString(PyExc_RuntimeError, + #ifdef __Pyx_StopAsyncIteration_USED + is_async_stopiteration ? "async generator raised StopAsyncIteration" : + in_async_gen ? "async generator raised StopIteration" : + #endif + "generator raised StopIteration"); +} + + +//////////////////// CoroutineBase.proto //////////////////// +//@substitute: naming + +typedef PyObject *(*__pyx_coroutine_body_t)(PyObject *, PyThreadState *, PyObject *); + +#if CYTHON_USE_EXC_INFO_STACK +// See https://bugs.python.org/issue25612 +#define __Pyx_ExcInfoStruct _PyErr_StackItem +#else +// Minimal replacement struct for Py<3.7, without the Py3.7 exception state stack. +typedef struct { + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; +} __Pyx_ExcInfoStruct; +#endif + +typedef struct { + PyObject_HEAD + __pyx_coroutine_body_t body; + PyObject *closure; + __Pyx_ExcInfoStruct gi_exc_state; + PyObject *gi_weakreflist; + PyObject *classobj; + PyObject *yieldfrom; + PyObject *gi_name; + PyObject *gi_qualname; + PyObject *gi_modulename; + PyObject *gi_code; + int resume_label; + // using T_BOOL for property below requires char value + char is_running; +} __pyx_CoroutineObject; + +static __pyx_CoroutineObject *__Pyx__Coroutine_New( + PyTypeObject *type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name); /*proto*/ + +static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( + __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name); /*proto*/ + +static CYTHON_INLINE void __Pyx_Coroutine_ExceptionClear(__Pyx_ExcInfoStruct *self); +static int __Pyx_Coroutine_clear(PyObject *self); /*proto*/ +static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value); /*proto*/ +static PyObject *__Pyx_Coroutine_Close(PyObject *self); /*proto*/ +static PyObject *__Pyx_Coroutine_Throw(PyObject *gen, PyObject *args); /*proto*/ + +// macros for exception state swapping instead of inline functions to make use of the local thread state context +#if CYTHON_USE_EXC_INFO_STACK +#define __Pyx_Coroutine_SwapException(self) +#define __Pyx_Coroutine_ResetAndClearException(self) __Pyx_Coroutine_ExceptionClear(&(self)->gi_exc_state) +#else +#define __Pyx_Coroutine_SwapException(self) { \ + __Pyx_ExceptionSwap(&(self)->gi_exc_state.exc_type, &(self)->gi_exc_state.exc_value, &(self)->gi_exc_state.exc_traceback); \ + __Pyx_Coroutine_ResetFrameBackpointer(&(self)->gi_exc_state); \ + } +#define __Pyx_Coroutine_ResetAndClearException(self) { \ + __Pyx_ExceptionReset((self)->gi_exc_state.exc_type, 
(self)->gi_exc_state.exc_value, (self)->gi_exc_state.exc_traceback); \ + (self)->gi_exc_state.exc_type = (self)->gi_exc_state.exc_value = (self)->gi_exc_state.exc_traceback = NULL; \ + } +#endif + +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyGen_FetchStopIterationValue(pvalue) \ + __Pyx_PyGen__FetchStopIterationValue($local_tstate_cname, pvalue) +#else +#define __Pyx_PyGen_FetchStopIterationValue(pvalue) \ + __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, pvalue) +#endif +static int __Pyx_PyGen__FetchStopIterationValue(PyThreadState *tstate, PyObject **pvalue); /*proto*/ +static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStruct *exc_state); /*proto*/ + + +//////////////////// Coroutine.proto //////////////////// + +#define __Pyx_Coroutine_USED +static PyTypeObject *__pyx_CoroutineType = 0; +static PyTypeObject *__pyx_CoroutineAwaitType = 0; +#define __Pyx_Coroutine_CheckExact(obj) (Py_TYPE(obj) == __pyx_CoroutineType) +// __Pyx_Coroutine_Check(obj): see override for IterableCoroutine below +#define __Pyx_Coroutine_Check(obj) __Pyx_Coroutine_CheckExact(obj) +#define __Pyx_CoroutineAwait_CheckExact(obj) (Py_TYPE(obj) == __pyx_CoroutineAwaitType) + +#define __Pyx_Coroutine_New(body, code, closure, name, qualname, module_name) \ + __Pyx__Coroutine_New(__pyx_CoroutineType, body, code, closure, name, qualname, module_name) + +static int __pyx_Coroutine_init(void); /*proto*/ +static PyObject *__Pyx__Coroutine_await(PyObject *coroutine); /*proto*/ + +typedef struct { + PyObject_HEAD + PyObject *coroutine; +} __pyx_CoroutineAwaitObject; + +static PyObject *__Pyx_CoroutineAwait_Close(__pyx_CoroutineAwaitObject *self, PyObject *arg); /*proto*/ +static PyObject *__Pyx_CoroutineAwait_Throw(__pyx_CoroutineAwaitObject *self, PyObject *args); /*proto*/ + + +//////////////////// Generator.proto //////////////////// + +#define __Pyx_Generator_USED +static PyTypeObject *__pyx_GeneratorType = 0; +#define __Pyx_Generator_CheckExact(obj) (Py_TYPE(obj) == __pyx_GeneratorType) + +#define __Pyx_Generator_New(body, code, closure, name, qualname, module_name) \ + __Pyx__Coroutine_New(__pyx_GeneratorType, body, code, closure, name, qualname, module_name) + +static PyObject *__Pyx_Generator_Next(PyObject *self); +static int __pyx_Generator_init(void); /*proto*/ + + +//////////////////// AsyncGen //////////////////// +//@requires: AsyncGen.c::AsyncGenerator +// -> empty, only delegates to separate file + + +//////////////////// CoroutineBase //////////////////// +//@substitute: naming +//@requires: Exceptions.c::PyErrFetchRestore +//@requires: Exceptions.c::PyThreadStateGet +//@requires: Exceptions.c::SwapException +//@requires: Exceptions.c::RaiseException +//@requires: Exceptions.c::SaveResetException +//@requires: ObjectHandling.c::PyObjectCallMethod1 +//@requires: ObjectHandling.c::PyObjectGetAttrStr +//@requires: CommonStructures.c::FetchCommonType + +#include +#include + +#define __Pyx_Coroutine_Undelegate(gen) Py_CLEAR((gen)->yieldfrom) + +// If StopIteration exception is set, fetches its 'value' +// attribute if any, otherwise sets pvalue to None. +// +// Returns 0 if no exception or StopIteration is set. +// If any other exception is set, returns -1 and leaves +// pvalue unchanged. 
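+// The StopIteration 'value' can be delivered in several shapes, all of
+// which are normalised below:
+//   PyErr_SetNone(PyExc_StopIteration)           -> None
+//   raise StopIteration(x)  (instance, Py3.3+)   -> x
+//   PyErr_SetObject(..., a_tuple)                -> a_tuple[0], or None if empty
+//   PyErr_SetObject(..., other_object)           -> other_object itself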
+static int __Pyx_PyGen__FetchStopIterationValue(CYTHON_UNUSED PyThreadState *$local_tstate_cname, PyObject **pvalue) { + PyObject *et, *ev, *tb; + PyObject *value = NULL; + + __Pyx_ErrFetch(&et, &ev, &tb); + + if (!et) { + Py_XDECREF(tb); + Py_XDECREF(ev); + Py_INCREF(Py_None); + *pvalue = Py_None; + return 0; + } + + // most common case: plain StopIteration without or with separate argument + if (likely(et == PyExc_StopIteration)) { + if (!ev) { + Py_INCREF(Py_None); + value = Py_None; + } +#if PY_VERSION_HEX >= 0x030300A0 + else if (Py_TYPE(ev) == (PyTypeObject*)PyExc_StopIteration) { + value = ((PyStopIterationObject *)ev)->value; + Py_INCREF(value); + Py_DECREF(ev); + } +#endif + // PyErr_SetObject() and friends put the value directly into ev + else if (unlikely(PyTuple_Check(ev))) { + // if it's a tuple, it is interpreted as separate constructor arguments (surprise!) + if (PyTuple_GET_SIZE(ev) >= 1) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + value = PyTuple_GET_ITEM(ev, 0); + Py_INCREF(value); +#else + value = PySequence_ITEM(ev, 0); +#endif + } else { + Py_INCREF(Py_None); + value = Py_None; + } + Py_DECREF(ev); + } + else if (!__Pyx_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration)) { + // 'steal' reference to ev + value = ev; + } + if (likely(value)) { + Py_XDECREF(tb); + Py_DECREF(et); + *pvalue = value; + return 0; + } + } else if (!__Pyx_PyErr_GivenExceptionMatches(et, PyExc_StopIteration)) { + __Pyx_ErrRestore(et, ev, tb); + return -1; + } + + // otherwise: normalise and check what that gives us + PyErr_NormalizeException(&et, &ev, &tb); + if (unlikely(!PyObject_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration))) { + // looks like normalisation failed - raise the new exception + __Pyx_ErrRestore(et, ev, tb); + return -1; + } + Py_XDECREF(tb); + Py_DECREF(et); +#if PY_VERSION_HEX >= 0x030300A0 + value = ((PyStopIterationObject *)ev)->value; + Py_INCREF(value); + Py_DECREF(ev); +#else + { + PyObject* args = __Pyx_PyObject_GetAttrStr(ev, PYIDENT("args")); + Py_DECREF(ev); + if (likely(args)) { + value = PySequence_GetItem(args, 0); + Py_DECREF(args); + } + if (unlikely(!value)) { + __Pyx_ErrRestore(NULL, NULL, NULL); + Py_INCREF(Py_None); + value = Py_None; + } + } +#endif + *pvalue = value; + return 0; +} + +static CYTHON_INLINE +void __Pyx_Coroutine_ExceptionClear(__Pyx_ExcInfoStruct *exc_state) { + PyObject *t, *v, *tb; + t = exc_state->exc_type; + v = exc_state->exc_value; + tb = exc_state->exc_traceback; + + exc_state->exc_type = NULL; + exc_state->exc_value = NULL; + exc_state->exc_traceback = NULL; + + Py_XDECREF(t); + Py_XDECREF(v); + Py_XDECREF(tb); +} + +#define __Pyx_Coroutine_AlreadyRunningError(gen) (__Pyx__Coroutine_AlreadyRunningError(gen), (PyObject*)NULL) +static void __Pyx__Coroutine_AlreadyRunningError(CYTHON_UNUSED __pyx_CoroutineObject *gen) { + const char *msg; + if ((0)) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check((PyObject*)gen)) { + msg = "coroutine already executing"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if (__Pyx_AsyncGen_CheckExact((PyObject*)gen)) { + msg = "async generator already executing"; + #endif + } else { + msg = "generator already executing"; + } + PyErr_SetString(PyExc_ValueError, msg); +} + +#define __Pyx_Coroutine_NotStartedError(gen) (__Pyx__Coroutine_NotStartedError(gen), (PyObject*)NULL) +static void __Pyx__Coroutine_NotStartedError(CYTHON_UNUSED PyObject *gen) { + const char *msg; + if ((0)) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check(gen)) { + msg = "can't 
send non-None value to a just-started coroutine"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if (__Pyx_AsyncGen_CheckExact(gen)) { + msg = "can't send non-None value to a just-started async generator"; + #endif + } else { + msg = "can't send non-None value to a just-started generator"; + } + PyErr_SetString(PyExc_TypeError, msg); +} + +#define __Pyx_Coroutine_AlreadyTerminatedError(gen, value, closing) (__Pyx__Coroutine_AlreadyTerminatedError(gen, value, closing), (PyObject*)NULL) +static void __Pyx__Coroutine_AlreadyTerminatedError(CYTHON_UNUSED PyObject *gen, PyObject *value, CYTHON_UNUSED int closing) { + #ifdef __Pyx_Coroutine_USED + if (!closing && __Pyx_Coroutine_Check(gen)) { + // `self` is an exhausted coroutine: raise an error, + // except when called from gen_close(), which should + // always be a silent method. + PyErr_SetString(PyExc_RuntimeError, "cannot reuse already awaited coroutine"); + } else + #endif + if (value) { + // `gen` is an exhausted generator: + // only set exception if called from send(). + #ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(gen)) + PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration); + else + #endif + PyErr_SetNone(PyExc_StopIteration); + } +} + +static +PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, int closing) { + __Pyx_PyThreadState_declare + PyThreadState *tstate; + __Pyx_ExcInfoStruct *exc_state; + PyObject *retval; + + assert(!self->is_running); + + if (unlikely(self->resume_label == 0)) { + if (unlikely(value && value != Py_None)) { + return __Pyx_Coroutine_NotStartedError((PyObject*)self); + } + } + + if (unlikely(self->resume_label == -1)) { + return __Pyx_Coroutine_AlreadyTerminatedError((PyObject*)self, value, closing); + } + +#if CYTHON_FAST_THREAD_STATE + __Pyx_PyThreadState_assign + tstate = $local_tstate_cname; +#else + tstate = __Pyx_PyThreadState_Current; +#endif + + // Traceback/Frame rules pre-Py3.7: + // - on entry, save external exception state in self->gi_exc_state, restore it on exit + // - on exit, keep internally generated exceptions in self->gi_exc_state, clear everything else + // - on entry, set "f_back" pointer of internal exception traceback to (current) outer call frame + // - on exit, clear "f_back" of internal exception traceback + // - do not touch external frames and tracebacks + + // Traceback/Frame rules for Py3.7+ (CYTHON_USE_EXC_INFO_STACK): + // - on entry, push internal exception state in self->gi_exc_state on the exception stack + // - on exit, keep internally generated exceptions in self->gi_exc_state, clear everything else + // - on entry, set "f_back" pointer of internal exception traceback to (current) outer call frame + // - on exit, clear "f_back" of internal exception traceback + // - do not touch external frames and tracebacks + + exc_state = &self->gi_exc_state; + if (exc_state->exc_type) { + #if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON + // FIXME: what to do in PyPy? + #else + // Generators always return to their most recent caller, not + // necessarily their creator. 
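+        // Hence, before resuming, the saved traceback's frame is pointed at
+        // the caller's current frame; the link is dropped again on exit
+        // (see __Pyx_Coroutine_ResetFrameBackpointer below).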
+ if (exc_state->exc_traceback) { + PyTracebackObject *tb = (PyTracebackObject *) exc_state->exc_traceback; + PyFrameObject *f = tb->tb_frame; + + Py_XINCREF(tstate->frame); + assert(f->f_back == NULL); + f->f_back = tstate->frame; + } + #endif + } + +#if CYTHON_USE_EXC_INFO_STACK + // See https://bugs.python.org/issue25612 + exc_state->previous_item = tstate->exc_info; + tstate->exc_info = exc_state; +#else + if (exc_state->exc_type) { + // We were in an except handler when we left, + // restore the exception state which was put aside. + __Pyx_ExceptionSwap(&exc_state->exc_type, &exc_state->exc_value, &exc_state->exc_traceback); + // self->exc_* now holds the exception state of the caller + } else { + // save away the exception state of the caller + __Pyx_Coroutine_ExceptionClear(exc_state); + __Pyx_ExceptionSave(&exc_state->exc_type, &exc_state->exc_value, &exc_state->exc_traceback); + } +#endif + + self->is_running = 1; + retval = self->body((PyObject *) self, tstate, value); + self->is_running = 0; + +#if CYTHON_USE_EXC_INFO_STACK + // See https://bugs.python.org/issue25612 + exc_state = &self->gi_exc_state; + tstate->exc_info = exc_state->previous_item; + exc_state->previous_item = NULL; + // Cut off the exception frame chain so that we can reconnect it on re-entry above. + __Pyx_Coroutine_ResetFrameBackpointer(exc_state); +#endif + + return retval; +} + +static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStruct *exc_state) { + // Don't keep the reference to f_back any longer than necessary. It + // may keep a chain of frames alive or it could create a reference + // cycle. + PyObject *exc_tb = exc_state->exc_traceback; + + if (likely(exc_tb)) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON + // FIXME: what to do in PyPy? +#else + PyTracebackObject *tb = (PyTracebackObject *) exc_tb; + PyFrameObject *f = tb->tb_frame; + Py_CLEAR(f->f_back); +#endif + } +} + +static CYTHON_INLINE +PyObject *__Pyx_Coroutine_MethodReturn(CYTHON_UNUSED PyObject* gen, PyObject *retval) { + if (unlikely(!retval)) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (!__Pyx_PyErr_Occurred()) { + // method call must not terminate with NULL without setting an exception + PyObject *exc = PyExc_StopIteration; + #ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(gen)) + exc = __Pyx_PyExc_StopAsyncIteration; + #endif + __Pyx_PyErr_SetNone(exc); + } + } + return retval; +} + +static CYTHON_INLINE +PyObject *__Pyx_Coroutine_FinishDelegation(__pyx_CoroutineObject *gen) { + PyObject *ret; + PyObject *val = NULL; + __Pyx_Coroutine_Undelegate(gen); + __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, &val); + // val == NULL on failure => pass on exception + ret = __Pyx_Coroutine_SendEx(gen, val, 0); + Py_XDECREF(val); + return ret; +} + +static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) { + PyObject *retval; + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; + PyObject *yf = gen->yieldfrom; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + PyObject *ret; + // FIXME: does this really need an INCREF() ? 
+ //Py_INCREF(yf); + gen->is_running = 1; + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + ret = __Pyx_Coroutine_Send(yf, value); + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + ret = __Pyx_Coroutine_Send(yf, value); + } else + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_PyAsyncGenASend_CheckExact(yf)) { + ret = __Pyx_async_gen_asend_send(yf, value); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + // _PyGen_Send() is not exported before Py3.6 + if (PyGen_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03050000 && defined(PyCoro_CheckExact) && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + // _PyGen_Send() is not exported before Py3.6 + if (PyCoro_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value); + } else + #endif + { + if (value == Py_None) + ret = Py_TYPE(yf)->tp_iternext(yf); + else + ret = __Pyx_PyObject_CallMethod1(yf, PYIDENT("send"), value); + } + gen->is_running = 0; + //Py_DECREF(yf); + if (likely(ret)) { + return ret; + } + retval = __Pyx_Coroutine_FinishDelegation(gen); + } else { + retval = __Pyx_Coroutine_SendEx(gen, value, 0); + } + return __Pyx_Coroutine_MethodReturn(self, retval); +} + +// This helper function is used by gen_close and gen_throw to +// close a subiterator being delegated to by yield-from. +static int __Pyx_Coroutine_CloseIter(__pyx_CoroutineObject *gen, PyObject *yf) { + PyObject *retval = NULL; + int err = 0; + + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + retval = __Pyx_Coroutine_Close(yf); + if (!retval) + return -1; + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + retval = __Pyx_Coroutine_Close(yf); + if (!retval) + return -1; + } else + if (__Pyx_CoroutineAwait_CheckExact(yf)) { + retval = __Pyx_CoroutineAwait_Close((__pyx_CoroutineAwaitObject*)yf, NULL); + if (!retval) + return -1; + } else + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_PyAsyncGenASend_CheckExact(yf)) { + retval = __Pyx_async_gen_asend_close(yf, NULL); + // cannot fail + } else + if (__pyx_PyAsyncGenAThrow_CheckExact(yf)) { + retval = __Pyx_async_gen_athrow_close(yf, NULL); + // cannot fail + } else + #endif + { + PyObject *meth; + gen->is_running = 1; + meth = __Pyx_PyObject_GetAttrStr(yf, PYIDENT("close")); + if (unlikely(!meth)) { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_WriteUnraisable(yf); + } + PyErr_Clear(); + } else { + retval = PyObject_CallFunction(meth, NULL); + Py_DECREF(meth); + if (!retval) + err = -1; + } + gen->is_running = 0; + } + Py_XDECREF(retval); + return err; +} + +static PyObject *__Pyx_Generator_Next(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; + PyObject *yf = gen->yieldfrom; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + PyObject *ret; + // FIXME: does this really need an INCREF() ? 
+ //Py_INCREF(yf); + // YieldFrom code ensures that yf is an iterator + gen->is_running = 1; + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + ret = __Pyx_Generator_Next(yf); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + // _PyGen_Send() is not exported before Py3.6 + if (PyGen_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, NULL); + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + ret = __Pyx_Coroutine_Send(yf, Py_None); + } else + #endif + ret = Py_TYPE(yf)->tp_iternext(yf); + gen->is_running = 0; + //Py_DECREF(yf); + if (likely(ret)) { + return ret; + } + return __Pyx_Coroutine_FinishDelegation(gen); + } + return __Pyx_Coroutine_SendEx(gen, Py_None, 0); +} + +static PyObject *__Pyx_Coroutine_Close_Method(PyObject *self, CYTHON_UNUSED PyObject *arg) { + return __Pyx_Coroutine_Close(self); +} + +static PyObject *__Pyx_Coroutine_Close(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + PyObject *retval, *raised_exception; + PyObject *yf = gen->yieldfrom; + int err = 0; + + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + + if (yf) { + Py_INCREF(yf); + err = __Pyx_Coroutine_CloseIter(gen, yf); + __Pyx_Coroutine_Undelegate(gen); + Py_DECREF(yf); + } + if (err == 0) + PyErr_SetNone(PyExc_GeneratorExit); + retval = __Pyx_Coroutine_SendEx(gen, NULL, 1); + if (unlikely(retval)) { + const char *msg; + Py_DECREF(retval); + if ((0)) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check(self)) { + msg = "coroutine ignored GeneratorExit"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if (__Pyx_AsyncGen_CheckExact(self)) { +#if PY_VERSION_HEX < 0x03060000 + msg = "async generator ignored GeneratorExit - might require Python 3.6+ finalisation (PEP 525)"; +#else + msg = "async generator ignored GeneratorExit"; +#endif + #endif + } else { + msg = "generator ignored GeneratorExit"; + } + PyErr_SetString(PyExc_RuntimeError, msg); + return NULL; + } + raised_exception = PyErr_Occurred(); + if (likely(!raised_exception || __Pyx_PyErr_GivenExceptionMatches2(raised_exception, PyExc_GeneratorExit, PyExc_StopIteration))) { + // ignore these errors + if (raised_exception) PyErr_Clear(); + Py_INCREF(Py_None); + return Py_None; + } + return NULL; +} + +static PyObject *__Pyx__Coroutine_Throw(PyObject *self, PyObject *typ, PyObject *val, PyObject *tb, + PyObject *args, int close_on_genexit) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + PyObject *yf = gen->yieldfrom; + + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + + if (yf) { + PyObject *ret; + Py_INCREF(yf); + if (__Pyx_PyErr_GivenExceptionMatches(typ, PyExc_GeneratorExit) && close_on_genexit) { + // Asynchronous generators *should not* be closed right away. + // We have to allow some awaits to work it through, hence the + // `close_on_genexit` parameter here. 
+ int err = __Pyx_Coroutine_CloseIter(gen, yf); + Py_DECREF(yf); + __Pyx_Coroutine_Undelegate(gen); + if (err < 0) + return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0)); + goto throw_here; + } + gen->is_running = 1; + if (0 + #ifdef __Pyx_Generator_USED + || __Pyx_Generator_CheckExact(yf) + #endif + #ifdef __Pyx_Coroutine_USED + || __Pyx_Coroutine_Check(yf) + #endif + ) { + ret = __Pyx__Coroutine_Throw(yf, typ, val, tb, args, close_on_genexit); + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_CoroutineAwait_CheckExact(yf)) { + ret = __Pyx__Coroutine_Throw(((__pyx_CoroutineAwaitObject*)yf)->coroutine, typ, val, tb, args, close_on_genexit); + #endif + } else { + PyObject *meth = __Pyx_PyObject_GetAttrStr(yf, PYIDENT("throw")); + if (unlikely(!meth)) { + Py_DECREF(yf); + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { + gen->is_running = 0; + return NULL; + } + PyErr_Clear(); + __Pyx_Coroutine_Undelegate(gen); + gen->is_running = 0; + goto throw_here; + } + if (likely(args)) { + ret = PyObject_CallObject(meth, args); + } else { + // "tb" or even "val" might be NULL, but that also correctly terminates the argument list + ret = PyObject_CallFunctionObjArgs(meth, typ, val, tb, NULL); + } + Py_DECREF(meth); + } + gen->is_running = 0; + Py_DECREF(yf); + if (!ret) { + ret = __Pyx_Coroutine_FinishDelegation(gen); + } + return __Pyx_Coroutine_MethodReturn(self, ret); + } +throw_here: + __Pyx_Raise(typ, val, tb, NULL); + return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0)); +} + +static PyObject *__Pyx_Coroutine_Throw(PyObject *self, PyObject *args) { + PyObject *typ; + PyObject *val = NULL; + PyObject *tb = NULL; + + if (!PyArg_UnpackTuple(args, (char *)"throw", 1, 3, &typ, &val, &tb)) + return NULL; + + return __Pyx__Coroutine_Throw(self, typ, val, tb, args, 1); +} + +static CYTHON_INLINE int __Pyx_Coroutine_traverse_excstate(__Pyx_ExcInfoStruct *exc_state, visitproc visit, void *arg) { + Py_VISIT(exc_state->exc_type); + Py_VISIT(exc_state->exc_value); + Py_VISIT(exc_state->exc_traceback); + return 0; +} + +static int __Pyx_Coroutine_traverse(__pyx_CoroutineObject *gen, visitproc visit, void *arg) { + Py_VISIT(gen->closure); + Py_VISIT(gen->classobj); + Py_VISIT(gen->yieldfrom); + return __Pyx_Coroutine_traverse_excstate(&gen->gi_exc_state, visit, arg); +} + +static int __Pyx_Coroutine_clear(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + + Py_CLEAR(gen->closure); + Py_CLEAR(gen->classobj); + Py_CLEAR(gen->yieldfrom); + __Pyx_Coroutine_ExceptionClear(&gen->gi_exc_state); +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + Py_CLEAR(((__pyx_PyAsyncGenObject*)gen)->ag_finalizer); + } +#endif + Py_CLEAR(gen->gi_code); + Py_CLEAR(gen->gi_name); + Py_CLEAR(gen->gi_qualname); + Py_CLEAR(gen->gi_modulename); + return 0; +} + +static void __Pyx_Coroutine_dealloc(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + + PyObject_GC_UnTrack(gen); + if (gen->gi_weakreflist != NULL) + PyObject_ClearWeakRefs(self); + + if (gen->resume_label >= 0) { + // Generator is paused or unstarted, so we need to close + PyObject_GC_Track(self); +#if PY_VERSION_HEX >= 0x030400a1 && CYTHON_USE_TP_FINALIZE + if (PyObject_CallFinalizerFromDealloc(self)) +#else + Py_TYPE(gen)->tp_del(self); + if (self->ob_refcnt > 0) +#endif + { + // resurrected. 
:( + return; + } + PyObject_GC_UnTrack(self); + } + +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + /* We have to handle this case for asynchronous generators + right here, because this code has to be between UNTRACK + and GC_Del. */ + Py_CLEAR(((__pyx_PyAsyncGenObject*)self)->ag_finalizer); + } +#endif + __Pyx_Coroutine_clear(self); + PyObject_GC_Del(gen); +} + +static void __Pyx_Coroutine_del(PyObject *self) { + PyObject *error_type, *error_value, *error_traceback; + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + __Pyx_PyThreadState_declare + + if (gen->resume_label < 0) { + // already terminated => nothing to clean up + return; + } + +#if !CYTHON_USE_TP_FINALIZE + // Temporarily resurrect the object. + assert(self->ob_refcnt == 0); + self->ob_refcnt = 1; +#endif + + __Pyx_PyThreadState_assign + + // Save the current exception, if any. + __Pyx_ErrFetch(&error_type, &error_value, &error_traceback); + +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + __pyx_PyAsyncGenObject *agen = (__pyx_PyAsyncGenObject*)self; + PyObject *finalizer = agen->ag_finalizer; + if (finalizer && !agen->ag_closed) { + PyObject *res = __Pyx_PyObject_CallOneArg(finalizer, self); + if (unlikely(!res)) { + PyErr_WriteUnraisable(self); + } else { + Py_DECREF(res); + } + // Restore the saved exception. + __Pyx_ErrRestore(error_type, error_value, error_traceback); + return; + } + } +#endif + + if (unlikely(gen->resume_label == 0 && !error_value)) { +#ifdef __Pyx_Coroutine_USED +#ifdef __Pyx_Generator_USED + // only warn about (async) coroutines + if (!__Pyx_Generator_CheckExact(self)) +#endif + { + // untrack dead object as we are executing Python code (which might trigger GC) + PyObject_GC_UnTrack(self); +#if PY_MAJOR_VERSION >= 3 /* PY_VERSION_HEX >= 0x03030000*/ || defined(PyErr_WarnFormat) + if (unlikely(PyErr_WarnFormat(PyExc_RuntimeWarning, 1, "coroutine '%.50S' was never awaited", gen->gi_qualname) < 0)) + PyErr_WriteUnraisable(self); +#else + {PyObject *msg; + char *cmsg; + #if CYTHON_COMPILING_IN_PYPY + msg = NULL; + cmsg = (char*) "coroutine was never awaited"; + #else + char *cname; + PyObject *qualname; + qualname = gen->gi_qualname; + cname = PyString_AS_STRING(qualname); + msg = PyString_FromFormat("coroutine '%.50s' was never awaited", cname); + + if (unlikely(!msg)) { + PyErr_Clear(); + cmsg = (char*) "coroutine was never awaited"; + } else { + cmsg = PyString_AS_STRING(msg); + } + #endif + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, cmsg, 1) < 0)) + PyErr_WriteUnraisable(self); + Py_XDECREF(msg);} +#endif + PyObject_GC_Track(self); + } +#endif /*__Pyx_Coroutine_USED*/ + } else { + PyObject *res = __Pyx_Coroutine_Close(self); + if (unlikely(!res)) { + if (PyErr_Occurred()) + PyErr_WriteUnraisable(self); + } else { + Py_DECREF(res); + } + } + + // Restore the saved exception. + __Pyx_ErrRestore(error_type, error_value, error_traceback); + +#if !CYTHON_USE_TP_FINALIZE + // Undo the temporary resurrection; can't use DECREF here, it would + // cause a recursive call. + assert(self->ob_refcnt > 0); + if (--self->ob_refcnt == 0) { + // this is the normal path out + return; + } + + // close() resurrected it! Make it look like the original Py_DECREF + // never happened. 
+ { + Py_ssize_t refcnt = self->ob_refcnt; + _Py_NewReference(self); + self->ob_refcnt = refcnt; + } +#if CYTHON_COMPILING_IN_CPYTHON + assert(PyType_IS_GC(self->ob_type) && + _Py_AS_GC(self)->gc.gc_refs != _PyGC_REFS_UNTRACKED); + + // If Py_REF_DEBUG, _Py_NewReference bumped _Py_RefTotal, so + // we need to undo that. + _Py_DEC_REFTOTAL; +#endif + // If Py_TRACE_REFS, _Py_NewReference re-added self to the object + // chain, so no more to do there. + // If COUNT_ALLOCS, the original decref bumped tp_frees, and + // _Py_NewReference bumped tp_allocs: both of those need to be + // undone. +#ifdef COUNT_ALLOCS + --Py_TYPE(self)->tp_frees; + --Py_TYPE(self)->tp_allocs; +#endif +#endif +} + +static PyObject * +__Pyx_Coroutine_get_name(__pyx_CoroutineObject *self, CYTHON_UNUSED void *context) +{ + PyObject *name = self->gi_name; + // avoid NULL pointer dereference during garbage collection + if (unlikely(!name)) name = Py_None; + Py_INCREF(name); + return name; +} + +static int +__Pyx_Coroutine_set_name(__pyx_CoroutineObject *self, PyObject *value, CYTHON_UNUSED void *context) +{ + PyObject *tmp; + +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + tmp = self->gi_name; + Py_INCREF(value); + self->gi_name = value; + Py_XDECREF(tmp); + return 0; +} + +static PyObject * +__Pyx_Coroutine_get_qualname(__pyx_CoroutineObject *self, CYTHON_UNUSED void *context) +{ + PyObject *name = self->gi_qualname; + // avoid NULL pointer dereference during garbage collection + if (unlikely(!name)) name = Py_None; + Py_INCREF(name); + return name; +} + +static int +__Pyx_Coroutine_set_qualname(__pyx_CoroutineObject *self, PyObject *value, CYTHON_UNUSED void *context) +{ + PyObject *tmp; + +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + tmp = self->gi_qualname; + Py_INCREF(value); + self->gi_qualname = value; + Py_XDECREF(tmp); + return 0; +} + +static __pyx_CoroutineObject *__Pyx__Coroutine_New( + PyTypeObject* type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name) { + __pyx_CoroutineObject *gen = PyObject_GC_New(__pyx_CoroutineObject, type); + if (unlikely(!gen)) + return NULL; + return __Pyx__Coroutine_NewInit(gen, body, code, closure, name, qualname, module_name); +} + +static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( + __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name) { + gen->body = body; + gen->closure = closure; + Py_XINCREF(closure); + gen->is_running = 0; + gen->resume_label = 0; + gen->classobj = NULL; + gen->yieldfrom = NULL; + gen->gi_exc_state.exc_type = NULL; + gen->gi_exc_state.exc_value = NULL; + gen->gi_exc_state.exc_traceback = NULL; +#if CYTHON_USE_EXC_INFO_STACK + gen->gi_exc_state.previous_item = NULL; +#endif + gen->gi_weakreflist = NULL; + Py_XINCREF(qualname); + gen->gi_qualname = qualname; + Py_XINCREF(name); + gen->gi_name = name; + Py_XINCREF(module_name); + gen->gi_modulename = module_name; + Py_XINCREF(code); + gen->gi_code = code; + + PyObject_GC_Track(gen); + return gen; +} + + 
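+// Usage sketch (illustrative only, not part of the upstream utility code):
+// generated module code creates these objects through thin macros over
+// __Pyx__Coroutine_New(), in the same way the IterableCoroutine section
+// below defines __Pyx_IterableCoroutine_New(). All names in this sketch
+// are hypothetical:
+//
+//   gen = (PyObject *) __Pyx__Coroutine_New(__pyx_GeneratorType,
+//             __pyx_gen_body, code_obj, closure, name, qualname, modname);
+//   if (unlikely(!gen)) return NULL;
+//   // The new object starts paused: resume_label == 0, is_running == 0.
+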
+//////////////////// Coroutine //////////////////// +//@requires: CoroutineBase +//@requires: PatchGeneratorABC +//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict + +static void __Pyx_CoroutineAwait_dealloc(PyObject *self) { + PyObject_GC_UnTrack(self); + Py_CLEAR(((__pyx_CoroutineAwaitObject*)self)->coroutine); + PyObject_GC_Del(self); +} + +static int __Pyx_CoroutineAwait_traverse(__pyx_CoroutineAwaitObject *self, visitproc visit, void *arg) { + Py_VISIT(self->coroutine); + return 0; +} + +static int __Pyx_CoroutineAwait_clear(__pyx_CoroutineAwaitObject *self) { + Py_CLEAR(self->coroutine); + return 0; +} + +static PyObject *__Pyx_CoroutineAwait_Next(__pyx_CoroutineAwaitObject *self) { + return __Pyx_Generator_Next(self->coroutine); +} + +static PyObject *__Pyx_CoroutineAwait_Send(__pyx_CoroutineAwaitObject *self, PyObject *value) { + return __Pyx_Coroutine_Send(self->coroutine, value); +} + +static PyObject *__Pyx_CoroutineAwait_Throw(__pyx_CoroutineAwaitObject *self, PyObject *args) { + return __Pyx_Coroutine_Throw(self->coroutine, args); +} + +static PyObject *__Pyx_CoroutineAwait_Close(__pyx_CoroutineAwaitObject *self, CYTHON_UNUSED PyObject *arg) { + return __Pyx_Coroutine_Close(self->coroutine); +} + +static PyObject *__Pyx_CoroutineAwait_self(PyObject *self) { + Py_INCREF(self); + return self; +} + +#if !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_CoroutineAwait_no_new(CYTHON_UNUSED PyTypeObject *type, CYTHON_UNUSED PyObject *args, CYTHON_UNUSED PyObject *kwargs) { + PyErr_SetString(PyExc_TypeError, "cannot instantiate type, use 'await coroutine' instead"); + return NULL; +} +#endif + +static PyMethodDef __pyx_CoroutineAwait_methods[] = { + {"send", (PyCFunction) __Pyx_CoroutineAwait_Send, METH_O, + (char*) PyDoc_STR("send(arg) -> send 'arg' into coroutine,\nreturn next yielded value or raise StopIteration.")}, + {"throw", (PyCFunction) __Pyx_CoroutineAwait_Throw, METH_VARARGS, + (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in coroutine,\nreturn next yielded value or raise StopIteration.")}, + {"close", (PyCFunction) __Pyx_CoroutineAwait_Close, METH_NOARGS, + (char*) PyDoc_STR("close() -> raise GeneratorExit inside coroutine.")}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_CoroutineAwaitType_type = { + PyVarObject_HEAD_INIT(0, 0) + "coroutine_wrapper", /*tp_name*/ + sizeof(__pyx_CoroutineAwaitObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor) __Pyx_CoroutineAwait_dealloc,/*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_as_async resp. 
tp_compare*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + PyDoc_STR("A wrapper object implementing __await__ for coroutines."), /*tp_doc*/ + (traverseproc) __Pyx_CoroutineAwait_traverse, /*tp_traverse*/ + (inquiry) __Pyx_CoroutineAwait_clear, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __Pyx_CoroutineAwait_self, /*tp_iter*/ + (iternextfunc) __Pyx_CoroutineAwait_Next, /*tp_iternext*/ + __pyx_CoroutineAwait_methods, /*tp_methods*/ + 0 , /*tp_members*/ + 0 , /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ +#if !CYTHON_COMPILING_IN_PYPY + __Pyx_CoroutineAwait_no_new, /*tp_new*/ +#else + 0, /*tp_new*/ +#endif + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ +#if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + +#if PY_VERSION_HEX < 0x030500B1 || defined(__Pyx_IterableCoroutine_USED) || CYTHON_USE_ASYNC_SLOTS +static CYTHON_INLINE PyObject *__Pyx__Coroutine_await(PyObject *coroutine) { + __pyx_CoroutineAwaitObject *await = PyObject_GC_New(__pyx_CoroutineAwaitObject, __pyx_CoroutineAwaitType); + if (unlikely(!await)) return NULL; + Py_INCREF(coroutine); + await->coroutine = coroutine; + PyObject_GC_Track(await); + return (PyObject*)await; +} +#endif + +#if PY_VERSION_HEX < 0x030500B1 +static PyObject *__Pyx_Coroutine_await_method(PyObject *coroutine, CYTHON_UNUSED PyObject *arg) { + return __Pyx__Coroutine_await(coroutine); +} +#endif + +#if defined(__Pyx_IterableCoroutine_USED) || CYTHON_USE_ASYNC_SLOTS +static PyObject *__Pyx_Coroutine_await(PyObject *coroutine) { + if (unlikely(!coroutine || !__Pyx_Coroutine_Check(coroutine))) { + PyErr_SetString(PyExc_TypeError, "invalid input, expected coroutine"); + return NULL; + } + return __Pyx__Coroutine_await(coroutine); +} +#endif + +static PyObject * +__Pyx_Coroutine_get_frame(CYTHON_UNUSED __pyx_CoroutineObject *self, CYTHON_UNUSED void *context) +{ + // Fake implementation that always returns None, but at least does not raise an AttributeError. + Py_RETURN_NONE; +} + +#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1 +static PyObject *__Pyx_Coroutine_compare(PyObject *obj, PyObject *other, int op) { + PyObject* result; + switch (op) { + case Py_EQ: result = (other == obj) ? Py_True : Py_False; break; + case Py_NE: result = (other != obj) ? 
Py_True : Py_False; break; + default: + result = Py_NotImplemented; + } + Py_INCREF(result); + return result; +} +#endif + +static PyMethodDef __pyx_Coroutine_methods[] = { + {"send", (PyCFunction) __Pyx_Coroutine_Send, METH_O, + (char*) PyDoc_STR("send(arg) -> send 'arg' into coroutine,\nreturn next iterated value or raise StopIteration.")}, + {"throw", (PyCFunction) __Pyx_Coroutine_Throw, METH_VARARGS, + (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in coroutine,\nreturn next iterated value or raise StopIteration.")}, + {"close", (PyCFunction) __Pyx_Coroutine_Close_Method, METH_NOARGS, + (char*) PyDoc_STR("close() -> raise GeneratorExit inside coroutine.")}, +#if PY_VERSION_HEX < 0x030500B1 + {"__await__", (PyCFunction) __Pyx_Coroutine_await_method, METH_NOARGS, + (char*) PyDoc_STR("__await__() -> return an iterator to be used in await expression.")}, +#endif + {0, 0, 0, 0} +}; + +static PyMemberDef __pyx_Coroutine_memberlist[] = { + {(char *) "cr_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL}, + {(char*) "cr_await", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY, + (char*) PyDoc_STR("object being awaited, or None")}, + {(char*) "cr_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL}, + {(char *) "__module__", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_modulename), PY_WRITE_RESTRICTED, 0}, + {0, 0, 0, 0, 0} +}; + +static PyGetSetDef __pyx_Coroutine_getsets[] = { + {(char *) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name, + (char*) PyDoc_STR("name of the coroutine"), 0}, + {(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname, + (char*) PyDoc_STR("qualified name of the coroutine"), 0}, + {(char *) "cr_frame", (getter)__Pyx_Coroutine_get_frame, NULL, + (char*) PyDoc_STR("Frame of the coroutine"), 0}, + {0, 0, 0, 0, 0} +}; + +#if CYTHON_USE_ASYNC_SLOTS +static __Pyx_PyAsyncMethodsStruct __pyx_Coroutine_as_async = { + __Pyx_Coroutine_await, /*am_await*/ + 0, /*am_aiter*/ + 0, /*am_anext*/ +}; +#endif + +static PyTypeObject __pyx_CoroutineType_type = { + PyVarObject_HEAD_INIT(0, 0) + "coroutine", /*tp_name*/ + sizeof(__pyx_CoroutineObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor) __Pyx_Coroutine_dealloc,/*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ +#if CYTHON_USE_ASYNC_SLOTS + &__pyx_Coroutine_as_async, /*tp_as_async (tp_reserved) - Py3 only! 
*/ +#else + 0, /*tp_reserved*/ +#endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ + 0, /*tp_doc*/ + (traverseproc) __Pyx_Coroutine_traverse, /*tp_traverse*/ + 0, /*tp_clear*/ +#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1 + // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare + __Pyx_Coroutine_compare, /*tp_richcompare*/ +#else + 0, /*tp_richcompare*/ +#endif + offsetof(__pyx_CoroutineObject, gi_weakreflist), /*tp_weaklistoffset*/ + // no tp_iter() as iterator is only available through __await__() + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_Coroutine_methods, /*tp_methods*/ + __pyx_Coroutine_memberlist, /*tp_members*/ + __pyx_Coroutine_getsets, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ +#if CYTHON_USE_TP_FINALIZE + 0, /*tp_del*/ +#else + __Pyx_Coroutine_del, /*tp_del*/ +#endif + 0, /*tp_version_tag*/ +#if CYTHON_USE_TP_FINALIZE + __Pyx_Coroutine_del, /*tp_finalize*/ +#elif PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + +static int __pyx_Coroutine_init(void) { + // on Windows, C-API functions can't be used in slots statically + __pyx_CoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + __pyx_CoroutineType = __Pyx_FetchCommonType(&__pyx_CoroutineType_type); + if (unlikely(!__pyx_CoroutineType)) + return -1; + +#ifdef __Pyx_IterableCoroutine_USED + if (unlikely(__pyx_IterableCoroutine_init() == -1)) + return -1; +#endif + + __pyx_CoroutineAwaitType = __Pyx_FetchCommonType(&__pyx_CoroutineAwaitType_type); + if (unlikely(!__pyx_CoroutineAwaitType)) + return -1; + return 0; +} + + +//////////////////// IterableCoroutine.proto //////////////////// + +#define __Pyx_IterableCoroutine_USED + +static PyTypeObject *__pyx_IterableCoroutineType = 0; + +#undef __Pyx_Coroutine_Check +#define __Pyx_Coroutine_Check(obj) (__Pyx_Coroutine_CheckExact(obj) || (Py_TYPE(obj) == __pyx_IterableCoroutineType)) + +#define __Pyx_IterableCoroutine_New(body, code, closure, name, qualname, module_name) \ + __Pyx__Coroutine_New(__pyx_IterableCoroutineType, body, code, closure, name, qualname, module_name) + +static int __pyx_IterableCoroutine_init(void);/*proto*/ + + +//////////////////// IterableCoroutine //////////////////// +//@requires: Coroutine +//@requires: CommonStructures.c::FetchCommonType + +static PyTypeObject __pyx_IterableCoroutineType_type = { + PyVarObject_HEAD_INIT(0, 0) + "iterable_coroutine", /*tp_name*/ + sizeof(__pyx_CoroutineObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor) __Pyx_Coroutine_dealloc,/*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ +#if CYTHON_USE_ASYNC_SLOTS + &__pyx_Coroutine_as_async, /*tp_as_async (tp_reserved) - Py3 only! 
*/ +#else + 0, /*tp_reserved*/ +#endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ + 0, /*tp_doc*/ + (traverseproc) __Pyx_Coroutine_traverse, /*tp_traverse*/ + 0, /*tp_clear*/ +#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1 + // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare + __Pyx_Coroutine_compare, /*tp_richcompare*/ +#else + 0, /*tp_richcompare*/ +#endif + offsetof(__pyx_CoroutineObject, gi_weakreflist), /*tp_weaklistoffset*/ + // enable iteration for legacy support of asyncio yield-from protocol + __Pyx_Coroutine_await, /*tp_iter*/ + (iternextfunc) __Pyx_Generator_Next, /*tp_iternext*/ + __pyx_Coroutine_methods, /*tp_methods*/ + __pyx_Coroutine_memberlist, /*tp_members*/ + __pyx_Coroutine_getsets, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ +#if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_del*/ +#else + __Pyx_Coroutine_del, /*tp_del*/ +#endif + 0, /*tp_version_tag*/ +#if PY_VERSION_HEX >= 0x030400a1 + __Pyx_Coroutine_del, /*tp_finalize*/ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + + +static int __pyx_IterableCoroutine_init(void) { + __pyx_IterableCoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + __pyx_IterableCoroutineType = __Pyx_FetchCommonType(&__pyx_IterableCoroutineType_type); + if (unlikely(!__pyx_IterableCoroutineType)) + return -1; + return 0; +} + + +//////////////////// Generator //////////////////// +//@requires: CoroutineBase +//@requires: PatchGeneratorABC +//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict + +static PyMethodDef __pyx_Generator_methods[] = { + {"send", (PyCFunction) __Pyx_Coroutine_Send, METH_O, + (char*) PyDoc_STR("send(arg) -> send 'arg' into generator,\nreturn next yielded value or raise StopIteration.")}, + {"throw", (PyCFunction) __Pyx_Coroutine_Throw, METH_VARARGS, + (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in generator,\nreturn next yielded value or raise StopIteration.")}, + {"close", (PyCFunction) __Pyx_Coroutine_Close_Method, METH_NOARGS, + (char*) PyDoc_STR("close() -> raise GeneratorExit inside generator.")}, + {0, 0, 0, 0} +}; + +static PyMemberDef __pyx_Generator_memberlist[] = { + {(char *) "gi_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL}, + {(char*) "gi_yieldfrom", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY, + (char*) PyDoc_STR("object being iterated by 'yield from', or None")}, + {(char*) "gi_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL}, + {0, 0, 0, 0, 0} +}; + +static PyGetSetDef __pyx_Generator_getsets[] = { + {(char *) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name, + (char*) PyDoc_STR("name of the generator"), 0}, + {(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname, + (char*) PyDoc_STR("qualified name of the generator"), 
0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_GeneratorType_type = { + PyVarObject_HEAD_INIT(0, 0) + "generator", /*tp_name*/ + sizeof(__pyx_CoroutineObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor) __Pyx_Coroutine_dealloc,/*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_compare / tp_as_async*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ + 0, /*tp_doc*/ + (traverseproc) __Pyx_Coroutine_traverse, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + offsetof(__pyx_CoroutineObject, gi_weakreflist), /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + (iternextfunc) __Pyx_Generator_Next, /*tp_iternext*/ + __pyx_Generator_methods, /*tp_methods*/ + __pyx_Generator_memberlist, /*tp_members*/ + __pyx_Generator_getsets, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ +#if CYTHON_USE_TP_FINALIZE + 0, /*tp_del*/ +#else + __Pyx_Coroutine_del, /*tp_del*/ +#endif + 0, /*tp_version_tag*/ +#if CYTHON_USE_TP_FINALIZE + __Pyx_Coroutine_del, /*tp_finalize*/ +#elif PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + +static int __pyx_Generator_init(void) { + // on Windows, C-API functions can't be used in slots statically + __pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + __pyx_GeneratorType_type.tp_iter = PyObject_SelfIter; + + __pyx_GeneratorType = __Pyx_FetchCommonType(&__pyx_GeneratorType_type); + if (unlikely(!__pyx_GeneratorType)) { + return -1; + } + return 0; +} + + +/////////////// ReturnWithStopIteration.proto /////////////// + +#define __Pyx_ReturnWithStopIteration(value) \ + if (value == Py_None) PyErr_SetNone(PyExc_StopIteration); else __Pyx__ReturnWithStopIteration(value) +static void __Pyx__ReturnWithStopIteration(PyObject* value); /*proto*/ + +/////////////// ReturnWithStopIteration /////////////// +//@requires: Exceptions.c::PyErrFetchRestore +//@requires: Exceptions.c::PyThreadStateGet +//@substitute: naming + +// 1) Instantiating an exception just to pass back a value is costly. +// 2) CPython 3.3 <= x < 3.5b1 crash in yield-from when the StopIteration is not instantiated. +// 3) Passing a tuple as value into PyErr_SetObject() passes its items on as arguments. +// 4) Passing an exception as value will interpret it as an exception on unpacking and raise it (or unpack its value). +// 5) If there is currently an exception being handled, we need to chain it. 
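+// Python-level illustration of the value passing described in points 1)-5)
+// above (plain CPython shown; in Cython-compiled code the `return` goes
+// through __Pyx_ReturnWithStopIteration instead):
+//
+//   >>> def g():
+//   ...     yield
+//   ...     return (1, 2)
+//   >>> it = g(); next(it)
+//   >>> try: next(it)
+//   ... except StopIteration as e: print(e.value)
+//   (1, 2)
+//
+// Without the explicit tuple wrapping below, PyErr_SetObject() would spread
+// the tuple as constructor arguments and e.value would come back as 1.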
+ +static void __Pyx__ReturnWithStopIteration(PyObject* value) { + PyObject *exc, *args; +#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_PYSTON + __Pyx_PyThreadState_declare + if ((PY_VERSION_HEX >= 0x03030000 && PY_VERSION_HEX < 0x030500B1) + || unlikely(PyTuple_Check(value) || PyExceptionInstance_Check(value))) { + args = PyTuple_New(1); + if (unlikely(!args)) return; + Py_INCREF(value); + PyTuple_SET_ITEM(args, 0, value); + exc = PyType_Type.tp_call(PyExc_StopIteration, args, NULL); + Py_DECREF(args); + if (!exc) return; + } else { + // it's safe to avoid instantiating the exception + Py_INCREF(value); + exc = value; + } + #if CYTHON_FAST_THREAD_STATE + __Pyx_PyThreadState_assign + #if CYTHON_USE_EXC_INFO_STACK + if (!$local_tstate_cname->exc_info->exc_type) + #else + if (!$local_tstate_cname->exc_type) + #endif + { + // no chaining needed => avoid the overhead in PyErr_SetObject() + Py_INCREF(PyExc_StopIteration); + __Pyx_ErrRestore(PyExc_StopIteration, exc, NULL); + return; + } + #endif +#else + args = PyTuple_Pack(1, value); + if (unlikely(!args)) return; + exc = PyObject_Call(PyExc_StopIteration, args, NULL); + Py_DECREF(args); + if (unlikely(!exc)) return; +#endif + PyErr_SetObject(PyExc_StopIteration, exc); + Py_DECREF(exc); +} + + +//////////////////// PatchModuleWithCoroutine.proto //////////////////// + +static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code); /*proto*/ + +//////////////////// PatchModuleWithCoroutine //////////////////// +//@substitute: naming + +static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code) { +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + int result; + PyObject *globals, *result_obj; + globals = PyDict_New(); if (unlikely(!globals)) goto ignore; + result = PyDict_SetItemString(globals, "_cython_coroutine_type", + #ifdef __Pyx_Coroutine_USED + (PyObject*)__pyx_CoroutineType); + #else + Py_None); + #endif + if (unlikely(result < 0)) goto ignore; + result = PyDict_SetItemString(globals, "_cython_generator_type", + #ifdef __Pyx_Generator_USED + (PyObject*)__pyx_GeneratorType); + #else + Py_None); + #endif + if (unlikely(result < 0)) goto ignore; + if (unlikely(PyDict_SetItemString(globals, "_module", module) < 0)) goto ignore; + if (unlikely(PyDict_SetItemString(globals, "__builtins__", $builtins_cname) < 0)) goto ignore; + result_obj = PyRun_String(py_code, Py_file_input, globals, globals); + if (unlikely(!result_obj)) goto ignore; + Py_DECREF(result_obj); + Py_DECREF(globals); + return module; + +ignore: + Py_XDECREF(globals); + PyErr_WriteUnraisable(module); + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, "Cython module failed to patch module with custom type", 1) < 0)) { + Py_DECREF(module); + module = NULL; + } +#else + // avoid "unused" warning + py_code++; +#endif + return module; +} + + +//////////////////// PatchGeneratorABC.proto //////////////////// + +// register with Generator/Coroutine ABCs in 'collections.abc' +// see https://bugs.python.org/issue24018 +static int __Pyx_patch_abc(void); /*proto*/ + +//////////////////// PatchGeneratorABC //////////////////// +//@requires: PatchModuleWithCoroutine + +#ifndef CYTHON_REGISTER_ABCS +#define CYTHON_REGISTER_ABCS 1 +#endif + +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) +static PyObject* __Pyx_patch_abc_module(PyObject *module); /*proto*/ +static PyObject* __Pyx_patch_abc_module(PyObject *module) { + module = __Pyx_Coroutine_patch_module( + module, CSTRING("""\ +if 
_cython_generator_type is not None: + try: Generator = _module.Generator + except AttributeError: pass + else: Generator.register(_cython_generator_type) +if _cython_coroutine_type is not None: + try: Coroutine = _module.Coroutine + except AttributeError: pass + else: Coroutine.register(_cython_coroutine_type) +""") + ); + return module; +} +#endif + +static int __Pyx_patch_abc(void) { +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + static int abc_patched = 0; + if (CYTHON_REGISTER_ABCS && !abc_patched) { + PyObject *module; + module = PyImport_ImportModule((PY_MAJOR_VERSION >= 3) ? "collections.abc" : "collections"); + if (!module) { + PyErr_WriteUnraisable(NULL); + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, + ((PY_MAJOR_VERSION >= 3) ? + "Cython module failed to register with collections.abc module" : + "Cython module failed to register with collections module"), 1) < 0)) { + return -1; + } + } else { + module = __Pyx_patch_abc_module(module); + abc_patched = 1; + if (unlikely(!module)) + return -1; + Py_DECREF(module); + } + // also register with "backports_abc" module if available, just in case + module = PyImport_ImportModule("backports_abc"); + if (module) { + module = __Pyx_patch_abc_module(module); + Py_XDECREF(module); + } + if (!module) { + PyErr_Clear(); + } + } +#else + // avoid "unused" warning for __Pyx_Coroutine_patch_module() + if ((0)) __Pyx_Coroutine_patch_module(NULL, NULL); +#endif + return 0; +} + + +//////////////////// PatchAsyncIO.proto //////////////////// + +// run after importing "asyncio" to patch Cython generator support into it +static PyObject* __Pyx_patch_asyncio(PyObject* module); /*proto*/ + +//////////////////// PatchAsyncIO //////////////////// +//@requires: ImportExport.c::Import +//@requires: PatchModuleWithCoroutine +//@requires: PatchInspect + +static PyObject* __Pyx_patch_asyncio(PyObject* module) { +#if PY_VERSION_HEX < 0x030500B2 && \ + (defined(__Pyx_Coroutine_USED) || defined(__Pyx_Generator_USED)) && \ + (!defined(CYTHON_PATCH_ASYNCIO) || CYTHON_PATCH_ASYNCIO) + PyObject *patch_module = NULL; + static int asyncio_patched = 0; + if (unlikely((!asyncio_patched) && module)) { + PyObject *package; + package = __Pyx_Import(PYIDENT("asyncio.coroutines"), NULL, 0); + if (package) { + patch_module = __Pyx_Coroutine_patch_module( + PyObject_GetAttrString(package, "coroutines"), CSTRING("""\ +try: + coro_types = _module._COROUTINE_TYPES +except AttributeError: pass +else: + if _cython_coroutine_type is not None and _cython_coroutine_type not in coro_types: + coro_types = tuple(coro_types) + (_cython_coroutine_type,) + if _cython_generator_type is not None and _cython_generator_type not in coro_types: + coro_types = tuple(coro_types) + (_cython_generator_type,) +_module._COROUTINE_TYPES = coro_types +""") + ); + } else { + PyErr_Clear(); +// Always enable fallback: even if we compile against 3.4.2, we might be running on 3.4.1 at some point. 
+//#if PY_VERSION_HEX < 0x03040200 + // Py3.4.1 used to have asyncio.tasks instead of asyncio.coroutines + package = __Pyx_Import(PYIDENT("asyncio.tasks"), NULL, 0); + if (unlikely(!package)) goto asyncio_done; + patch_module = __Pyx_Coroutine_patch_module( + PyObject_GetAttrString(package, "tasks"), CSTRING("""\ +if hasattr(_module, 'iscoroutine'): + old_types = getattr(_module.iscoroutine, '_cython_coroutine_types', None) + if old_types is None or not isinstance(old_types, set): + old_types = set() + def cy_wrap(orig_func, type=type, cython_coroutine_types=old_types): + def cy_iscoroutine(obj): return type(obj) in cython_coroutine_types or orig_func(obj) + cy_iscoroutine._cython_coroutine_types = cython_coroutine_types + return cy_iscoroutine + _module.iscoroutine = cy_wrap(_module.iscoroutine) + if _cython_coroutine_type is not None: + old_types.add(_cython_coroutine_type) + if _cython_generator_type is not None: + old_types.add(_cython_generator_type) +""") + ); +//#endif +// Py < 0x03040200 + } + Py_DECREF(package); + if (unlikely(!patch_module)) goto ignore; +//#if PY_VERSION_HEX < 0x03040200 +asyncio_done: + PyErr_Clear(); +//#endif + asyncio_patched = 1; +#ifdef __Pyx_Generator_USED + // now patch inspect.isgenerator() by looking up the imported module in the patched asyncio module + { + PyObject *inspect_module; + if (patch_module) { + inspect_module = PyObject_GetAttr(patch_module, PYIDENT("inspect")); + Py_DECREF(patch_module); + } else { + inspect_module = __Pyx_Import(PYIDENT("inspect"), NULL, 0); + } + if (unlikely(!inspect_module)) goto ignore; + inspect_module = __Pyx_patch_inspect(inspect_module); + if (unlikely(!inspect_module)) { + Py_DECREF(module); + module = NULL; + } + Py_XDECREF(inspect_module); + } +#else + // avoid "unused" warning for __Pyx_patch_inspect() + if ((0)) return __Pyx_patch_inspect(module); +#endif + } + return module; +ignore: + PyErr_WriteUnraisable(module); + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, "Cython module failed to patch asyncio package with custom generator type", 1) < 0)) { + Py_DECREF(module); + module = NULL; + } +#else + // avoid "unused" warning for __Pyx_Coroutine_patch_module() + if ((0)) return __Pyx_patch_inspect(__Pyx_Coroutine_patch_module(module, NULL)); +#endif + return module; +} + + +//////////////////// PatchInspect.proto //////////////////// + +// run after importing "inspect" to patch Cython generator support into it +static PyObject* __Pyx_patch_inspect(PyObject* module); /*proto*/ + +//////////////////// PatchInspect //////////////////// +//@requires: PatchModuleWithCoroutine + +static PyObject* __Pyx_patch_inspect(PyObject* module) { +#if defined(__Pyx_Generator_USED) && (!defined(CYTHON_PATCH_INSPECT) || CYTHON_PATCH_INSPECT) + static int inspect_patched = 0; + if (unlikely((!inspect_patched) && module)) { + module = __Pyx_Coroutine_patch_module( + module, CSTRING("""\ +old_types = getattr(_module.isgenerator, '_cython_generator_types', None) +if old_types is None or not isinstance(old_types, set): + old_types = set() + def cy_wrap(orig_func, type=type, cython_generator_types=old_types): + def cy_isgenerator(obj): return type(obj) in cython_generator_types or orig_func(obj) + cy_isgenerator._cython_generator_types = cython_generator_types + return cy_isgenerator + _module.isgenerator = cy_wrap(_module.isgenerator) +old_types.add(_cython_generator_type) +""") + ); + inspect_patched = 1; + } +#else + // avoid "unused" warning for __Pyx_Coroutine_patch_module() + if ((0)) return 
__Pyx_Coroutine_patch_module(module, NULL); +#endif + return module; +} + + +//////////////////// StopAsyncIteration.proto //////////////////// + +#define __Pyx_StopAsyncIteration_USED +static PyObject *__Pyx_PyExc_StopAsyncIteration; +static int __pyx_StopAsyncIteration_init(void); /*proto*/ + +//////////////////// StopAsyncIteration //////////////////// + +#if PY_VERSION_HEX < 0x030500B1 +static PyTypeObject __Pyx__PyExc_StopAsyncIteration_type = { + PyVarObject_HEAD_INIT(0, 0) + "StopAsyncIteration", /*tp_name*/ + sizeof(PyBaseExceptionObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + 0, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_compare / reserved*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + PyDoc_STR("Signal the end from iterator.__anext__()."), /*tp_doc*/ + 0, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ +#if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ +#endif +}; +#endif + +static int __pyx_StopAsyncIteration_init(void) { +#if PY_VERSION_HEX >= 0x030500B1 + __Pyx_PyExc_StopAsyncIteration = PyExc_StopAsyncIteration; +#else + PyObject *builtins = PyEval_GetBuiltins(); + if (likely(builtins)) { + PyObject *exc = PyMapping_GetItemString(builtins, (char*) "StopAsyncIteration"); + if (exc) { + __Pyx_PyExc_StopAsyncIteration = exc; + return 0; + } + } + PyErr_Clear(); + + __Pyx__PyExc_StopAsyncIteration_type.tp_traverse = ((PyTypeObject*)PyExc_BaseException)->tp_traverse; + __Pyx__PyExc_StopAsyncIteration_type.tp_clear = ((PyTypeObject*)PyExc_BaseException)->tp_clear; + __Pyx__PyExc_StopAsyncIteration_type.tp_dictoffset = ((PyTypeObject*)PyExc_BaseException)->tp_dictoffset; + __Pyx__PyExc_StopAsyncIteration_type.tp_base = (PyTypeObject*)PyExc_Exception; + + __Pyx_PyExc_StopAsyncIteration = (PyObject*) __Pyx_FetchCommonType(&__Pyx__PyExc_StopAsyncIteration_type); + if (unlikely(!__Pyx_PyExc_StopAsyncIteration)) + return -1; + if (builtins && unlikely(PyMapping_SetItemString(builtins, (char*) "StopAsyncIteration", __Pyx_PyExc_StopAsyncIteration) < 0)) + return -1; +#endif + return 0; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/CpdefEnums.pyx b/venv/lib/python3.8/site-packages/Cython/Utility/CpdefEnums.pyx new file mode 100644 index 0000000..148d776 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/CpdefEnums.pyx @@ -0,0 +1,66 @@ +#################### EnumBase #################### + +cimport cython + +cdef extern from *: + int PY_VERSION_HEX + +cdef object __Pyx_OrderedDict +if PY_VERSION_HEX >= 0x02070000: + from collections import OrderedDict as __Pyx_OrderedDict +else: + __Pyx_OrderedDict = dict + +@cython.internal +cdef class __Pyx_EnumMeta(type): + def __init__(cls, name, parents, dct): + type.__init__(cls, name, parents, dct) + cls.__members__ = __Pyx_OrderedDict() + def __iter__(cls): + return 
iter(cls.__members__.values()) + def __getitem__(cls, name): + return cls.__members__[name] + +# @cython.internal +cdef object __Pyx_EnumBase +class __Pyx_EnumBase(int): + __metaclass__ = __Pyx_EnumMeta + def __new__(cls, value, name=None): + for v in cls: + if v == value: + return v + if name is None: + raise ValueError("Unknown enum value: '%s'" % value) + res = int.__new__(cls, value) + res.name = name + setattr(cls, name, res) + cls.__members__[name] = res + return res + def __repr__(self): + return "<%s.%s: %d>" % (self.__class__.__name__, self.name, self) + def __str__(self): + return "%s.%s" % (self.__class__.__name__, self.name) + +if PY_VERSION_HEX >= 0x03040000: + from enum import IntEnum as __Pyx_EnumBase + +#################### EnumType #################### +#@requires: EnumBase + +cdef dict __Pyx_globals = globals() +if PY_VERSION_HEX >= 0x03040000: + # create new IntEnum() + {{name}} = __Pyx_EnumBase('{{name}}', __Pyx_OrderedDict([ + {{for item in items}} + ('{{item}}', {{item}}), + {{endfor}} + ])) + {{for item in items}} + __Pyx_globals['{{item}}'] = {{name}}.{{item}} + {{endfor}} +else: + class {{name}}(__Pyx_EnumBase): + pass + {{for item in items}} + __Pyx_globals['{{item}}'] = {{name}}({{item}}, '{{item}}') + {{endfor}} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/CppConvert.pyx b/venv/lib/python3.8/site-packages/Cython/Utility/CppConvert.pyx new file mode 100644 index 0000000..03360e5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/CppConvert.pyx @@ -0,0 +1,237 @@ +# TODO: Figure out how many of the pass-by-value copies the compiler can eliminate. + + +#################### string.from_py #################### + +cdef extern from *: + cdef cppclass string "{{type}}": + string() + string(char* c_str, size_t size) + cdef const char* __Pyx_PyObject_AsStringAndSize(object, Py_ssize_t*) except NULL + +@cname("{{cname}}") +cdef string {{cname}}(object o) except *: + cdef Py_ssize_t length = 0 + cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) + return string(data, length) + + +#################### string.to_py #################### + +#cimport cython +#from libcpp.string cimport string +cdef extern from *: + cdef cppclass string "{{type}}": + char* data() + size_t size() + +{{for py_type in ['PyObject', 'PyUnicode', 'PyStr', 'PyBytes', 'PyByteArray']}} +cdef extern from *: + cdef object __Pyx_{{py_type}}_FromStringAndSize(const char*, size_t) + +@cname("{{cname.replace("PyObject", py_type, 1)}}") +cdef inline object {{cname.replace("PyObject", py_type, 1)}}(const string& s): + return __Pyx_{{py_type}}_FromStringAndSize(s.data(), s.size()) +{{endfor}} + + +#################### vector.from_py #################### + +cdef extern from *: + cdef cppclass vector "std::vector" [T]: + void push_back(T&) + +@cname("{{cname}}") +cdef vector[X] {{cname}}(object o) except *: + cdef vector[X] v + for item in o: + v.push_back(item) + return v + + +#################### vector.to_py #################### + +cdef extern from *: + cdef cppclass vector "const std::vector" [T]: + size_t size() + T& operator[](size_t) + +@cname("{{cname}}") +cdef object {{cname}}(vector[X]& v): + return [v[i] for i in range(v.size())] + + +#################### list.from_py #################### + +cdef extern from *: + cdef cppclass cpp_list "std::list" [T]: + void push_back(T&) + +@cname("{{cname}}") +cdef cpp_list[X] {{cname}}(object o) except *: + cdef cpp_list[X] l + for item in o: + l.push_back(item) + return l + + +#################### list.to_py 
#################### + +cimport cython + +cdef extern from *: + cdef cppclass cpp_list "std::list" [T]: + cppclass const_iterator: + T& operator*() + const_iterator operator++() + bint operator!=(const_iterator) + const_iterator begin() + const_iterator end() + +@cname("{{cname}}") +cdef object {{cname}}(const cpp_list[X]& v): + o = [] + cdef cpp_list[X].const_iterator iter = v.begin() + while iter != v.end(): + o.append(cython.operator.dereference(iter)) + cython.operator.preincrement(iter) + return o + + +#################### set.from_py #################### + +cdef extern from *: + cdef cppclass set "std::{{maybe_unordered}}set" [T]: + void insert(T&) + +@cname("{{cname}}") +cdef set[X] {{cname}}(object o) except *: + cdef set[X] s + for item in o: + s.insert(item) + return s + + +#################### set.to_py #################### + +cimport cython + +cdef extern from *: + cdef cppclass cpp_set "std::{{maybe_unordered}}set" [T]: + cppclass const_iterator: + T& operator*() + const_iterator operator++() + bint operator!=(const_iterator) + const_iterator begin() + const_iterator end() + +@cname("{{cname}}") +cdef object {{cname}}(const cpp_set[X]& s): + o = set() + cdef cpp_set[X].const_iterator iter = s.begin() + while iter != s.end(): + o.add(cython.operator.dereference(iter)) + cython.operator.preincrement(iter) + return o + +#################### pair.from_py #################### + +cdef extern from *: + cdef cppclass pair "std::pair" [T, U]: + pair() + pair(T&, U&) + +@cname("{{cname}}") +cdef pair[X,Y] {{cname}}(object o) except *: + x, y = o + return pair[X,Y](x, y) + + +#################### pair.to_py #################### + +cdef extern from *: + cdef cppclass pair "std::pair" [T, U]: + T first + U second + +@cname("{{cname}}") +cdef object {{cname}}(const pair[X,Y]& p): + return p.first, p.second + + +#################### map.from_py #################### + +cdef extern from *: + cdef cppclass pair "std::pair" [T, U]: + pair(T&, U&) + cdef cppclass map "std::{{maybe_unordered}}map" [T, U]: + void insert(pair[T, U]&) + cdef cppclass vector "std::vector" [T]: + pass + + +@cname("{{cname}}") +cdef map[X,Y] {{cname}}(object o) except *: + cdef dict d = o + cdef map[X,Y] m + for key, value in d.iteritems(): + m.insert(pair[X,Y](key, value)) + return m + + +#################### map.to_py #################### +# TODO: Work out const so that this can take a const +# reference rather than pass by value. 
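+# Illustration (hypothetical user code, not part of this utility): the
+# converter below is what backs automatic std::map -> dict conversion in
+# user .pyx files, roughly:
+#
+#   from libcpp.map cimport map
+#   from libcpp.string cimport string
+#
+#   def get_counts():
+#       cdef map[string, int] m
+#       m[b"spam"] = 3
+#       return m    # reaches Python as {b'spam': 3} via this converter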
+
+cimport cython
+
+cdef extern from *:
+    cdef cppclass map "std::{{maybe_unordered}}map" [T, U]:
+        cppclass value_type:
+            T first
+            U second
+        cppclass const_iterator:
+            value_type& operator*()
+            const_iterator operator++()
+            bint operator!=(const_iterator)
+        const_iterator begin()
+        const_iterator end()
+
+@cname("{{cname}}")
+cdef object {{cname}}(const map[X,Y]& s):
+    o = {}
+    cdef const map[X,Y].value_type *key_value
+    cdef map[X,Y].const_iterator iter = s.begin()
+    while iter != s.end():
+        key_value = &cython.operator.dereference(iter)
+        o[key_value.first] = key_value.second
+        cython.operator.preincrement(iter)
+    return o
+
+
+#################### complex.from_py ####################
+
+cdef extern from *:
+    cdef cppclass std_complex "std::complex" [T]:
+        std_complex()
+        std_complex(T, T) except +
+
+@cname("{{cname}}")
+cdef std_complex[X] {{cname}}(object o) except *:
+    cdef double complex z = o
+    return std_complex[X](z.real, z.imag)
+
+
+#################### complex.to_py ####################
+
+cdef extern from *:
+    cdef cppclass std_complex "std::complex" [T]:
+        X real()
+        X imag()
+
+@cname("{{cname}}")
+cdef object {{cname}}(const std_complex[X]& z):
+    cdef double complex tmp
+    tmp.real = z.real()
+    tmp.imag = z.imag()
+    return tmp
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/CppSupport.cpp b/venv/lib/python3.8/site-packages/Cython/Utility/CppSupport.cpp
new file mode 100644
index 0000000..b8fcff0
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/CppSupport.cpp
@@ -0,0 +1,58 @@
+/////////////// CppExceptionConversion.proto ///////////////
+
+#ifndef __Pyx_CppExn2PyErr
+#include <new>
+#include <typeinfo>
+#include <stdexcept>
+#include <ios>
+
+static void __Pyx_CppExn2PyErr() {
+  // Catch a handful of different errors here and turn them into the
+  // equivalent Python errors.
+  try {
+    if (PyErr_Occurred())
+      ; // let the latest Python exn pass through and ignore the current one
+    else
+      throw;
+  } catch (const std::bad_alloc& exn) {
+    PyErr_SetString(PyExc_MemoryError, exn.what());
+  } catch (const std::bad_cast& exn) {
+    PyErr_SetString(PyExc_TypeError, exn.what());
+  } catch (const std::bad_typeid& exn) {
+    PyErr_SetString(PyExc_TypeError, exn.what());
+  } catch (const std::domain_error& exn) {
+    PyErr_SetString(PyExc_ValueError, exn.what());
+  } catch (const std::invalid_argument& exn) {
+    PyErr_SetString(PyExc_ValueError, exn.what());
+  } catch (const std::ios_base::failure& exn) {
+    // Unfortunately, in standard C++ we have no way of distinguishing EOF
+    // from other errors here; be careful with the exception mask
+    PyErr_SetString(PyExc_IOError, exn.what());
+  } catch (const std::out_of_range& exn) {
+    // Change out_of_range to IndexError
+    PyErr_SetString(PyExc_IndexError, exn.what());
+  } catch (const std::overflow_error& exn) {
+    PyErr_SetString(PyExc_OverflowError, exn.what());
+  } catch (const std::range_error& exn) {
+    PyErr_SetString(PyExc_ArithmeticError, exn.what());
+  } catch (const std::underflow_error& exn) {
+    PyErr_SetString(PyExc_ArithmeticError, exn.what());
+  } catch (const std::exception& exn) {
+    PyErr_SetString(PyExc_RuntimeError, exn.what());
+  }
+  catch (...)
+  {
+    PyErr_SetString(PyExc_RuntimeError, "Unknown exception");
+  }
+}
+#endif
+
+/////////////// PythranConversion.proto ///////////////
+
+template <class T>
+auto __Pyx_pythran_to_python(T &&value) -> decltype(to_python(
+      typename pythonic::returnable<typename std::remove_cv<typename std::remove_reference<T>::type>::type>::type{std::forward<T>(value)}))
+{
+  using returnable_type = typename pythonic::returnable<typename std::remove_cv<typename std::remove_reference<T>::type>::type>::type;
+  return to_python(returnable_type{std::forward<T>(value)});
+}
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/CythonFunction.c b/venv/lib/python3.8/site-packages/Cython/Utility/CythonFunction.c
new file mode 100644
index 0000000..8f23497
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/CythonFunction.c
@@ -0,0 +1,1332 @@
+
+//////////////////// CythonFunctionShared.proto ////////////////////
+
+#define __Pyx_CyFunction_USED 1
+
+#define __Pyx_CYFUNCTION_STATICMETHOD  0x01
+#define __Pyx_CYFUNCTION_CLASSMETHOD   0x02
+#define __Pyx_CYFUNCTION_CCLASS        0x04
+
+#define __Pyx_CyFunction_GetClosure(f) \
+    (((__pyx_CyFunctionObject *) (f))->func_closure)
+#define __Pyx_CyFunction_GetClassObj(f) \
+    (((__pyx_CyFunctionObject *) (f))->func_classobj)
+
+#define __Pyx_CyFunction_Defaults(type, f) \
+    ((type *)(((__pyx_CyFunctionObject *) (f))->defaults))
+#define __Pyx_CyFunction_SetDefaultsGetter(f, g) \
+    ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g)
+
+
+typedef struct {
+    PyCFunctionObject func;
+#if PY_VERSION_HEX < 0x030500A0
+    PyObject *func_weakreflist;
+#endif
+    PyObject *func_dict;
+    PyObject *func_name;
+    PyObject *func_qualname;
+    PyObject *func_doc;
+    PyObject *func_globals;
+    PyObject *func_code;
+    PyObject *func_closure;
+    // No-args super() class cell
+    PyObject *func_classobj;
+
+    // Dynamic default args and annotations
+    void *defaults;
+    int defaults_pyobjects;
+    size_t defaults_size;  // used by FusedFunction for copying defaults
+    int flags;
+
+    // Defaults info
+    PyObject *defaults_tuple;   /* Const defaults tuple */
+    PyObject *defaults_kwdict;  /* Const kwonly defaults dict */
+    PyObject *(*defaults_getter)(PyObject *);
+    PyObject *func_annotations; /* function annotations dict */
+} __pyx_CyFunctionObject;
+
+static PyTypeObject *__pyx_CyFunctionType = 0;
+
+#define __Pyx_CyFunction_Check(obj)  (__Pyx_TypeCheck(obj, __pyx_CyFunctionType))
+
+static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml,
+                                       int flags, PyObject* qualname,
+                                       PyObject *self,
+                                       PyObject *module, PyObject *globals,
+                                       PyObject* code);
+
+static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m,
+                                                         size_t size,
+                                                         int pyobjects);
+static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m,
+                                                            PyObject *tuple);
+static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m,
+                                                             PyObject *dict);
+static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m,
+                                                              PyObject *dict);
+
+
+static int __pyx_CyFunction_init(void);
+
+
+//////////////////// CythonFunctionShared ////////////////////
+//@substitute: naming
+//@requires: CommonStructures.c::FetchCommonType
+////@requires: ObjectHandling.c::PyObjectGetAttrStr
+
+#include <structmember.h>
+
+static PyObject *
+__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *closure)
+{
+    if (unlikely(op->func_doc == NULL)) {
+        if (op->func.m_ml->ml_doc) {
+#if PY_MAJOR_VERSION >= 3
+            op->func_doc = PyUnicode_FromString(op->func.m_ml->ml_doc);
+#else
+            op->func_doc = PyString_FromString(op->func.m_ml->ml_doc);
+#endif
+            if (unlikely(op->func_doc == NULL))
+                return NULL;
+        } else {
+            Py_INCREF(Py_None);
return Py_None; + } + } + Py_INCREF(op->func_doc); + return op->func_doc; +} + +static int +__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, CYTHON_UNUSED void *context) +{ + PyObject *tmp = op->func_doc; + if (value == NULL) { + // Mark as deleted + value = Py_None; + } + Py_INCREF(value); + op->func_doc = value; + Py_XDECREF(tmp); + return 0; +} + +static PyObject * +__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) +{ + if (unlikely(op->func_name == NULL)) { +#if PY_MAJOR_VERSION >= 3 + op->func_name = PyUnicode_InternFromString(op->func.m_ml->ml_name); +#else + op->func_name = PyString_InternFromString(op->func.m_ml->ml_name); +#endif + if (unlikely(op->func_name == NULL)) + return NULL; + } + Py_INCREF(op->func_name); + return op->func_name; +} + +static int +__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, CYTHON_UNUSED void *context) +{ + PyObject *tmp; + +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + tmp = op->func_name; + Py_INCREF(value); + op->func_name = value; + Py_XDECREF(tmp); + return 0; +} + +static PyObject * +__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) +{ + Py_INCREF(op->func_qualname); + return op->func_qualname; +} + +static int +__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, CYTHON_UNUSED void *context) +{ + PyObject *tmp; + +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + tmp = op->func_qualname; + Py_INCREF(value); + op->func_qualname = value; + Py_XDECREF(tmp); + return 0; +} + +static PyObject * +__Pyx_CyFunction_get_self(__pyx_CyFunctionObject *m, CYTHON_UNUSED void *closure) +{ + PyObject *self; + + self = m->func_closure; + if (self == NULL) + self = Py_None; + Py_INCREF(self); + return self; +} + +static PyObject * +__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) +{ + if (unlikely(op->func_dict == NULL)) { + op->func_dict = PyDict_New(); + if (unlikely(op->func_dict == NULL)) + return NULL; + } + Py_INCREF(op->func_dict); + return op->func_dict; +} + +static int +__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, CYTHON_UNUSED void *context) +{ + PyObject *tmp; + + if (unlikely(value == NULL)) { + PyErr_SetString(PyExc_TypeError, + "function's dictionary may not be deleted"); + return -1; + } + if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "setting function's dictionary to a non-dict"); + return -1; + } + tmp = op->func_dict; + Py_INCREF(value); + op->func_dict = value; + Py_XDECREF(tmp); + return 0; +} + +static PyObject * +__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) +{ + Py_INCREF(op->func_globals); + return op->func_globals; +} + +static PyObject * +__Pyx_CyFunction_get_closure(CYTHON_UNUSED __pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) +{ + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject * +__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) +{ + PyObject* result = (op->func_code) ? 
op->func_code : Py_None; + Py_INCREF(result); + return result; +} + +static int +__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { + int result = 0; + PyObject *res = op->defaults_getter((PyObject *) op); + if (unlikely(!res)) + return -1; + + // Cache result + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + op->defaults_tuple = PyTuple_GET_ITEM(res, 0); + Py_INCREF(op->defaults_tuple); + op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); + Py_INCREF(op->defaults_kwdict); + #else + op->defaults_tuple = PySequence_ITEM(res, 0); + if (unlikely(!op->defaults_tuple)) result = -1; + else { + op->defaults_kwdict = PySequence_ITEM(res, 1); + if (unlikely(!op->defaults_kwdict)) result = -1; + } + #endif + Py_DECREF(res); + return result; +} + +static int +__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, CYTHON_UNUSED void *context) { + PyObject* tmp; + if (!value) { + // del => explicit None to prevent rebuilding + value = Py_None; + } else if (value != Py_None && !PyTuple_Check(value)) { + PyErr_SetString(PyExc_TypeError, + "__defaults__ must be set to a tuple object"); + return -1; + } + Py_INCREF(value); + tmp = op->defaults_tuple; + op->defaults_tuple = value; + Py_XDECREF(tmp); + return 0; +} + +static PyObject * +__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) { + PyObject* result = op->defaults_tuple; + if (unlikely(!result)) { + if (op->defaults_getter) { + if (__Pyx_CyFunction_init_defaults(op) < 0) return NULL; + result = op->defaults_tuple; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} + +static int +__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, CYTHON_UNUSED void *context) { + PyObject* tmp; + if (!value) { + // del => explicit None to prevent rebuilding + value = Py_None; + } else if (value != Py_None && !PyDict_Check(value)) { + PyErr_SetString(PyExc_TypeError, + "__kwdefaults__ must be set to a dict object"); + return -1; + } + Py_INCREF(value); + tmp = op->defaults_kwdict; + op->defaults_kwdict = value; + Py_XDECREF(tmp); + return 0; +} + +static PyObject * +__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) { + PyObject* result = op->defaults_kwdict; + if (unlikely(!result)) { + if (op->defaults_getter) { + if (__Pyx_CyFunction_init_defaults(op) < 0) return NULL; + result = op->defaults_kwdict; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} + +static int +__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, CYTHON_UNUSED void *context) { + PyObject* tmp; + if (!value || value == Py_None) { + value = NULL; + } else if (!PyDict_Check(value)) { + PyErr_SetString(PyExc_TypeError, + "__annotations__ must be set to a dict object"); + return -1; + } + Py_XINCREF(value); + tmp = op->func_annotations; + op->func_annotations = value; + Py_XDECREF(tmp); + return 0; +} + +static PyObject * +__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) { + PyObject* result = op->func_annotations; + if (unlikely(!result)) { + result = PyDict_New(); + if (unlikely(!result)) return NULL; + op->func_annotations = result; + } + Py_INCREF(result); + return result; +} + +//#if PY_VERSION_HEX >= 0x030400C1 +//static PyObject * +//__Pyx_CyFunction_get_signature(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) { +// PyObject *inspect_module, *signature_class, *signature; +// // from inspect import Signature +// 
inspect_module = PyImport_ImportModuleLevelObject(PYIDENT("inspect"), NULL, NULL, NULL, 0); +// if (unlikely(!inspect_module)) +// goto bad; +// signature_class = __Pyx_PyObject_GetAttrStr(inspect_module, PYIDENT("Signature")); +// Py_DECREF(inspect_module); +// if (unlikely(!signature_class)) +// goto bad; +// // return Signature.from_function(op) +// signature = PyObject_CallMethodObjArgs(signature_class, PYIDENT("from_function"), op, NULL); +// Py_DECREF(signature_class); +// if (likely(signature)) +// return signature; +//bad: +// // make sure we raise an AttributeError from this property on any errors +// if (!PyErr_ExceptionMatches(PyExc_AttributeError)) +// PyErr_SetString(PyExc_AttributeError, "failed to calculate __signature__"); +// return NULL; +//} +//#endif + +static PyGetSetDef __pyx_CyFunction_getsets[] = { + {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, + {(char *) "__self__", (getter)__Pyx_CyFunction_get_self, 0, 0, 0}, + {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, + {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, +//#if PY_VERSION_HEX >= 0x030400C1 +// {(char *) "__signature__", (getter)__Pyx_CyFunction_get_signature, 0, 0, 0}, +//#endif + {0, 0, 0, 0, 0} +}; + +static PyMemberDef __pyx_CyFunction_members[] = { + {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), PY_WRITE_RESTRICTED, 0}, + {0, 0, 0, 0, 0} +}; + +static PyObject * +__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, CYTHON_UNUSED PyObject *args) +{ +#if PY_MAJOR_VERSION >= 3 + return PyUnicode_FromString(m->func.m_ml->ml_name); +#else + return PyString_FromString(m->func.m_ml->ml_name); +#endif +} + +static PyMethodDef __pyx_CyFunction_methods[] = { + {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, + {0, 0, 0, 0} +}; + + +#if PY_VERSION_HEX < 0x030500A0 +#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) +#else +#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func.m_weakreflist) +#endif + +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int 
flags, PyObject* qualname,
+                                       PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) {
+    if (unlikely(op == NULL))
+        return NULL;
+    op->flags = flags;
+    __Pyx_CyFunction_weakreflist(op) = NULL;
+    op->func.m_ml = ml;
+    op->func.m_self = (PyObject *) op;
+    Py_XINCREF(closure);
+    op->func_closure = closure;
+    Py_XINCREF(module);
+    op->func.m_module = module;
+    op->func_dict = NULL;
+    op->func_name = NULL;
+    Py_INCREF(qualname);
+    op->func_qualname = qualname;
+    op->func_doc = NULL;
+    op->func_classobj = NULL;
+    op->func_globals = globals;
+    Py_INCREF(op->func_globals);
+    Py_XINCREF(code);
+    op->func_code = code;
+    // Dynamic Default args
+    op->defaults_pyobjects = 0;
+    op->defaults_size = 0;
+    op->defaults = NULL;
+    op->defaults_tuple = NULL;
+    op->defaults_kwdict = NULL;
+    op->defaults_getter = NULL;
+    op->func_annotations = NULL;
+    return (PyObject *) op;
+}
+
+static int
+__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m)
+{
+    Py_CLEAR(m->func_closure);
+    Py_CLEAR(m->func.m_module);
+    Py_CLEAR(m->func_dict);
+    Py_CLEAR(m->func_name);
+    Py_CLEAR(m->func_qualname);
+    Py_CLEAR(m->func_doc);
+    Py_CLEAR(m->func_globals);
+    Py_CLEAR(m->func_code);
+    Py_CLEAR(m->func_classobj);
+    Py_CLEAR(m->defaults_tuple);
+    Py_CLEAR(m->defaults_kwdict);
+    Py_CLEAR(m->func_annotations);
+
+    if (m->defaults) {
+        PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);
+        int i;
+
+        for (i = 0; i < m->defaults_pyobjects; i++)
+            Py_XDECREF(pydefaults[i]);
+
+        PyObject_Free(m->defaults);
+        m->defaults = NULL;
+    }
+
+    return 0;
+}
+
+static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m)
+{
+    if (__Pyx_CyFunction_weakreflist(m) != NULL)
+        PyObject_ClearWeakRefs((PyObject *) m);
+    __Pyx_CyFunction_clear(m);
+    PyObject_GC_Del(m);
+}
+
+static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m)
+{
+    PyObject_GC_UnTrack(m);
+    __Pyx__CyFunction_dealloc(m);
+}
+
+static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg)
+{
+    Py_VISIT(m->func_closure);
+    Py_VISIT(m->func.m_module);
+    Py_VISIT(m->func_dict);
+    Py_VISIT(m->func_name);
+    Py_VISIT(m->func_qualname);
+    Py_VISIT(m->func_doc);
+    Py_VISIT(m->func_globals);
+    Py_VISIT(m->func_code);
+    Py_VISIT(m->func_classobj);
+    Py_VISIT(m->defaults_tuple);
+    Py_VISIT(m->defaults_kwdict);
+
+    if (m->defaults) {
+        PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);
+        int i;
+
+        for (i = 0; i < m->defaults_pyobjects; i++)
+            Py_VISIT(pydefaults[i]);
+    }
+
+    return 0;
+}
+
+static PyObject *__Pyx_CyFunction_descr_get(PyObject *func, PyObject *obj, PyObject *type)
+{
+    __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;
+
+    if (m->flags & __Pyx_CYFUNCTION_STATICMETHOD) {
+        Py_INCREF(func);
+        return func;
+    }
+
+    if (m->flags & __Pyx_CYFUNCTION_CLASSMETHOD) {
+        if (type == NULL)
+            type = (PyObject *)(Py_TYPE(obj));
+        return __Pyx_PyMethod_New(func, type, (PyObject *)(Py_TYPE(type)));
+    }
+
+    if (obj == Py_None)
+        obj = NULL;
+    return __Pyx_PyMethod_New(func, obj, type);
+}
+
+static PyObject*
+__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op)
+{
+#if PY_MAJOR_VERSION >= 3
+    return PyUnicode_FromFormat("<cyfunction %U at %p>",
+                                op->func_qualname, (void *)op);
+#else
+    return PyString_FromFormat("<cyfunction %s at %p>",
+                               PyString_AsString(op->func_qualname), (void *)op);
+#endif
+}
+
+static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) {
+    // originally copied from PyCFunction_Call() in CPython's Objects/methodobject.c
+    PyCFunctionObject* f = (PyCFunctionObject*)func;
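+    /* Calling-convention reference for the dispatch below: a sketch based on
+     * CPython's documented METH_* conventions, not additional Cython API:
+     *
+     *   METH_NOARGS                 PyObject *f(PyObject *self, PyObject *unused)
+     *   METH_O                      PyObject *f(PyObject *self, PyObject *arg)
+     *   METH_VARARGS                PyObject *f(PyObject *self, PyObject *args)
+     *   METH_VARARGS|METH_KEYWORDS  PyObject *f(PyObject *self, PyObject *args, PyObject *kw)
+     *
+     * The switch rejects keyword arguments for conventions that cannot take
+     * them and unpacks the single positional argument for METH_O. */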
PyCFunction meth = f->m_ml->ml_meth; + Py_ssize_t size; + + switch (f->m_ml->ml_flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { + case METH_VARARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) + return (*meth)(self, arg); + break; + case METH_VARARGS | METH_KEYWORDS: + return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); + case METH_NOARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { + size = PyTuple_GET_SIZE(arg); + if (likely(size == 0)) + return (*meth)(self, NULL); + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); + return NULL; + } + break; + case METH_O: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { + size = PyTuple_GET_SIZE(arg); + if (likely(size == 1)) { + PyObject *result, *arg0; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + arg0 = PyTuple_GET_ITEM(arg, 0); + #else + arg0 = PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; + #endif + result = (*meth)(self, arg0); + #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(arg0); + #endif + return result; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); + return NULL; + } + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags in " + "__Pyx_CyFunction_Call. METH_OLDARGS is no " + "longer supported!"); + + return NULL; + } + PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", + f->m_ml->ml_name); + return NULL; +} + +static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { + return __Pyx_CyFunction_CallMethod(func, ((PyCFunctionObject*)func)->m_self, arg, kw); +} + +static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { + PyObject *result; + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + Py_ssize_t argc; + PyObject *new_args; + PyObject *self; + + argc = PyTuple_GET_SIZE(args); + new_args = PyTuple_GetSlice(args, 1, argc); + + if (unlikely(!new_args)) + return NULL; + + self = PyTuple_GetItem(args, 0); + if (unlikely(!self)) { + Py_DECREF(new_args); + return NULL; + } + + result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); + Py_DECREF(new_args); + } else { + result = __Pyx_CyFunction_Call(func, args, kw); + } + return result; +} + +static PyTypeObject __pyx_CyFunctionType_type = { + PyVarObject_HEAD_INIT(0, 0) + "cython_function_or_method", /*tp_name*/ + sizeof(__pyx_CyFunctionObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor) __Pyx_CyFunction_dealloc, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ +#if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ +#else + 0, /*reserved*/ +#endif + (reprfunc) __Pyx_CyFunction_repr, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + __Pyx_CyFunction_CallAsMethod, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + (traverseproc) __Pyx_CyFunction_traverse, /*tp_traverse*/ + (inquiry) __Pyx_CyFunction_clear, /*tp_clear*/ + 0, /*tp_richcompare*/ +#if PY_VERSION_HEX < 0x030500A0 + offsetof(__pyx_CyFunctionObject, func_weakreflist), /*tp_weaklistoffset*/ +#else + offsetof(PyCFunctionObject, m_weakreflist), 
/*tp_weaklistoffset*/ +#endif + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_CyFunction_methods, /*tp_methods*/ + __pyx_CyFunction_members, /*tp_members*/ + __pyx_CyFunction_getsets, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + __Pyx_CyFunction_descr_get, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + offsetof(__pyx_CyFunctionObject, func_dict),/*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ +#if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + + +static int __pyx_CyFunction_init(void) { + __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); + if (unlikely(__pyx_CyFunctionType == NULL)) { + return -1; + } + return 0; +} + +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + + m->defaults = PyObject_Malloc(size); + if (unlikely(!m->defaults)) + return PyErr_NoMemory(); + memset(m->defaults, 0, size); + m->defaults_pyobjects = pyobjects; + m->defaults_size = size; + return m->defaults; +} + +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_tuple = tuple; + Py_INCREF(tuple); +} + +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_kwdict = dict; + Py_INCREF(dict); +} + +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->func_annotations = dict; + Py_INCREF(dict); +} + + +//////////////////// CythonFunction.proto //////////////////// + +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); + +//////////////////// CythonFunction //////////////////// +//@requires: CythonFunctionShared + +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + PyObject *op = __Pyx_CyFunction_Init( + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + PyObject_GC_Track(op); + } + return op; +} + + +//////////////////// CyFunctionClassCell.proto //////////////////// +static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj);/*proto*/ + +//////////////////// CyFunctionClassCell //////////////////// +//@requires: CythonFunctionShared + +static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj) { + Py_ssize_t i, count = PyList_GET_SIZE(cyfunctions); + + for (i = 0; i < count; i++) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyList_GET_ITEM(cyfunctions, i); +#else + PySequence_ITEM(cyfunctions, i); + if (unlikely(!m)) + return -1; +#endif + Py_INCREF(classobj); + m->func_classobj = classobj; +#if !(CYTHON_ASSUME_SAFE_MACROS && 
!CYTHON_AVOID_BORROWED_REFS) + Py_DECREF((PyObject*)m); +#endif + } + return 0; +} + + +//////////////////// FusedFunction.proto //////////////////// + +typedef struct { + __pyx_CyFunctionObject func; + PyObject *__signatures__; + PyObject *type; + PyObject *self; +} __pyx_FusedFunctionObject; + +static PyObject *__pyx_FusedFunction_New(PyMethodDef *ml, int flags, + PyObject *qualname, PyObject *closure, + PyObject *module, PyObject *globals, + PyObject *code); + +static int __pyx_FusedFunction_clear(__pyx_FusedFunctionObject *self); +static PyTypeObject *__pyx_FusedFunctionType = NULL; +static int __pyx_FusedFunction_init(void); + +#define __Pyx_FusedFunction_USED + +//////////////////// FusedFunction //////////////////// +//@requires: CythonFunctionShared + +static PyObject * +__pyx_FusedFunction_New(PyMethodDef *ml, int flags, + PyObject *qualname, PyObject *closure, + PyObject *module, PyObject *globals, + PyObject *code) +{ + PyObject *op = __Pyx_CyFunction_Init( + // __pyx_CyFunctionObject is correct below since that's the cast that we want. + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_FusedFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + __pyx_FusedFunctionObject *fusedfunc = (__pyx_FusedFunctionObject *) op; + fusedfunc->__signatures__ = NULL; + fusedfunc->type = NULL; + fusedfunc->self = NULL; + PyObject_GC_Track(op); + } + return op; +} + +static void +__pyx_FusedFunction_dealloc(__pyx_FusedFunctionObject *self) +{ + PyObject_GC_UnTrack(self); + Py_CLEAR(self->self); + Py_CLEAR(self->type); + Py_CLEAR(self->__signatures__); + __Pyx__CyFunction_dealloc((__pyx_CyFunctionObject *) self); +} + +static int +__pyx_FusedFunction_traverse(__pyx_FusedFunctionObject *self, + visitproc visit, + void *arg) +{ + Py_VISIT(self->self); + Py_VISIT(self->type); + Py_VISIT(self->__signatures__); + return __Pyx_CyFunction_traverse((__pyx_CyFunctionObject *) self, visit, arg); +} + +static int +__pyx_FusedFunction_clear(__pyx_FusedFunctionObject *self) +{ + Py_CLEAR(self->self); + Py_CLEAR(self->type); + Py_CLEAR(self->__signatures__); + return __Pyx_CyFunction_clear((__pyx_CyFunctionObject *) self); +} + + +static PyObject * +__pyx_FusedFunction_descr_get(PyObject *self, PyObject *obj, PyObject *type) +{ + __pyx_FusedFunctionObject *func, *meth; + + func = (__pyx_FusedFunctionObject *) self; + + if (func->self || func->func.flags & __Pyx_CYFUNCTION_STATICMETHOD) { + // Do not allow rebinding and don't do anything for static methods + Py_INCREF(self); + return self; + } + + if (obj == Py_None) + obj = NULL; + + meth = (__pyx_FusedFunctionObject *) __pyx_FusedFunction_New( + ((PyCFunctionObject *) func)->m_ml, + ((__pyx_CyFunctionObject *) func)->flags, + ((__pyx_CyFunctionObject *) func)->func_qualname, + ((__pyx_CyFunctionObject *) func)->func_closure, + ((PyCFunctionObject *) func)->m_module, + ((__pyx_CyFunctionObject *) func)->func_globals, + ((__pyx_CyFunctionObject *) func)->func_code); + if (!meth) + return NULL; + + // defaults needs copying fully rather than just copying the pointer + // since otherwise it will be freed on destruction of meth despite + // belonging to func rather than meth + if (func->func.defaults) { + PyObject **pydefaults; + int i; + + if (!__Pyx_CyFunction_InitDefaults((PyObject*)meth, + func->func.defaults_size, + func->func.defaults_pyobjects)) { + Py_XDECREF((PyObject*)meth); + return NULL; + } + memcpy(meth->func.defaults, func->func.defaults, func->func.defaults_size); + + pydefaults = 
__Pyx_CyFunction_Defaults(PyObject *, meth); + for (i = 0; i < meth->func.defaults_pyobjects; i++) + Py_XINCREF(pydefaults[i]); + } + + Py_XINCREF(func->func.func_classobj); + meth->func.func_classobj = func->func.func_classobj; + + Py_XINCREF(func->__signatures__); + meth->__signatures__ = func->__signatures__; + + Py_XINCREF(type); + meth->type = type; + + Py_XINCREF(func->func.defaults_tuple); + meth->func.defaults_tuple = func->func.defaults_tuple; + + if (func->func.flags & __Pyx_CYFUNCTION_CLASSMETHOD) + obj = type; + + Py_XINCREF(obj); + meth->self = obj; + + return (PyObject *) meth; +} + +static PyObject * +_obj_to_str(PyObject *obj) +{ + if (PyType_Check(obj)) + return PyObject_GetAttr(obj, PYIDENT("__name__")); + else + return PyObject_Str(obj); +} + +static PyObject * +__pyx_FusedFunction_getitem(__pyx_FusedFunctionObject *self, PyObject *idx) +{ + PyObject *signature = NULL; + PyObject *unbound_result_func; + PyObject *result_func = NULL; + + if (self->__signatures__ == NULL) { + PyErr_SetString(PyExc_TypeError, "Function is not fused"); + return NULL; + } + + if (PyTuple_Check(idx)) { + PyObject *list = PyList_New(0); + Py_ssize_t n = PyTuple_GET_SIZE(idx); + PyObject *sep = NULL; + int i; + + if (unlikely(!list)) + return NULL; + + for (i = 0; i < n; i++) { + int ret; + PyObject *string; +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyObject *item = PyTuple_GET_ITEM(idx, i); +#else + PyObject *item = PySequence_ITEM(idx, i); if (unlikely(!item)) goto __pyx_err; +#endif + string = _obj_to_str(item); +#if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(item); +#endif + if (unlikely(!string)) goto __pyx_err; + ret = PyList_Append(list, string); + Py_DECREF(string); + if (unlikely(ret < 0)) goto __pyx_err; + } + + sep = PyUnicode_FromString("|"); + if (likely(sep)) + signature = PyUnicode_Join(sep, list); +__pyx_err: +; + Py_DECREF(list); + Py_XDECREF(sep); + } else { + signature = _obj_to_str(idx); + } + + if (!signature) + return NULL; + + unbound_result_func = PyObject_GetItem(self->__signatures__, signature); + + if (unbound_result_func) { + if (self->self || self->type) { + __pyx_FusedFunctionObject *unbound = (__pyx_FusedFunctionObject *) unbound_result_func; + + // TODO: move this to InitClassCell + Py_CLEAR(unbound->func.func_classobj); + Py_XINCREF(self->func.func_classobj); + unbound->func.func_classobj = self->func.func_classobj; + + result_func = __pyx_FusedFunction_descr_get(unbound_result_func, + self->self, self->type); + } else { + result_func = unbound_result_func; + Py_INCREF(result_func); + } + } + + Py_DECREF(signature); + Py_XDECREF(unbound_result_func); + + return result_func; +} + +static PyObject * +__pyx_FusedFunction_callfunction(PyObject *func, PyObject *args, PyObject *kw) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; + int static_specialized = (cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD && + !((__pyx_FusedFunctionObject *) func)->__signatures__); + + if (cyfunc->flags & __Pyx_CYFUNCTION_CCLASS && !static_specialized) { + return __Pyx_CyFunction_CallAsMethod(func, args, kw); + } else { + return __Pyx_CyFunction_Call(func, args, kw); + } +} + +// Note: the 'self' from method binding is passed in in the args tuple, +// whereas PyCFunctionObject's m_self is passed in as the first +// argument to the C function. For extension methods we need +// to pass 'self' as 'm_self' and not as the first element of the +// args tuple. 
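+// A concrete illustration of the note above (hypothetical call, for
+// exposition only): for a bound fused method `obj.meth(1, 2)`, the tp_call
+// slot below sees args == (obj, 1, 2) once 'self' has been re-packed into
+// the tuple, whereas a plain PyCFunction would see m_self == obj and
+// args == (1, 2).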
+ +static PyObject * +__pyx_FusedFunction_call(PyObject *func, PyObject *args, PyObject *kw) +{ + __pyx_FusedFunctionObject *binding_func = (__pyx_FusedFunctionObject *) func; + Py_ssize_t argc = PyTuple_GET_SIZE(args); + PyObject *new_args = NULL; + __pyx_FusedFunctionObject *new_func = NULL; + PyObject *result = NULL; + PyObject *self = NULL; + int is_staticmethod = binding_func->func.flags & __Pyx_CYFUNCTION_STATICMETHOD; + int is_classmethod = binding_func->func.flags & __Pyx_CYFUNCTION_CLASSMETHOD; + + if (binding_func->self) { + // Bound method call, put 'self' in the args tuple + Py_ssize_t i; + new_args = PyTuple_New(argc + 1); + if (!new_args) + return NULL; + + self = binding_func->self; +#if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_INCREF(self); +#endif + Py_INCREF(self); + PyTuple_SET_ITEM(new_args, 0, self); + + for (i = 0; i < argc; i++) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyObject *item = PyTuple_GET_ITEM(args, i); + Py_INCREF(item); +#else + PyObject *item = PySequence_ITEM(args, i); if (unlikely(!item)) goto bad; +#endif + PyTuple_SET_ITEM(new_args, i + 1, item); + } + + args = new_args; + } else if (binding_func->type) { + // Unbound method call + if (argc < 1) { + PyErr_SetString(PyExc_TypeError, "Need at least one argument, 0 given."); + return NULL; + } +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + self = PyTuple_GET_ITEM(args, 0); +#else + self = PySequence_ITEM(args, 0); if (unlikely(!self)) return NULL; +#endif + } + + if (self && !is_classmethod && !is_staticmethod) { + int is_instance = PyObject_IsInstance(self, binding_func->type); + if (unlikely(!is_instance)) { + PyErr_Format(PyExc_TypeError, + "First argument should be of type %.200s, got %.200s.", + ((PyTypeObject *) binding_func->type)->tp_name, + self->ob_type->tp_name); + goto bad; + } else if (unlikely(is_instance == -1)) { + goto bad; + } + } +#if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_XDECREF(self); + self = NULL; +#endif + + if (binding_func->__signatures__) { + PyObject *tup; + if (is_staticmethod && binding_func->func.flags & __Pyx_CYFUNCTION_CCLASS) { + // FIXME: this seems wrong, but we must currently pass the signatures dict as 'self' argument + tup = PyTuple_Pack(3, args, + kw == NULL ? Py_None : kw, + binding_func->func.defaults_tuple); + if (unlikely(!tup)) goto bad; + new_func = (__pyx_FusedFunctionObject *) __Pyx_CyFunction_CallMethod( + func, binding_func->__signatures__, tup, NULL); + } else { + tup = PyTuple_Pack(4, binding_func->__signatures__, args, + kw == NULL ? 
Py_None : kw, + binding_func->func.defaults_tuple); + if (unlikely(!tup)) goto bad; + new_func = (__pyx_FusedFunctionObject *) __pyx_FusedFunction_callfunction(func, tup, NULL); + } + Py_DECREF(tup); + + if (unlikely(!new_func)) + goto bad; + + Py_XINCREF(binding_func->func.func_classobj); + Py_CLEAR(new_func->func.func_classobj); + new_func->func.func_classobj = binding_func->func.func_classobj; + + func = (PyObject *) new_func; + } + + result = __pyx_FusedFunction_callfunction(func, args, kw); +bad: +#if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_XDECREF(self); +#endif + Py_XDECREF(new_args); + Py_XDECREF((PyObject *) new_func); + return result; +} + +static PyMemberDef __pyx_FusedFunction_members[] = { + {(char *) "__signatures__", + T_OBJECT, + offsetof(__pyx_FusedFunctionObject, __signatures__), + READONLY, + 0}, + {0, 0, 0, 0, 0}, +}; + +static PyMappingMethods __pyx_FusedFunction_mapping_methods = { + 0, + (binaryfunc) __pyx_FusedFunction_getitem, + 0, +}; + +static PyTypeObject __pyx_FusedFunctionType_type = { + PyVarObject_HEAD_INIT(0, 0) + "fused_cython_function", /*tp_name*/ + sizeof(__pyx_FusedFunctionObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor) __pyx_FusedFunction_dealloc, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ +#if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ +#else + 0, /*reserved*/ +#endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + &__pyx_FusedFunction_mapping_methods, /*tp_as_mapping*/ + 0, /*tp_hash*/ + (ternaryfunc) __pyx_FusedFunction_call, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /*tp_flags*/ + 0, /*tp_doc*/ + (traverseproc) __pyx_FusedFunction_traverse, /*tp_traverse*/ + (inquiry) __pyx_FusedFunction_clear,/*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + __pyx_FusedFunction_members, /*tp_members*/ + // __doc__ is None for the fused function type, but we need it to be + // a descriptor for the instance's __doc__, so rebuild descriptors in our subclass + __pyx_CyFunction_getsets, /*tp_getset*/ + // NOTE: tp_base may be changed later during module initialisation when importing CyFunction across modules. + &__pyx_CyFunctionType_type, /*tp_base*/ + 0, /*tp_dict*/ + __pyx_FusedFunction_descr_get, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ +#if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ +#endif +#if PY_VERSION_HEX >= 0x030800b1 + 0, /*tp_vectorcall*/ +#endif +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +}; + +static int __pyx_FusedFunction_init(void) { + // Set base from __Pyx_FetchCommonTypeFromSpec, in case it's different from the local static value. 
+    __pyx_FusedFunctionType_type.tp_base = __pyx_CyFunctionType;
+    __pyx_FusedFunctionType = __Pyx_FetchCommonType(&__pyx_FusedFunctionType_type);
+    if (__pyx_FusedFunctionType == NULL) {
+        return -1;
+    }
+    return 0;
+}
+
+//////////////////// ClassMethod.proto ////////////////////
+
+#include "descrobject.h"
+static CYTHON_UNUSED PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/
+
+//////////////////// ClassMethod ////////////////////
+
+static PyObject* __Pyx_Method_ClassMethod(PyObject *method) {
+#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM <= 0x05080000
+    if (PyObject_TypeCheck(method, &PyWrapperDescr_Type)) {
+        // cdef classes
+        return PyClassMethod_New(method);
+    }
+#else
+#if CYTHON_COMPILING_IN_PYSTON || CYTHON_COMPILING_IN_PYPY
+    // special C-API function only in Pyston and PyPy >= 5.9
+    if (PyMethodDescr_Check(method))
+#else
+    // It appears that PyMethodDescr_Type is not exposed anywhere in the CPython C-API
+    static PyTypeObject *methoddescr_type = NULL;
+    if (methoddescr_type == NULL) {
+        PyObject *meth = PyObject_GetAttrString((PyObject*)&PyList_Type, "append");
+        if (!meth) return NULL;
+        methoddescr_type = Py_TYPE(meth);
+        Py_DECREF(meth);
+    }
+    if (__Pyx_TypeCheck(method, methoddescr_type))
+#endif
+    {
+        // cdef classes
+        PyMethodDescrObject *descr = (PyMethodDescrObject *)method;
+        #if PY_VERSION_HEX < 0x03020000
+        PyTypeObject *d_type = descr->d_type;
+        #else
+        PyTypeObject *d_type = descr->d_common.d_type;
+        #endif
+        return PyDescr_NewClassMethod(d_type, descr->d_method);
+    }
+#endif
+    else if (PyMethod_Check(method)) {
+        // python classes
+        return PyClassMethod_New(PyMethod_GET_FUNCTION(method));
+    }
+    else if (PyCFunction_Check(method)) {
+        return PyClassMethod_New(method);
+    }
+#ifdef __Pyx_CyFunction_USED
+    else if (__Pyx_CyFunction_Check(method)) {
+        return PyClassMethod_New(method);
+    }
+#endif
+    PyErr_SetString(PyExc_TypeError,
+                    "Class-level classmethod() can only be called on "
+                    "a method_descriptor or instance method.");
+    return NULL;
+}
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Embed.c b/venv/lib/python3.8/site-packages/Cython/Utility/Embed.c
new file mode 100644
index 0000000..8b8c6c8
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/Embed.c
@@ -0,0 +1,210 @@
+//////////////////// MainFunction ////////////////////
+
+#ifdef __FreeBSD__
+#include <floatingpoint.h>
+#endif
+
+#if PY_MAJOR_VERSION < 3
+int %(main_method)s(int argc, char** argv) {
+#elif defined(WIN32) || defined(MS_WINDOWS)
+int %(wmain_method)s(int argc, wchar_t **argv) {
+#else
+static int __Pyx_main(int argc, wchar_t **argv) {
+#endif
+    /* 754 requires that FP exceptions run in "no stop" mode by default,
+     * and until C vendors implement C99's ways to control FP exceptions,
+     * Python requires non-stop mode.  Alas, some platforms enable FP
+     * exceptions by default.  Here we disable them.
+     */
+#ifdef __FreeBSD__
+    fp_except_t m;
+
+    m = fpgetmask();
+    fpsetmask(m & ~FP_X_OFL);
+#endif
+    if (argc && argv)
+        Py_SetProgramName(argv[0]);
+    Py_Initialize();
+    if (argc && argv)
+        PySys_SetArgv(argc, argv);
+    { /* init module '%(module_name)s' as '__main__' */
+      PyObject* m = NULL;
+      %(module_is_main)s = 1;
+      #if PY_MAJOR_VERSION < 3
+          init%(module_name)s();
+      #elif CYTHON_PEP489_MULTI_PHASE_INIT
+          m = PyInit_%(module_name)s();
+          if (!PyModule_Check(m)) {
+              PyModuleDef *mdef = (PyModuleDef *) m;
+              PyObject *modname = PyUnicode_FromString("__main__");
+              m = NULL;
+              if (modname) {
+                  // FIXME: not currently calling PyModule_FromDefAndSpec() here because we do not have a module spec!
+                  // FIXME: not currently setting __file__, __path__, __spec__, ...
+                  m = PyModule_NewObject(modname);
+                  Py_DECREF(modname);
+                  if (m) PyModule_ExecDef(m, mdef);
+              }
+          }
+      #else
+          m = PyInit_%(module_name)s();
+      #endif
+      if (PyErr_Occurred()) {
+          PyErr_Print(); /* This exits with the right code if SystemExit. */
+          #if PY_MAJOR_VERSION < 3
+          if (Py_FlushLine()) PyErr_Clear();
+          #endif
+          return 1;
+      }
+      Py_XDECREF(m);
+    }
+    Py_Finalize();
+    return 0;
+}
+
+
+#if PY_MAJOR_VERSION >= 3 && !defined(WIN32) && !defined(MS_WINDOWS)
+#include <locale.h>
+
+static wchar_t*
+__Pyx_char2wchar(char* arg)
+{
+    wchar_t *res;
+#ifdef HAVE_BROKEN_MBSTOWCS
+    /* Some platforms have a broken implementation of
+     * mbstowcs which does not count the characters that
+     * would result from conversion.  Use an upper bound.
+     */
+    size_t argsize = strlen(arg);
+#else
+    size_t argsize = mbstowcs(NULL, arg, 0);
+#endif
+    size_t count;
+    unsigned char *in;
+    wchar_t *out;
+#ifdef HAVE_MBRTOWC
+    mbstate_t mbs;
+#endif
+    if (argsize != (size_t)-1) {
+        res = (wchar_t *)malloc((argsize+1)*sizeof(wchar_t));
+        if (!res)
+            goto oom;
+        count = mbstowcs(res, arg, argsize+1);
+        if (count != (size_t)-1) {
+            wchar_t *tmp;
+            /* Only use the result if it contains no
+               surrogate characters. */
+            for (tmp = res; *tmp != 0 &&
+                     (*tmp < 0xd800 || *tmp > 0xdfff); tmp++)
+                ;
+            if (*tmp == 0)
+                return res;
+        }
+        free(res);
+    }
+    /* Conversion failed. Fall back to escaping with surrogateescape. */
+#ifdef HAVE_MBRTOWC
+    /* Try conversion with mbrtowc (C99), and escape non-decodable bytes. */
+
+    /* Overallocate; as multi-byte characters are in the argument, the
+       actual output could use less memory. */
+    argsize = strlen(arg) + 1;
+    res = (wchar_t *)malloc(argsize*sizeof(wchar_t));
+    if (!res) goto oom;
+    in = (unsigned char*)arg;
+    out = res;
+    memset(&mbs, 0, sizeof mbs);
+    while (argsize) {
+        size_t converted = mbrtowc(out, (char*)in, argsize, &mbs);
+        if (converted == 0)
+            /* Reached end of string; null char stored. */
+            break;
+        if (converted == (size_t)-2) {
+            /* Incomplete character. This should never happen,
+               since we provide everything that we have -
+               unless there is a bug in the C library, or I
+               misunderstood how mbrtowc works. */
+            fprintf(stderr, "unexpected mbrtowc result -2\\n");
+            free(res);
+            return NULL;
+        }
+        if (converted == (size_t)-1) {
+            /* Conversion error. Escape as UTF-8b, and start over
+               in the initial shift state. */
+            *out++ = 0xdc00 + *in++;
+            argsize--;
+            memset(&mbs, 0, sizeof mbs);
+            continue;
+        }
+        if (*out >= 0xd800 && *out <= 0xdfff) {
+            /* Surrogate character.  Escape the original
+               byte sequence with surrogateescape.
*/ + argsize -= converted; + while (converted--) + *out++ = 0xdc00 + *in++; + continue; + } + /* successfully converted some bytes */ + in += converted; + argsize -= converted; + out++; + } +#else + /* Cannot use C locale for escaping; manually escape as if charset + is ASCII (i.e. escape all bytes > 128. This will still roundtrip + correctly in the locale's charset, which must be an ASCII superset. */ + res = (wchar_t *)malloc((strlen(arg)+1)*sizeof(wchar_t)); + if (!res) goto oom; + in = (unsigned char*)arg; + out = res; + while(*in) + if(*in < 128) + *out++ = *in++; + else + *out++ = 0xdc00 + *in++; + *out = 0; +#endif + return res; +oom: + fprintf(stderr, "out of memory\\n"); + return NULL; +} + +int +%(main_method)s(int argc, char **argv) +{ + if (!argc) { + return __Pyx_main(0, NULL); + } + else { + int i, res; + wchar_t **argv_copy = (wchar_t **)malloc(sizeof(wchar_t*)*argc); + /* We need a second copy, as Python might modify the first one. */ + wchar_t **argv_copy2 = (wchar_t **)malloc(sizeof(wchar_t*)*argc); + char *oldloc = strdup(setlocale(LC_ALL, NULL)); + if (!argv_copy || !argv_copy2 || !oldloc) { + fprintf(stderr, "out of memory\\n"); + free(argv_copy); + free(argv_copy2); + free(oldloc); + return 1; + } + res = 0; + setlocale(LC_ALL, ""); + for (i = 0; i < argc; i++) { + argv_copy2[i] = argv_copy[i] = __Pyx_char2wchar(argv[i]); + if (!argv_copy[i]) res = 1; /* failure, but continue to simplify cleanup */ + } + setlocale(LC_ALL, oldloc); + free(oldloc); + if (res == 0) + res = __Pyx_main(argc, argv_copy); + for (i = 0; i < argc; i++) { + free(argv_copy2[i]); + } + free(argv_copy); + free(argv_copy2); + return res; + } +} +#endif diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Exceptions.c b/venv/lib/python3.8/site-packages/Cython/Utility/Exceptions.c new file mode 100644 index 0000000..ebbf65e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Exceptions.c @@ -0,0 +1,792 @@ +// Exception raising code +// +// Exceptions are raised by __Pyx_Raise() and stored as plain +// type/value/tb in PyThreadState->curexc_*. 
When being caught by an
+// 'except' statement, curexc_* is moved over to exc_* by
+// __Pyx_GetException()
+
+
+/////////////// PyThreadStateGet.proto ///////////////
+//@substitute: naming
+
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyThreadState_declare  PyThreadState *$local_tstate_cname;
+#define __Pyx_PyThreadState_assign  $local_tstate_cname = __Pyx_PyThreadState_Current;
+#define __Pyx_PyErr_Occurred()  $local_tstate_cname->curexc_type
+#else
+#define __Pyx_PyThreadState_declare
+#define __Pyx_PyThreadState_assign
+#define __Pyx_PyErr_Occurred()  PyErr_Occurred()
+#endif
+
+
+/////////////// PyErrExceptionMatches.proto ///////////////
+//@substitute: naming
+
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState($local_tstate_cname, err)
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err);
+#else
+#define __Pyx_PyErr_ExceptionMatches(err)  PyErr_ExceptionMatches(err)
+#endif
+
+/////////////// PyErrExceptionMatches ///////////////
+
+#if CYTHON_FAST_THREAD_STATE
+static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+    Py_ssize_t i, n;
+    n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+    // the tighter subtype checking in Py3 allows faster out-of-order comparison
+    for (i=0; i<n; i++) {
+        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+    }
+#endif
+    for (i=0; i<n; i++) {
+        if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
+    }
+    return 0;
+}
+
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
+    PyObject *exc_type = tstate->curexc_type;
+    if (exc_type == err) return 1;
+    if (unlikely(!exc_type)) return 0;
+    if (unlikely(PyTuple_Check(err)))
+        return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
+    return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
+}
+#endif
+
+/////////////// PyErrFetchRestore.proto ///////////////
+//@substitute: naming
+//@requires: PyThreadStateGet
+
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
+#define __Pyx_ErrRestoreWithState(type, value, tb)  __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb)    __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb)  __Pyx_ErrRestoreInState($local_tstate_cname, type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb)    __Pyx_ErrFetchInState($local_tstate_cname, type, value, tb)
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
+#else
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#endif
+
+#else
+#define __Pyx_PyErr_Clear() PyErr_Clear()
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#define __Pyx_ErrRestoreWithState(type, value, tb)  PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb)  PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestoreInState(tstate, type, value, tb)  PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchInState(tstate, type, value, tb)  PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb)  PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb)  PyErr_Fetch(type, value, tb)
+#endif
+
+/////////////// PyErrFetchRestore ///////////////
+
+#if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+    PyObject *tmp_type, *tmp_value, *tmp_tb;
+    tmp_type =
tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} + +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/////////////// RaiseException.proto /////////////// + +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/ + +/////////////// RaiseException /////////////// +//@requires: PyErrFetchRestore +//@requires: PyThreadStateGet + +// The following function is based on do_raise() from ceval.c. There +// are separate versions for Python2 and Python3 as exception handling +// has changed quite a lot between the two versions. + +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + /* 'cause' is only used in Py3 */ + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + + if (PyType_Check(type)) { + /* instantiate the type now (we don't know when and how it will be caught) */ +#if CYTHON_COMPILING_IN_PYPY + /* PyPy can't handle value == NULL */ + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + + } else { + /* Raising an instance. The value should be a dummy. 
*/ + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + /* Normalize to raise , */ + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} + +#else /* Python 3+ */ + +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + // make sure value is an exception instance of type + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + // error on subclass test + goto bad; + } else { + // believe the instance + type = instance_class; + } + } + } + if (!instance_class) { + // instantiate the type now (we don't know when and how it will be caught) + // assuming that 'value' is an argument to the type's constructor + // not using PyErr_NormalizeException() to avoid ref-counting problems + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + // raise ... 
from None + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + + PyErr_SetObject(type, value); + + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } + +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + + +/////////////// GetTopmostException.proto /////////////// + +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/////////////// GetTopmostException /////////////// + +#if CYTHON_USE_EXC_INFO_STACK +// Copied from errors.c in CPython. +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + + +/////////////// GetException.proto /////////////// +//@substitute: naming +//@requires: PyThreadStateGet + +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException($local_tstate_cname, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/ +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ +#endif + +/////////////// GetException /////////////// + +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + // traceback may be NULL for freshly raised exceptions + Py_XINCREF(local_tb); + // exception state may be temporarily empty in parallel loops (race condition) + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = 
exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + // Make sure tstate is in a consistent state when we XDECREF + // these objects (DECREF may run arbitrary code). + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/////////////// ReRaiseException.proto /////////////// + +static CYTHON_INLINE void __Pyx_ReraiseException(void); /*proto*/ + +/////////////// ReRaiseException /////////////// +//@requires: GetTopmostException + +static CYTHON_INLINE void __Pyx_ReraiseException(void) { + PyObject *type = NULL, *value = NULL, *tb = NULL; +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = PyThreadState_GET(); + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + type = exc_info->exc_type; + value = exc_info->exc_value; + tb = exc_info->exc_traceback; + #else + type = tstate->exc_type; + value = tstate->exc_value; + tb = tstate->exc_traceback; + #endif +#else + PyErr_GetExcInfo(&type, &value, &tb); +#endif + if (!type || type == Py_None) { +#if !CYTHON_FAST_THREAD_STATE + Py_XDECREF(type); + Py_XDECREF(value); + Py_XDECREF(tb); +#endif + // message copied from Py3 + PyErr_SetString(PyExc_RuntimeError, + "No active exception to reraise"); + } else { +#if CYTHON_FAST_THREAD_STATE + Py_INCREF(type); + Py_XINCREF(value); + Py_XINCREF(tb); + +#endif + PyErr_Restore(type, value, tb); + } +} + +/////////////// SaveResetException.proto /////////////// +//@substitute: naming +//@requires: PyThreadStateGet + +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave($local_tstate_cname, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/ +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset($local_tstate_cname, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); /*proto*/ + +#else + +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/////////////// SaveResetException /////////////// +//@requires: GetTopmostException + +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} + +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = 
tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/////////////// SwapException.proto /////////////// +//@substitute: naming +//@requires: PyThreadStateGet + +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap($local_tstate_cname, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/ +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ +#endif + +/////////////// SwapException /////////////// + +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + + exc_info->exc_type = *type; + exc_info->exc_value = *value; + exc_info->exc_traceback = *tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + #endif + + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} + +#else + +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/////////////// WriteUnraisableException.proto /////////////// + +static void __Pyx_WriteUnraisable(const char *name, int clineno, + int lineno, const char *filename, + int full_traceback, int nogil); /*proto*/ + +/////////////// WriteUnraisableException /////////////// +//@requires: PyErrFetchRestore +//@requires: PyThreadStateGet + +static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, + CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, + int full_traceback, CYTHON_UNUSED int nogil) { + PyObject *old_exc, *old_val, *old_tb; + PyObject *ctx; + __Pyx_PyThreadState_declare +#ifdef WITH_THREAD + PyGILState_STATE state; + if (nogil) + state = PyGILState_Ensure(); +#ifdef _MSC_VER + /* arbitrary, to suppress warning */ + else state = (PyGILState_STATE)-1; +#endif +#endif + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); + if (full_traceback) { + Py_XINCREF(old_exc); + Py_XINCREF(old_val); + Py_XINCREF(old_tb); + __Pyx_ErrRestore(old_exc, old_val, old_tb); + PyErr_PrintEx(1); + } + #if PY_MAJOR_VERSION < 3 + ctx = PyString_FromString(name); + #else + ctx = PyUnicode_FromString(name); + #endif + __Pyx_ErrRestore(old_exc, old_val, old_tb); + if (!ctx) { + PyErr_WriteUnraisable(Py_None); + } else { + PyErr_WriteUnraisable(ctx); + Py_DECREF(ctx); + } +#ifdef WITH_THREAD + if (nogil) + PyGILState_Release(state); +#endif +} + +/////////////// CLineInTraceback.proto 
/////////////// + +#ifdef CYTHON_CLINE_IN_TRACEBACK /* 0 or 1 to disable/enable C line display in tracebacks at C compile time */ +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);/*proto*/ +#endif + +/////////////// CLineInTraceback /////////////// +//@requires: ObjectHandling.c::PyObjectGetAttrStr +//@requires: ObjectHandling.c::PyDictVersioning +//@requires: PyErrFetchRestore +//@substitute: naming + +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + + if (unlikely(!${cython_runtime_cname})) { + // Very early error where the runtime module is not set up yet. + return c_line; + } + + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(${cython_runtime_cname}); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, PYIDENT("cline_in_traceback"))) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(${cython_runtime_cname}, PYIDENT("cline_in_traceback")); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(${cython_runtime_cname}, PYIDENT("cline_in_traceback"), Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/////////////// AddTraceback.proto /////////////// + +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); /*proto*/ + +/////////////// AddTraceback /////////////// +//@requires: ModuleSetupCode.c::CodeObjectCache +//@requires: CLineInTraceback +//@substitute: naming + +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" + +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, $cfilenm_cname, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, $cfilenm_cname, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, /*int argcount,*/ + 0, /*int kwonlyargcount,*/ + 0, /*int nlocals,*/ + 0, /*int stacksize,*/ + 0, /*int flags,*/ + $empty_bytes, /*PyObject *code,*/ + $empty_tuple, /*PyObject *consts,*/ + $empty_tuple, /*PyObject *names,*/ + $empty_tuple, /*PyObject *varnames,*/ + $empty_tuple, /*PyObject *freevars,*/ + $empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, 
/*PyObject *name,*/ + py_line, /*int firstlineno,*/ + $empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} + +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + + // Negate to avoid collisions between py and c lines. + py_code = $global_code_object_cache_find(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + $global_code_object_cache_insert(c_line ? -c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + $moddict_cname, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/ExtensionTypes.c b/venv/lib/python3.8/site-packages/Cython/Utility/ExtensionTypes.c new file mode 100644 index 0000000..1b39c9e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/ExtensionTypes.c @@ -0,0 +1,280 @@ +/////////////// PyType_Ready.proto /////////////// + +static int __Pyx_PyType_Ready(PyTypeObject *t); + +/////////////// PyType_Ready /////////////// + +// Wrapper around PyType_Ready() with some runtime checks and fixes +// to deal with multiple inheritance. +static int __Pyx_PyType_Ready(PyTypeObject *t) { + // Loop over all bases (except the first) and check that those + // really are heap types. Otherwise, it would not be safe to + // subclass them. + // + // We also check tp_dictoffset: it is unsafe to inherit + // tp_dictoffset from a base class because the object structures + // would not be compatible. So, if our extension type doesn't set + // tp_dictoffset (i.e. there is no __dict__ attribute in the object + // structure), we need to check that none of the base classes sets + // it either. + int r; + PyObject *bases = t->tp_bases; + if (bases) + { + Py_ssize_t i, n = PyTuple_GET_SIZE(bases); + for (i = 1; i < n; i++) /* Skip first base */ + { + PyObject *b0 = PyTuple_GET_ITEM(bases, i); + PyTypeObject *b; +#if PY_MAJOR_VERSION < 3 + /* Disallow old-style classes */ + if (PyClass_Check(b0)) + { + PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", + PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); + return -1; + } +#endif + b = (PyTypeObject*)b0; + if (!PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) + { + PyErr_Format(PyExc_TypeError, "base class '%.200s' is not a heap type", + b->tp_name); + return -1; + } + if (t->tp_dictoffset == 0 && b->tp_dictoffset) + { + PyErr_Format(PyExc_TypeError, + "extension type '%.200s' has no __dict__ slot, but base type '%.200s' has: " + "either add 'cdef dict __dict__' to the extension type " + "or add '__slots__ = [...]' to the base type", + t->tp_name, b->tp_name); + return -1; + } + } + } + +#if PY_VERSION_HEX >= 0x03050000 + { + // Make sure GC does not pick up our non-heap type as heap type with this hack! 
+ // For details, see https://github.com/cython/cython/issues/3603 + PyObject *ret, *py_status; + int gc_was_enabled; + PyObject *gc = PyImport_Import(PYUNICODE("gc")); + if (unlikely(!gc)) return -1; + py_status = PyObject_CallMethodObjArgs(gc, PYUNICODE("isenabled"), NULL); + if (unlikely(!py_status)) { + Py_DECREF(gc); + return -1; + } + gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); + Py_DECREF(py_status); + if (gc_was_enabled > 0) { + ret = PyObject_CallMethodObjArgs(gc, PYUNICODE("disable"), NULL); + if (unlikely(!ret)) { + Py_DECREF(gc); + return -1; + } + Py_DECREF(ret); + } else if (unlikely(gc_was_enabled == -1)) { + Py_DECREF(gc); + return -1; + } + + // As of https://bugs.python.org/issue22079 + // PyType_Ready enforces that all bases of a non-heap type are + // non-heap. We know that this is the case for the solid base but + // other bases are heap allocated and are kept alive through the + // tp_bases reference. + // Other than this check, the Py_TPFLAGS_HEAPTYPE flag is unused + // in PyType_Ready(). + t->tp_flags |= Py_TPFLAGS_HEAPTYPE; +#endif + + r = PyType_Ready(t); + +#if PY_VERSION_HEX >= 0x03050000 + t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; + + if (gc_was_enabled) { + PyObject *t, *v, *tb; + PyErr_Fetch(&t, &v, &tb); + ret = PyObject_CallMethodObjArgs(gc, PYUNICODE("enable"), NULL); + if (likely(ret || r == -1)) { + Py_XDECREF(ret); + // do not overwrite exceptions raised by PyType_Ready() above + PyErr_Restore(t, v, tb); + } else { + // PyType_Ready() succeeded, but gc.enable() failed. + Py_XDECREF(t); + Py_XDECREF(v); + Py_XDECREF(tb); + r = -1; + } + } + Py_DECREF(gc); + } +#endif + + return r; +} + +/////////////// CallNextTpDealloc.proto /////////////// + +static void __Pyx_call_next_tp_dealloc(PyObject* obj, destructor current_tp_dealloc); + +/////////////// CallNextTpDealloc /////////////// + +static void __Pyx_call_next_tp_dealloc(PyObject* obj, destructor current_tp_dealloc) { + PyTypeObject* type = Py_TYPE(obj); + /* try to find the first parent type that has a different tp_dealloc() function */ + while (type && type->tp_dealloc != current_tp_dealloc) + type = type->tp_base; + while (type && type->tp_dealloc == current_tp_dealloc) + type = type->tp_base; + if (type) + type->tp_dealloc(obj); +} + +/////////////// CallNextTpTraverse.proto /////////////// + +static int __Pyx_call_next_tp_traverse(PyObject* obj, visitproc v, void *a, traverseproc current_tp_traverse); + +/////////////// CallNextTpTraverse /////////////// + +static int __Pyx_call_next_tp_traverse(PyObject* obj, visitproc v, void *a, traverseproc current_tp_traverse) { + PyTypeObject* type = Py_TYPE(obj); + /* try to find the first parent type that has a different tp_traverse() function */ + while (type && type->tp_traverse != current_tp_traverse) + type = type->tp_base; + while (type && type->tp_traverse == current_tp_traverse) + type = type->tp_base; + if (type && type->tp_traverse) + return type->tp_traverse(obj, v, a); + // FIXME: really ignore? 
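+    // Returning 0 treats a missing base tp_traverse as "nothing left to visit":
+    // tp_traverse functions return 0 on success, and only a visitproc callback
+    // can supply a non-zero abort code to propagate.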
+ return 0; +} + +/////////////// CallNextTpClear.proto /////////////// + +static void __Pyx_call_next_tp_clear(PyObject* obj, inquiry current_tp_dealloc); + +/////////////// CallNextTpClear /////////////// + +static void __Pyx_call_next_tp_clear(PyObject* obj, inquiry current_tp_clear) { + PyTypeObject* type = Py_TYPE(obj); + /* try to find the first parent type that has a different tp_clear() function */ + while (type && type->tp_clear != current_tp_clear) + type = type->tp_base; + while (type && type->tp_clear == current_tp_clear) + type = type->tp_base; + if (type && type->tp_clear) + type->tp_clear(obj); +} + +/////////////// SetupReduce.proto /////////////// + +static int __Pyx_setup_reduce(PyObject* type_obj); + +/////////////// SetupReduce /////////////// +//@requires: ObjectHandling.c::PyObjectGetAttrStrNoError +//@requires: ObjectHandling.c::PyObjectGetAttrStr +//@substitute: naming + +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + + name_attr = __Pyx_PyObject_GetAttrStr(meth, PYIDENT("__name__")); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + + Py_XDECREF(name_attr); + return ret; +} + +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; + +#if CYTHON_USE_PYTYPE_LOOKUP + if (_PyType_Lookup((PyTypeObject*)type_obj, PYIDENT("__getstate__"))) goto __PYX_GOOD; +#else + if (PyObject_HasAttr(type_obj, PYIDENT("__getstate__"))) goto __PYX_GOOD; +#endif + +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto __PYX_BAD; +#endif + + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce_ex__")); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { + +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce__")); if (unlikely(!reduce)) goto __PYX_BAD; + + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, PYIDENT("__reduce_cython__"))) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, PYIDENT("__reduce_cython__")); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce__"), reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce_cython__")); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + // Ignore if we're done, i.e. if 'reduce' already has the right name and the original is gone. + // Otherwise: error. 
+ goto __PYX_BAD; + } + + setstate = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__setstate__")); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, PYIDENT("__setstate_cython__"))) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, PYIDENT("__setstate_cython__")); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate__"), setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate_cython__")); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + // Ignore if we're done, i.e. if 'setstate' already has the right name and the original is gone. + // Otherwise: error. + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; + +__PYX_BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/FunctionArguments.c b/venv/lib/python3.8/site-packages/Cython/Utility/FunctionArguments.c new file mode 100644 index 0000000..8333d93 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/FunctionArguments.c @@ -0,0 +1,352 @@ +//////////////////// ArgTypeTest.proto //////////////////// + + +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact) \ + ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 : \ + __Pyx__ArgTypeTest(obj, type, name, exact)) + +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); /*proto*/ + +//////////////////// ArgTypeTest //////////////////// + +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +//////////////////// RaiseArgTupleInvalid.proto //////////////////// + +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/ + +//////////////////// RaiseArgTupleInvalid //////////////////// + +// __Pyx_RaiseArgtupleInvalid raises the correct exception when too +// many or too few positional arguments were found. This handles +// Py_ssize_t formatting correctly. 
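+// Illustrative sketch (hypothetical values, not generated code): for a
+// function f(a, b) called with three positional arguments, the generated
+// unpacking code would call roughly
+//     __Pyx_RaiseArgtupleInvalid("f", 1, 2, 2, 3);
+// which raises: TypeError: f() takes exactly 2 positional arguments (3 given)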
+ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + + +//////////////////// RaiseKeywordRequired.proto //////////////////// + +static void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name); /*proto*/ + +//////////////////// RaiseKeywordRequired //////////////////// + +static void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name) { + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() needs keyword-only argument %U", func_name, kw_name); + #else + "%s() needs keyword-only argument %s", func_name, + PyString_AS_STRING(kw_name)); + #endif +} + + +//////////////////// RaiseDoubleKeywords.proto //////////////////// + +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /*proto*/ + +//////////////////// RaiseDoubleKeywords //////////////////// + +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + + +//////////////////// RaiseMappingExpected.proto //////////////////// + +static void __Pyx_RaiseMappingExpectedError(PyObject* arg); /*proto*/ + +//////////////////// RaiseMappingExpected //////////////////// + +static void __Pyx_RaiseMappingExpectedError(PyObject* arg) { + PyErr_Format(PyExc_TypeError, "'%.200s' object is not a mapping", Py_TYPE(arg)->tp_name); +} + + +//////////////////// KeywordStringCheck.proto //////////////////// + +static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); /*proto*/ + +//////////////////// KeywordStringCheck //////////////////// + +// __Pyx_CheckKeywordStrings raises an error if non-string keywords +// were passed to a function, or if any keywords were passed to a +// function that does not accept them. 
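+// Illustrative call-site sketch (hypothetical function name "f"): a function
+// accepting no keywords would typically guard its kwds dict with roughly
+//     if (unlikely(kwds) && !__Pyx_CheckKeywordStrings(kwds, "f", 0)) return NULL;
+// so any keyword raises "f() got an unexpected keyword argument '...'" and
+// any non-string key raises "f() keywords must be strings".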
+ +static int __Pyx_CheckKeywordStrings( + PyObject *kwdict, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + /* PyPy appears to check keywords at call time, not at unpacking time => not much to do here */ + if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + while (PyDict_Next(kwdict, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if ((!kw_allowed) && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + + +//////////////////// ParseKeywords.proto //////////////////// + +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], \ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, \ + const char* function_name); /*proto*/ + +//////////////////// ParseKeywords //////////////////// +//@requires: RaiseDoubleKeywords + +// __Pyx_ParseOptionalKeywords copies the optional/unknown keyword +// arguments from the kwds dict into kwds2. If kwds2 is NULL, unknown +// keywords will raise an invalid keyword error. +// +// Three kinds of errors are checked: 1) non-string keywords, 2) +// unexpected keywords and 3) overlap with positional arguments. +// +// If num_posargs is greater than 0, it denotes the number of positional +// arguments that were passed and that must therefore not appear +// amongst the keywords as well. +// +// This method does not check for required keyword arguments. + +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + // not found after positional args, check for duplicate + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + // In Py2, we may need to convert the argument name from str to unicode for comparison.
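+                    // The (**name == key) test above is an identity fast path:
+                    // interned argument-name strings usually match by pointer,
+                    // so the full comparison below only runs on a pointer miss.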
+ PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + // not found after positional args, check for duplicate + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + // need to convert argument name from bytes to unicode for comparison + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + + +//////////////////// MergeKeywords.proto //////////////////// + +static int __Pyx_MergeKeywords(PyObject *kwdict, PyObject *source_mapping); /*proto*/ + +//////////////////// MergeKeywords //////////////////// +//@requires: RaiseDoubleKeywords +//@requires: Optimize.c::dict_iter + +static int __Pyx_MergeKeywords(PyObject *kwdict, PyObject *source_mapping) { + PyObject *iter, *key = NULL, *value = NULL; + int source_is_dict, result; + Py_ssize_t orig_length, ppos = 0; + + iter = __Pyx_dict_iterator(source_mapping, 0, PYIDENT("items"), &orig_length, &source_is_dict); + if (unlikely(!iter)) { + // slow fallback: try converting to dict, then iterate + PyObject *args; + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + args = PyTuple_Pack(1, source_mapping); + if (likely(args)) { + PyObject *fallback = PyObject_Call((PyObject*)&PyDict_Type, args, NULL); + Py_DECREF(args); + if (likely(fallback)) { + iter = __Pyx_dict_iterator(fallback, 1, PYIDENT("items"), &orig_length, &source_is_dict); + Py_DECREF(fallback); + } + } + if (unlikely(!iter)) goto bad; + } + + while (1) { + result = __Pyx_dict_iter_next(iter, orig_length, &ppos, &key, &value, NULL, source_is_dict); + if (unlikely(result < 0)) goto bad; + if (!result) break; + + if (unlikely(PyDict_Contains(kwdict, key))) { + __Pyx_RaiseDoubleKeywordsError("function", key); + result = -1; + } else { + result = PyDict_SetItem(kwdict, key, value); + } + Py_DECREF(key); + Py_DECREF(value); + if (unlikely(result < 0)) goto bad; + } + Py_XDECREF(iter); + return 0; + +bad: + Py_XDECREF(iter); + return -1; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/ImportExport.c b/venv/lib/python3.8/site-packages/Cython/Utility/ImportExport.c new file mode 100644 index 0000000..676bc4c --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/ImportExport.c @@ -0,0 +1,738 @@ +/////////////// PyIdentifierFromString.proto /////////////// + +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) 
PyUnicode_FromString(s) +#endif +#endif + + +/////////////// Import.proto /////////////// + +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /*proto*/ + +/////////////// Import /////////////// +//@requires: ObjectHandling.c::PyObjectGetAttrStr +//@substitute: naming + +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr($builtins_cname, PYIDENT("__import__")); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict($module_cname); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + // Avoid C compiler warning if strchr() evaluates to false at compile time. + if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { + /* try package relative import first */ + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; /* try absolute import on failure */ + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + + +/////////////// ImportFrom.proto /////////////// + +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); /*proto*/ + +/////////////// ImportFrom /////////////// +//@requires: ObjectHandling.c::PyObjectGetAttrStr + +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + + +/////////////// ImportStar /////////////// +//@substitute: naming + +/* import_all_from is an unexposed function from ceval.c */ + +static int +__Pyx_import_all_from(PyObject *locals, PyObject *v) +{ + PyObject *all = PyObject_GetAttrString(v, "__all__"); + PyObject *dict, *name, *value; + int skip_leading_underscores = 0; + int pos, err; + + if (all == NULL) { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + return -1; /* Unexpected error */ + PyErr_Clear(); + dict = PyObject_GetAttrString(v, "__dict__"); + if (dict == NULL) { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + return -1; + PyErr_SetString(PyExc_ImportError, + "from-import-* object has no __dict__ and no __all__"); + return -1; + } +#if PY_MAJOR_VERSION < 3 + all = PyObject_CallMethod(dict, (char *)"keys", NULL); +#else + all = PyMapping_Keys(dict); +#endif + Py_DECREF(dict); + if (all == NULL) + return -1; + skip_leading_underscores = 1; + } + + for (pos = 0, err = 0; ; pos++) { 
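+        // 'all' is either __all__ or the module's key list here; iteration
+        // runs until IndexError, mirroring the unexposed import_all_from()
+        // in CPython's ceval.c.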
+ name = PySequence_GetItem(all, pos); + if (name == NULL) { + if (!PyErr_ExceptionMatches(PyExc_IndexError)) + err = -1; + else + PyErr_Clear(); + break; + } + if (skip_leading_underscores && +#if PY_MAJOR_VERSION < 3 + PyString_Check(name) && + PyString_AS_STRING(name)[0] == '_') +#else + PyUnicode_Check(name) && + PyUnicode_AS_UNICODE(name)[0] == '_') +#endif + { + Py_DECREF(name); + continue; + } + value = PyObject_GetAttr(v, name); + if (value == NULL) + err = -1; + else if (PyDict_CheckExact(locals)) + err = PyDict_SetItem(locals, name, value); + else + err = PyObject_SetItem(locals, name, value); + Py_DECREF(name); + Py_XDECREF(value); + if (err != 0) + break; + } + Py_DECREF(all); + return err; +} + + +static int ${import_star}(PyObject* m) { + + int i; + int ret = -1; + char* s; + PyObject *locals = 0; + PyObject *list = 0; +#if PY_MAJOR_VERSION >= 3 + PyObject *utf8_name = 0; +#endif + PyObject *name; + PyObject *item; + + locals = PyDict_New(); if (!locals) goto bad; + if (__Pyx_import_all_from(locals, m) < 0) goto bad; + list = PyDict_Items(locals); if (!list) goto bad; + + for(i=0; i<PyList_GET_SIZE(list); i++) { + name = PyTuple_GET_ITEM(PyList_GET_ITEM(list, i), 0); + item = PyTuple_GET_ITEM(PyList_GET_ITEM(list, i), 1); +#if PY_MAJOR_VERSION >= 3 + utf8_name = PyUnicode_AsUTF8String(name); + if (!utf8_name) goto bad; + s = PyBytes_AS_STRING(utf8_name); + if (${import_star_set}(item, name, s) < 0) goto bad; + Py_DECREF(utf8_name); utf8_name = 0; +#else + s = PyString_AsString(name); + if (!s) goto bad; + if (${import_star_set}(item, name, s) < 0) goto bad; +#endif + } + ret = 0; + +bad: + Py_XDECREF(locals); + Py_XDECREF(list); +#if PY_MAJOR_VERSION >= 3 + Py_XDECREF(utf8_name); +#endif + return ret; +} + + +/////////////// SetPackagePathFromImportLib.proto /////////////// + +// PY_VERSION_HEX >= 0x03030000 +#if PY_MAJOR_VERSION >= 3 && !CYTHON_PEP489_MULTI_PHASE_INIT +static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, PyObject *module_name); +#else +#define __Pyx_SetPackagePathFromImportLib(a, b) 0 +#endif + +/////////////// SetPackagePathFromImportLib /////////////// +//@requires: ObjectHandling.c::PyObjectGetAttrStr +//@substitute: naming + +// PY_VERSION_HEX >= 0x03030000 +#if PY_MAJOR_VERSION >= 3 && !CYTHON_PEP489_MULTI_PHASE_INIT +static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, PyObject *module_name) { + PyObject *importlib, *loader, *osmod, *ossep, *parts, *package_path; + PyObject *path = NULL, *file_path = NULL; + int result; + if (parent_package_name) { + PyObject *package = PyImport_ImportModule(parent_package_name); + if (unlikely(!package)) + goto bad; + path = PyObject_GetAttrString(package, "__path__"); + Py_DECREF(package); + if (unlikely(!path) || unlikely(path == Py_None)) + goto bad; + } else { + path = Py_None; Py_INCREF(Py_None); + } + // package_path = [importlib.find_loader(module_name, path).path.rsplit(os.sep, 1)[0]] + importlib = PyImport_ImportModule("importlib"); + if (unlikely(!importlib)) + goto bad; + loader = PyObject_CallMethod(importlib, "find_loader", "(OO)", module_name, path); + Py_DECREF(importlib); + Py_DECREF(path); path = NULL; + if (unlikely(!loader)) + goto bad; + file_path = PyObject_GetAttrString(loader, "path"); + Py_DECREF(loader); + if (unlikely(!file_path)) + goto bad; + + if (unlikely(PyObject_SetAttrString($module_cname, "__file__", file_path) < 0)) + goto bad; + + osmod = PyImport_ImportModule("os"); + if (unlikely(!osmod)) + goto bad; + ossep = PyObject_GetAttrString(osmod, "sep"); + Py_DECREF(osmod); + if (unlikely(!ossep)) + goto bad; + parts = PyObject_CallMethod(file_path, "rsplit", "(Oi)", ossep, 1); + Py_DECREF(file_path);
file_path = NULL; + Py_DECREF(ossep); + if (unlikely(!parts)) + goto bad; + package_path = Py_BuildValue("[O]", PyList_GET_ITEM(parts, 0)); + Py_DECREF(parts); + if (unlikely(!package_path)) + goto bad; + goto set_path; + +bad: + PyErr_WriteUnraisable(module_name); + Py_XDECREF(path); + Py_XDECREF(file_path); + + // set an empty path list on failure + PyErr_Clear(); + package_path = PyList_New(0); + if (unlikely(!package_path)) + return -1; + +set_path: + result = PyObject_SetAttrString($module_cname, "__path__", package_path); + Py_DECREF(package_path); + return result; +} +#endif + + +/////////////// TypeImport.proto /////////////// + +#ifndef __PYX_HAVE_RT_ImportType_proto +#define __PYX_HAVE_RT_ImportType_proto + +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; + +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); /*proto*/ + +#endif + +/////////////// TypeImport /////////////// + +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum __Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. 
" + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + /* check_size == __Pyx_ImportType_CheckSize_Ignore does not warn nor error */ + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/////////////// FunctionImport.proto /////////////// + +static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig); /*proto*/ + +/////////////// FunctionImport /////////////// +//@substitute: naming + +#ifndef __PYX_HAVE_RT_ImportFunction +#define __PYX_HAVE_RT_ImportFunction +static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + + d = PyObject_GetAttrString(module, (char *)"$api_name"); + if (!d) + goto bad; + cobj = PyDict_GetItemString(d, funcname); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C function %.200s", + PyModule_GetName(module), funcname); + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); + goto bad; + } + tmp.p = PyCapsule_GetPointer(cobj, sig); +#else + {const char *desc, *s1, *s2; + desc = (const char *)PyCObject_GetDesc(cobj); + if (!desc) + goto bad; + s1 = desc; s2 = sig; + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + if (*s1 != *s2) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, desc); + goto bad; + } + tmp.p = PyCObject_AsVoidPtr(cobj);} +#endif + *f = tmp.fp; + if (!(*f)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} +#endif + +/////////////// FunctionExport.proto /////////////// + +static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); /*proto*/ + +/////////////// FunctionExport /////////////// +//@substitute: naming + +static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + + d = PyObject_GetAttrString($module_cname, (char *)"$api_name"); + if (!d) { + PyErr_Clear(); + d = PyDict_New(); + if (!d) + goto bad; + Py_INCREF(d); + if (PyModule_AddObject($module_cname, (char *)"$api_name", d) < 0) + goto bad; + } + tmp.fp = f; +#if PY_VERSION_HEX >= 0x02070000 + cobj = PyCapsule_New(tmp.p, sig, 0); +#else + cobj = PyCObject_FromVoidPtrAndDesc(tmp.p, (void *)sig, 0); +#endif + if (!cobj) + goto bad; + if (PyDict_SetItemString(d, name, cobj) < 0) + goto bad; + Py_DECREF(cobj); + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(cobj); + Py_XDECREF(d); + return -1; +} + +/////////////// VoidPtrImport.proto /////////////// + +static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig); /*proto*/ + +/////////////// VoidPtrImport /////////////// +//@substitute: naming + +#ifndef __PYX_HAVE_RT_ImportVoidPtr +#define __PYX_HAVE_RT_ImportVoidPtr +static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + + d = PyObject_GetAttrString(module, (char *)"$api_name"); + if (!d) + goto bad; + cobj = 
PyDict_GetItemString(d, name); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C variable %.200s", + PyModule_GetName(module), name); + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), name, sig, PyCapsule_GetName(cobj)); + goto bad; + } + *p = PyCapsule_GetPointer(cobj, sig); +#else + {const char *desc, *s1, *s2; + desc = (const char *)PyCObject_GetDesc(cobj); + if (!desc) + goto bad; + s1 = desc; s2 = sig; + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + if (*s1 != *s2) { + PyErr_Format(PyExc_TypeError, + "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), name, sig, desc); + goto bad; + } + *p = PyCObject_AsVoidPtr(cobj);} +#endif + if (!(*p)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} +#endif + +/////////////// VoidPtrExport.proto /////////////// + +static int __Pyx_ExportVoidPtr(PyObject *name, void *p, const char *sig); /*proto*/ + +/////////////// VoidPtrExport /////////////// +//@substitute: naming +//@requires: ObjectHandling.c::PyObjectSetAttrStr + +static int __Pyx_ExportVoidPtr(PyObject *name, void *p, const char *sig) { + PyObject *d; + PyObject *cobj = 0; + + d = PyDict_GetItem($moddict_cname, PYIDENT("$api_name")); + Py_XINCREF(d); + if (!d) { + d = PyDict_New(); + if (!d) + goto bad; + if (__Pyx_PyObject_SetAttrStr($module_cname, PYIDENT("$api_name"), d) < 0) + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + cobj = PyCapsule_New(p, sig, 0); +#else + cobj = PyCObject_FromVoidPtrAndDesc(p, (void *)sig, 0); +#endif + if (!cobj) + goto bad; + if (PyDict_SetItem(d, name, cobj) < 0) + goto bad; + Py_DECREF(cobj); + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(cobj); + Py_XDECREF(d); + return -1; +} + + +/////////////// SetVTable.proto /////////////// + +static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/ + +/////////////// SetVTable /////////////// + +static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, PYIDENT("__pyx_vtable__"), ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + + +/////////////// GetVTable.proto /////////////// + +static void* __Pyx_GetVtable(PyObject *dict); /*proto*/ + +/////////////// GetVTable /////////////// + +static void* __Pyx_GetVtable(PyObject *dict) { + void* ptr; + PyObject *ob = PyObject_GetItem(dict, PYIDENT("__pyx_vtable__")); + if (!ob) + goto bad; +#if PY_VERSION_HEX >= 0x02070000 + ptr = PyCapsule_GetPointer(ob, 0); +#else + ptr = PyCObject_AsVoidPtr(ob); +#endif + if (!ptr && !PyErr_Occurred()) + PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); + Py_DECREF(ob); + return ptr; +bad: + Py_XDECREF(ob); + return NULL; +} + + +/////////////// MergeVTables.proto /////////////// +//@requires: GetVTable + +static int __Pyx_MergeVtables(PyTypeObject *type); /*proto*/ + +/////////////// MergeVTables /////////////// + +static int __Pyx_MergeVtables(PyTypeObject *type) { + int i; + void** base_vtables; + void* unknown = (void*)-1; + PyObject* bases = type->tp_bases; + int base_depth = 0; + { + PyTypeObject* base = type->tp_base; + while (base) { + base_depth += 1; + 
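+            // walk the tp_base chain: base_vtables below is sized with one
+            // slot per base plus one sentinel slot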
base = base->tp_base; + } + } + base_vtables = (void**) malloc(sizeof(void*) * (base_depth + 1)); + base_vtables[0] = unknown; + // Could do MRO resolution of individual methods in the future, assuming + // compatible vtables, but for now simply require a common vtable base. + // Note that if the vtables of various bases are extended separately, + // resolution isn't possible and we must reject it just as when the + // instance struct is so extended. (It would be good to also do this + // check when a multiple-base class is created in pure Python as well.) + for (i = 1; i < PyTuple_GET_SIZE(bases); i++) { + void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))->tp_dict); + if (base_vtable != NULL) { + int j; + PyTypeObject* base = type->tp_base; + for (j = 0; j < base_depth; j++) { + if (base_vtables[j] == unknown) { + base_vtables[j] = __Pyx_GetVtable(base->tp_dict); + base_vtables[j + 1] = unknown; + } + if (base_vtables[j] == base_vtable) { + break; + } else if (base_vtables[j] == NULL) { + // No more potential matching bases (with vtables). + goto bad; + } + base = base->tp_base; + } + } + } + PyErr_Clear(); + free(base_vtables); + return 0; +bad: + PyErr_Format( + PyExc_TypeError, + "multiple bases have vtable conflict: '%s' and '%s'", + type->tp_base->tp_name, ((PyTypeObject*)PyTuple_GET_ITEM(bases, i))->tp_name); + free(base_vtables); + return -1; +} + + +/////////////// ImportNumPyArray.proto /////////////// + +static PyObject *__pyx_numpy_ndarray = NULL; + +static PyObject* __Pyx_ImportNumPyArrayTypeIfAvailable(void); /*proto*/ + +/////////////// ImportNumPyArray.cleanup /////////////// +Py_CLEAR(__pyx_numpy_ndarray); + +/////////////// ImportNumPyArray /////////////// +//@requires: ImportExport.c::Import + +static PyObject* __Pyx__ImportNumPyArray(void) { + PyObject *numpy_module, *ndarray_object = NULL; + numpy_module = __Pyx_Import(PYIDENT("numpy"), NULL, 0); + if (likely(numpy_module)) { + ndarray_object = PyObject_GetAttrString(numpy_module, "ndarray"); + Py_DECREF(numpy_module); + } + if (unlikely(!ndarray_object)) { + // ImportError, AttributeError, ... + PyErr_Clear(); + } + if (unlikely(!ndarray_object || !PyObject_TypeCheck(ndarray_object, &PyType_Type))) { + Py_XDECREF(ndarray_object); + Py_INCREF(Py_None); + ndarray_object = Py_None; + } + return ndarray_object; +} + +static CYTHON_INLINE PyObject* __Pyx_ImportNumPyArrayTypeIfAvailable(void) { + if (unlikely(!__pyx_numpy_ndarray)) { + __pyx_numpy_ndarray = __Pyx__ImportNumPyArray(); + } + Py_INCREF(__pyx_numpy_ndarray); + return __pyx_numpy_ndarray; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/MemoryView.pyx b/venv/lib/python3.8/site-packages/Cython/Utility/MemoryView.pyx new file mode 100644 index 0000000..3c92d5d --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/MemoryView.pyx @@ -0,0 +1,1493 @@ +#################### View.MemoryView #################### + +# This utility provides cython.array and cython.view.memoryview + +from __future__ import absolute_import + +cimport cython + +# from cpython cimport ... 
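+# NOTE: declarations are repeated in "cdef extern" blocks rather than cimported
+# because this utility source is merged textually into the user's module (see
+# the thread-lock note below), where normal cimports are not always available.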
+cdef extern from "Python.h": + int PyIndex_Check(object) + object PyLong_FromVoidPtr(void *) + +cdef extern from "pythread.h": + ctypedef void *PyThread_type_lock + + PyThread_type_lock PyThread_allocate_lock() + void PyThread_free_lock(PyThread_type_lock) + int PyThread_acquire_lock(PyThread_type_lock, int mode) nogil + void PyThread_release_lock(PyThread_type_lock) nogil + +cdef extern from "": + void *memset(void *b, int c, size_t len) + +cdef extern from *: + int __Pyx_GetBuffer(object, Py_buffer *, int) except -1 + void __Pyx_ReleaseBuffer(Py_buffer *) + + ctypedef struct PyObject + ctypedef Py_ssize_t Py_intptr_t + void Py_INCREF(PyObject *) + void Py_DECREF(PyObject *) + + void* PyMem_Malloc(size_t n) + void PyMem_Free(void *p) + void* PyObject_Malloc(size_t n) + void PyObject_Free(void *p) + + cdef struct __pyx_memoryview "__pyx_memoryview_obj": + Py_buffer view + PyObject *obj + __Pyx_TypeInfo *typeinfo + + ctypedef struct {{memviewslice_name}}: + __pyx_memoryview *memview + char *data + Py_ssize_t shape[{{max_dims}}] + Py_ssize_t strides[{{max_dims}}] + Py_ssize_t suboffsets[{{max_dims}}] + + void __PYX_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil) + void __PYX_XDEC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil) + + ctypedef struct __pyx_buffer "Py_buffer": + PyObject *obj + + PyObject *Py_None + + cdef enum: + PyBUF_C_CONTIGUOUS, + PyBUF_F_CONTIGUOUS, + PyBUF_ANY_CONTIGUOUS + PyBUF_FORMAT + PyBUF_WRITABLE + PyBUF_STRIDES + PyBUF_INDIRECT + PyBUF_ND + PyBUF_RECORDS + PyBUF_RECORDS_RO + + ctypedef struct __Pyx_TypeInfo: + pass + + cdef object capsule "__pyx_capsule_create" (void *p, char *sig) + cdef int __pyx_array_getbuffer(PyObject *obj, Py_buffer view, int flags) + cdef int __pyx_memoryview_getbuffer(PyObject *obj, Py_buffer view, int flags) + +cdef extern from *: + ctypedef int __pyx_atomic_int + {{memviewslice_name}} slice_copy_contig "__pyx_memoryview_copy_new_contig"( + __Pyx_memviewslice *from_mvs, + char *mode, int ndim, + size_t sizeof_dtype, int contig_flag, + bint dtype_is_object) nogil except * + bint slice_is_contig "__pyx_memviewslice_is_contig" ( + {{memviewslice_name}} mvs, char order, int ndim) nogil + bint slices_overlap "__pyx_slices_overlap" ({{memviewslice_name}} *slice1, + {{memviewslice_name}} *slice2, + int ndim, size_t itemsize) nogil + + +cdef extern from "": + void *malloc(size_t) nogil + void free(void *) nogil + void *memcpy(void *dest, void *src, size_t n) nogil + + + + +# +### cython.array class +# + +@cname("__pyx_array") +cdef class array: + + cdef: + char *data + Py_ssize_t len + char *format + int ndim + Py_ssize_t *_shape + Py_ssize_t *_strides + Py_ssize_t itemsize + unicode mode # FIXME: this should have been a simple 'char' + bytes _format + void (*callback_free_data)(void *data) + # cdef object _memview + cdef bint free_data + cdef bint dtype_is_object + + def __cinit__(array self, tuple shape, Py_ssize_t itemsize, format not None, + mode="c", bint allocate_buffer=True): + + cdef int idx + cdef Py_ssize_t i, dim + cdef PyObject **p + + self.ndim = len(shape) + self.itemsize = itemsize + + if not self.ndim: + raise ValueError("Empty shape tuple for cython.array") + + if itemsize <= 0: + raise ValueError("itemsize <= 0 for cython.array") + + if not isinstance(format, bytes): + format = format.encode('ASCII') + self._format = format # keep a reference to the byte string + self.format = self._format + + # use single malloc() for both shape and strides + self._shape = PyObject_Malloc(sizeof(Py_ssize_t)*self.ndim*2) + 
+        self._strides = self._shape + self.ndim
+
+        if not self._shape:
+            raise MemoryError("unable to allocate shape and strides.")
+
+        # cdef Py_ssize_t dim, stride
+        for idx, dim in enumerate(shape):
+            if dim <= 0:
+                raise ValueError("Invalid shape in axis %d: %d." % (idx, dim))
+            self._shape[idx] = dim
+
+        cdef char order
+        if mode == 'fortran':
+            order = b'F'
+            self.mode = u'fortran'
+        elif mode == 'c':
+            order = b'C'
+            self.mode = u'c'
+        else:
+            raise ValueError("Invalid mode, expected 'c' or 'fortran', got %s" % mode)
+
+        self.len = fill_contig_strides_array(self._shape, self._strides,
+                                             itemsize, self.ndim, order)
+
+        self.free_data = allocate_buffer
+        self.dtype_is_object = format == b'O'
+        if allocate_buffer:
+            # use malloc() for backwards compatibility
+            # in case external code wants to change the data pointer
+            self.data = <char *> malloc(self.len)
+            if not self.data:
+                raise MemoryError("unable to allocate array data.")
+
+            if self.dtype_is_object:
+                p = <PyObject **> self.data
+                for i in range(self.len / itemsize):
+                    p[i] = Py_None
+                    Py_INCREF(Py_None)
+
+    @cname('getbuffer')
+    def __getbuffer__(self, Py_buffer *info, int flags):
+        cdef int bufmode = -1
+        if self.mode == u"c":
+            bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
+        elif self.mode == u"fortran":
+            bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
+        if not (flags & bufmode):
+            raise ValueError("Can only create a buffer that is contiguous in memory.")
+        info.buf = self.data
+        info.len = self.len
+        info.ndim = self.ndim
+        info.shape = self._shape
+        info.strides = self._strides
+        info.suboffsets = NULL
+        info.itemsize = self.itemsize
+        info.readonly = 0
+
+        if flags & PyBUF_FORMAT:
+            info.format = self.format
+        else:
+            info.format = NULL
+
+        info.obj = self
+
+    __pyx_getbuffer = capsule(<void *> &__pyx_array_getbuffer, "getbuffer(obj, view, flags)")
+
+    def __dealloc__(array self):
+        if self.callback_free_data != NULL:
+            self.callback_free_data(self.data)
+        elif self.free_data:
+            if self.dtype_is_object:
+                refcount_objects_in_slice(self.data, self._shape,
+                                          self._strides, self.ndim, False)
+            free(self.data)
+        PyObject_Free(self._shape)
+
+    @property
+    def memview(self):
+        return self.get_memview()
+
+    @cname('get_memview')
+    cdef get_memview(self):
+        flags = PyBUF_ANY_CONTIGUOUS|PyBUF_FORMAT|PyBUF_WRITABLE
+        return memoryview(self, flags, self.dtype_is_object)
+
+    def __len__(self):
+        return self._shape[0]
+
+    def __getattr__(self, attr):
+        return getattr(self.memview, attr)
+
+    def __getitem__(self, item):
+        return self.memview[item]
+
+    def __setitem__(self, item, value):
+        self.memview[item] = value
+
+
+@cname("__pyx_array_new")
+cdef array array_cwrapper(tuple shape, Py_ssize_t itemsize, char *format,
+                          char *mode, char *buf):
+    cdef array result
+
+    if buf == NULL:
+        result = array(shape, itemsize, format, mode.decode('ASCII'))
+    else:
+        result = array(shape, itemsize, format, mode.decode('ASCII'),
+                       allocate_buffer=False)
+        result.data = buf
+
+    return result
+
+
+#
+### Memoryview constants and cython.view.memoryview class
+#
+
+# Disable generic_contiguous, as it makes trouble verifying contiguity:
+#   - 'contiguous' or '::1' means the dimension is contiguous with dtype
+#   - 'indirect_contiguous' means a contiguous list of pointers
+#   - dtype contiguous must be contiguous in the first or last dimension
+#     from the start, or from the dimension following the last indirect dimension
+#
+#   e.g.
+#   int[::indirect_contiguous, ::contiguous, :]
+#
+# is valid (list of pointers to 2d fortran-contiguous array), but
+#
+#   int[::generic_contiguous, ::contiguous, :]
+#
+# would mean you'd have to assert dimension 0 to be indirect (and pointer contiguous) at runtime.
+# So it doesn't bring any performance benefit, and it's only confusing.
+
+@cname('__pyx_MemviewEnum')
+cdef class Enum(object):
+    cdef object name
+    def __init__(self, name):
+        self.name = name
+    def __repr__(self):
+        return self.name
+
+cdef generic = Enum("<strided and direct or indirect>")
+cdef strided = Enum("<strided and direct>") # default
+cdef indirect = Enum("<strided and indirect>")
+# Disable generic_contiguous, as it is a troublemaker
+#cdef generic_contiguous = Enum("<contiguous and direct or indirect>")
+cdef contiguous = Enum("<contiguous and direct>")
+cdef indirect_contiguous = Enum("<contiguous and indirect>")
+
+# 'follow' is implied when the first or last axis is ::1
+
+
+@cname('__pyx_align_pointer')
+cdef void *align_pointer(void *memory, size_t alignment) nogil:
+    "Align pointer memory on a given boundary"
+    cdef Py_intptr_t aligned_p = <Py_intptr_t> memory
+    cdef size_t offset
+
+    with cython.cdivision(True):
+        offset = aligned_p % alignment
+
+    if offset > 0:
+        aligned_p += alignment - offset
+
+    return <void *> aligned_p
+
+
+# pre-allocate thread locks for reuse
+## note that this could be implemented in a more beautiful way in "normal" Cython,
+## but this code gets merged into the user module and not everything works there.
+DEF THREAD_LOCKS_PREALLOCATED = 8
+cdef int __pyx_memoryview_thread_locks_used = 0
+cdef PyThread_type_lock[THREAD_LOCKS_PREALLOCATED] __pyx_memoryview_thread_locks = [
+    PyThread_allocate_lock(),
+    PyThread_allocate_lock(),
+    PyThread_allocate_lock(),
+    PyThread_allocate_lock(),
+    PyThread_allocate_lock(),
+    PyThread_allocate_lock(),
+    PyThread_allocate_lock(),
+    PyThread_allocate_lock(),
+]
+
+
+@cname('__pyx_memoryview')
+cdef class memoryview(object):
+
+    cdef object obj
+    cdef object _size
+    cdef object _array_interface
+    cdef PyThread_type_lock lock
+    # the following array will contain a single __pyx_atomic int with
+    # suitable alignment
+    cdef __pyx_atomic_int acquisition_count[2]
+    cdef __pyx_atomic_int *acquisition_count_aligned_p
+    cdef Py_buffer view
+    cdef int flags
+    cdef bint dtype_is_object
+    cdef __Pyx_TypeInfo *typeinfo
+
+    def __cinit__(memoryview self, object obj, int flags, bint dtype_is_object=False):
+        self.obj = obj
+        self.flags = flags
+        if type(self) is memoryview or obj is not None:
+            __Pyx_GetBuffer(obj, &self.view, flags)
+            if self.view.obj == NULL:
+                (<__pyx_buffer *> &self.view).obj = Py_None
+                Py_INCREF(Py_None)
+
+        global __pyx_memoryview_thread_locks_used
+        if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED:
+            self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
+            __pyx_memoryview_thread_locks_used += 1
+        if self.lock is NULL:
+            self.lock = PyThread_allocate_lock()
+            if self.lock is NULL:
+                raise MemoryError
+
+        if flags & PyBUF_FORMAT:
+            self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0')
+        else:
+            self.dtype_is_object = dtype_is_object
+
+        self.acquisition_count_aligned_p = <__pyx_atomic_int *> align_pointer(
+                  <void *> &self.acquisition_count[0], sizeof(__pyx_atomic_int))
+        self.typeinfo = NULL
+
+    def __dealloc__(memoryview self):
+        if self.obj is not None:
+            __Pyx_ReleaseBuffer(&self.view)
+        elif (<__pyx_buffer *> &self.view).obj == Py_None:
+            # Undo the incref in __cinit__() above.
+            (<__pyx_buffer *> &self.view).obj = NULL
+            Py_DECREF(Py_None)
+
+        cdef int i
+        global __pyx_memoryview_thread_locks_used
+        if self.lock != NULL:
+            for i in range(__pyx_memoryview_thread_locks_used):
+                if __pyx_memoryview_thread_locks[i] is self.lock:
+                    __pyx_memoryview_thread_locks_used -= 1
+                    if i != __pyx_memoryview_thread_locks_used:
+                        __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = (
+                            __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used], __pyx_memoryview_thread_locks[i])
+                    break
+            else:
+                PyThread_free_lock(self.lock)
+
+    cdef char *get_item_pointer(memoryview self, object index) except NULL:
+        cdef Py_ssize_t dim
+        cdef char *itemp = <char *> self.view.buf
+
+        for dim, idx in enumerate(index):
+            itemp = pybuffer_index(&self.view, itemp, idx, dim)
+
+        return itemp
+
+    #@cname('__pyx_memoryview_getitem')
+    def __getitem__(memoryview self, object index):
+        if index is Ellipsis:
+            return self
+
+        have_slices, indices = _unellipsify(index, self.view.ndim)
+
+        cdef char *itemp
+        if have_slices:
+            return memview_slice(self, indices)
+        else:
+            itemp = self.get_item_pointer(indices)
+            return self.convert_item_to_object(itemp)
+
+    def __setitem__(memoryview self, object index, object value):
+        if self.view.readonly:
+            raise TypeError("Cannot assign to read-only memoryview")
+
+        have_slices, index = _unellipsify(index, self.view.ndim)
+
+        if have_slices:
+            obj = self.is_slice(value)
+            if obj:
+                self.setitem_slice_assignment(self[index], obj)
+            else:
+                self.setitem_slice_assign_scalar(self[index], value)
+        else:
+            self.setitem_indexed(index, value)
+
+    cdef is_slice(self, obj):
+        if not isinstance(obj, memoryview):
+            try:
+                obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS,
+                                 self.dtype_is_object)
+            except TypeError:
+                return None
+
+        return obj
+
+    cdef setitem_slice_assignment(self, dst, src):
+        cdef {{memviewslice_name}} dst_slice
+        cdef {{memviewslice_name}} src_slice
+
+        memoryview_copy_contents(get_slice_from_memview(src, &src_slice)[0],
+                                 get_slice_from_memview(dst, &dst_slice)[0],
+                                 src.ndim, dst.ndim, self.dtype_is_object)
+
+    cdef setitem_slice_assign_scalar(self, memoryview dst, value):
+        cdef int array[128]
+        cdef void *tmp = NULL
+        cdef void *item
+
+        cdef {{memviewslice_name}} *dst_slice
+        cdef {{memviewslice_name}} tmp_slice
+        dst_slice = get_slice_from_memview(dst, &tmp_slice)
+
+        if self.view.itemsize > sizeof(array):
+            tmp = PyMem_Malloc(self.view.itemsize)
+            if tmp == NULL:
+                raise MemoryError
+            item = tmp
+        else:
+            item = <void *> array
+
+        try:
+            if self.dtype_is_object:
+                (<PyObject **> item)[0] = <PyObject *> value
+            else:
+                self.assign_item_from_object(<char *> item, value)
+
+            # It would be easy to support indirect dimensions, but it's easier
+            # to disallow :)
+            if self.view.suboffsets != NULL:
+                assert_direct_dimensions(self.view.suboffsets, self.view.ndim)
+            slice_assign_scalar(dst_slice, dst.view.ndim, self.view.itemsize,
+                                item, self.dtype_is_object)
+        finally:
+            PyMem_Free(tmp)
+
+    cdef setitem_indexed(self, index, value):
+        cdef char *itemp = self.get_item_pointer(index)
+        self.assign_item_from_object(itemp, value)
+
+    cdef convert_item_to_object(self, char *itemp):
+        """Only used if instantiated manually by the user, or if Cython doesn't
+        know how to convert the type"""
+        import struct
+        cdef bytes bytesitem
+        # Do a manual and complete check here instead of this easy hack
+        bytesitem = itemp[:self.view.itemsize]
+        try:
+            result = struct.unpack(self.view.format, bytesitem)
+        except struct.error:
+            raise ValueError("Unable to convert item to object")
+        else:
+            if len(self.view.format) == 1:
+                return result[0]
+            return result
+
+    cdef assign_item_from_object(self, char *itemp, object value):
+        """Only used if instantiated manually by the user, or if Cython doesn't
+        know how to convert the type"""
+        import struct
+        cdef char c
+        cdef bytes bytesvalue
+        cdef Py_ssize_t i
+
+        if isinstance(value, tuple):
+            bytesvalue = struct.pack(self.view.format, *value)
+        else:
+            bytesvalue = struct.pack(self.view.format, value)
+
+        for i, c in enumerate(bytesvalue):
+            itemp[i] = c
+
+    @cname('getbuffer')
+    def __getbuffer__(self, Py_buffer *info, int flags):
+        if flags & PyBUF_WRITABLE and self.view.readonly:
+            raise ValueError("Cannot create writable memory view from read-only memoryview")
+
+        if flags & PyBUF_ND:
+            info.shape = self.view.shape
+        else:
+            info.shape = NULL
+
+        if flags & PyBUF_STRIDES:
+            info.strides = self.view.strides
+        else:
+            info.strides = NULL
+
+        if flags & PyBUF_INDIRECT:
+            info.suboffsets = self.view.suboffsets
+        else:
+            info.suboffsets = NULL
+
+        if flags & PyBUF_FORMAT:
+            info.format = self.view.format
+        else:
+            info.format = NULL
+
+        info.buf = self.view.buf
+        info.ndim = self.view.ndim
+        info.itemsize = self.view.itemsize
+        info.len = self.view.len
+        info.readonly = self.view.readonly
+        info.obj = self
+
+    __pyx_getbuffer = capsule(<void *> &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)")
+
+    # Some properties that have the same semantics as in NumPy
+    @property
+    def T(self):
+        cdef _memoryviewslice result = memoryview_copy(self)
+        transpose_memslice(&result.from_slice)
+        return result
+
+    @property
+    def base(self):
+        return self.obj
+
+    @property
+    def shape(self):
+        return tuple([length for length in self.view.shape[:self.view.ndim]])
+
+    @property
+    def strides(self):
+        if self.view.strides == NULL:
+            # Note: we always ask for strides, so if this is not set it's a bug
+            raise ValueError("Buffer view does not expose strides")
+
+        return tuple([stride for stride in self.view.strides[:self.view.ndim]])
+
+    @property
+    def suboffsets(self):
+        if self.view.suboffsets == NULL:
+            return (-1,) * self.view.ndim
+
+        return tuple([suboffset for suboffset in self.view.suboffsets[:self.view.ndim]])
+
+    @property
+    def ndim(self):
+        return self.view.ndim
+
+    @property
+    def itemsize(self):
+        return self.view.itemsize
+
+    @property
+    def nbytes(self):
+        return self.size * self.view.itemsize
+
+    @property
+    def size(self):
+        if self._size is None:
+            result = 1
+
+            for length in self.view.shape[:self.view.ndim]:
+                result *= length
+
+            self._size = result
+
+        return self._size
+
+    def __len__(self):
+        if self.view.ndim >= 1:
+            return self.view.shape[0]
+
+        return 0
+
+    def __repr__(self):
+        return "<MemoryView of %r at 0x%x>" % (self.base.__class__.__name__,
+                                               id(self))
+
+    def __str__(self):
+        return "<MemoryView of %r object>" % (self.base.__class__.__name__,)
+
+    # Support the same attributes as memoryview slices
+    def is_c_contig(self):
+        cdef {{memviewslice_name}} *mslice
+        cdef {{memviewslice_name}} tmp
+        mslice = get_slice_from_memview(self, &tmp)
+        return slice_is_contig(mslice[0], 'C', self.view.ndim)
+
+    def is_f_contig(self):
+        cdef {{memviewslice_name}} *mslice
+        cdef {{memviewslice_name}} tmp
+        mslice = get_slice_from_memview(self, &tmp)
+        return slice_is_contig(mslice[0], 'F', self.view.ndim)
+
+    def copy(self):
+        cdef {{memviewslice_name}} mslice
+        cdef int flags = self.flags & ~PyBUF_F_CONTIGUOUS
+
+        slice_copy(self, &mslice)
+        mslice = slice_copy_contig(&mslice, "c", self.view.ndim,
+                                   self.view.itemsize,
+                                   flags|PyBUF_C_CONTIGUOUS,
self.dtype_is_object) + + return memoryview_copy_from_slice(self, &mslice) + + def copy_fortran(self): + cdef {{memviewslice_name}} src, dst + cdef int flags = self.flags & ~PyBUF_C_CONTIGUOUS + + slice_copy(self, &src) + dst = slice_copy_contig(&src, "fortran", self.view.ndim, + self.view.itemsize, + flags|PyBUF_F_CONTIGUOUS, + self.dtype_is_object) + + return memoryview_copy_from_slice(self, &dst) + + +@cname('__pyx_memoryview_new') +cdef memoryview_cwrapper(object o, int flags, bint dtype_is_object, __Pyx_TypeInfo *typeinfo): + cdef memoryview result = memoryview(o, flags, dtype_is_object) + result.typeinfo = typeinfo + return result + +@cname('__pyx_memoryview_check') +cdef inline bint memoryview_check(object o): + return isinstance(o, memoryview) + +cdef tuple _unellipsify(object index, int ndim): + """ + Replace all ellipses with full slices and fill incomplete indices with + full slices. + """ + if not isinstance(index, tuple): + tup = (index,) + else: + tup = index + + result = [] + have_slices = False + seen_ellipsis = False + for idx, item in enumerate(tup): + if item is Ellipsis: + if not seen_ellipsis: + result.extend([slice(None)] * (ndim - len(tup) + 1)) + seen_ellipsis = True + else: + result.append(slice(None)) + have_slices = True + else: + if not isinstance(item, slice) and not PyIndex_Check(item): + raise TypeError("Cannot index with type '%s'" % type(item)) + + have_slices = have_slices or isinstance(item, slice) + result.append(item) + + nslices = ndim - len(result) + if nslices: + result.extend([slice(None)] * nslices) + + return have_slices or nslices, tuple(result) + +cdef assert_direct_dimensions(Py_ssize_t *suboffsets, int ndim): + for suboffset in suboffsets[:ndim]: + if suboffset >= 0: + raise ValueError("Indirect dimensions not supported") + +# +### Slicing a memoryview +# + +@cname('__pyx_memview_slice') +cdef memoryview memview_slice(memoryview memview, object indices): + cdef int new_ndim = 0, suboffset_dim = -1, dim + cdef bint negative_step + cdef {{memviewslice_name}} src, dst + cdef {{memviewslice_name}} *p_src + + # dst is copied by value in memoryview_fromslice -- initialize it + # src is never copied + memset(&dst, 0, sizeof(dst)) + + cdef _memoryviewslice memviewsliceobj + + assert memview.view.ndim > 0 + + if isinstance(memview, _memoryviewslice): + memviewsliceobj = memview + p_src = &memviewsliceobj.from_slice + else: + slice_copy(memview, &src) + p_src = &src + + # Note: don't use variable src at this point + # SubNote: we should be able to declare variables in blocks... 
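+    # Each entry of `indices` is handled in one of three ways below, e.g.
+    # for m[1, None, ::2]: the integer 1 consumes a source dimension without
+    # producing a destination one, None inserts a new length-1 axis with
+    # stride 0, and the slice ::2 forwards start/stop/step together with the
+    # have_start/have_stop/have_step flags so defaults can be resolved later.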
+
+    # memoryview_fromslice() will inc our dst slice
+    dst.memview = p_src.memview
+    dst.data = p_src.data
+
+    # Put everything in temps to avoid this bloody warning:
+    # "Argument evaluation order in C function call is undefined and
+    #  may not be as expected"
+    cdef {{memviewslice_name}} *p_dst = &dst
+    cdef int *p_suboffset_dim = &suboffset_dim
+    cdef Py_ssize_t start, stop, step
+    cdef bint have_start, have_stop, have_step
+
+    for dim, index in enumerate(indices):
+        if PyIndex_Check(index):
+            slice_memviewslice(
+                p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim],
+                dim, new_ndim, p_suboffset_dim,
+                index, 0, 0, # start, stop, step
+                0, 0, 0, # have_{start,stop,step}
+                False)
+        elif index is None:
+            p_dst.shape[new_ndim] = 1
+            p_dst.strides[new_ndim] = 0
+            p_dst.suboffsets[new_ndim] = -1
+            new_ndim += 1
+        else:
+            start = index.start or 0
+            stop = index.stop or 0
+            step = index.step or 0
+
+            have_start = index.start is not None
+            have_stop = index.stop is not None
+            have_step = index.step is not None
+
+            slice_memviewslice(
+                p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim],
+                dim, new_ndim, p_suboffset_dim,
+                start, stop, step,
+                have_start, have_stop, have_step,
+                True)
+            new_ndim += 1
+
+    if isinstance(memview, _memoryviewslice):
+        return memoryview_fromslice(dst, new_ndim,
+                                    memviewsliceobj.to_object_func,
+                                    memviewsliceobj.to_dtype_func,
+                                    memview.dtype_is_object)
+    else:
+        return memoryview_fromslice(dst, new_ndim, NULL, NULL,
+                                    memview.dtype_is_object)
+
+
+#
+### Slicing in a single dimension of a memoryviewslice
+#
+
+cdef extern from "<stdlib.h>":
+    void abort() nogil
+    void printf(char *s, ...) nogil
+
+cdef extern from "<stdio.h>":
+    ctypedef struct FILE
+    FILE *stderr
+    int fputs(char *s, FILE *stream)
+
+cdef extern from "pystate.h":
+    void PyThreadState_Get() nogil
+
+    # These are not actually nogil, but we check for the GIL before calling them
+    void PyErr_SetString(PyObject *type, char *msg) nogil
+    PyObject *PyErr_Format(PyObject *exc, char *msg, ...) nogil
+
+@cname('__pyx_memoryview_slice_memviewslice')
+cdef int slice_memviewslice(
+        {{memviewslice_name}} *dst,
+        Py_ssize_t shape, Py_ssize_t stride, Py_ssize_t suboffset,
+        int dim, int new_ndim, int *suboffset_dim,
+        Py_ssize_t start, Py_ssize_t stop, Py_ssize_t step,
+        int have_start, int have_stop, int have_step,
+        bint is_slice) nogil except -1:
+    """
+    Create a new slice dst given slice src.
+
+    dim             - the current src dimension (indexing will make dimensions
+                                                 disappear)
+    new_dim         - the new dst dimension
+    suboffset_dim   - pointer to a single int initialized to -1 to keep track of
+                      where slicing offsets should be added
+    """
+
+    cdef Py_ssize_t new_shape
+    cdef bint negative_step
+
+    if not is_slice:
+        # index is a normal integer-like index
+        if start < 0:
+            start += shape
+        if not 0 <= start < shape:
+            _err_dim(IndexError, "Index out of bounds (axis %d)", dim)
+    else:
+        # index is a slice
+        negative_step = have_step != 0 and step < 0
+
+        if have_step and step == 0:
+            _err_dim(ValueError, "Step may not be zero (axis %d)", dim)
+
+        # check our bounds and set defaults
+        if have_start:
+            if start < 0:
+                start += shape
+                if start < 0:
+                    start = 0
+            elif start >= shape:
+                if negative_step:
+                    start = shape - 1
+                else:
+                    start = shape
+        else:
+            if negative_step:
+                start = shape - 1
+            else:
+                start = 0
+
+        if have_stop:
+            if stop < 0:
+                stop += shape
+                if stop < 0:
+                    stop = 0
+            elif stop > shape:
+                stop = shape
+        else:
+            if negative_step:
+                stop = -1
+            else:
+                stop = shape
+
+        if not have_step:
+            step = 1
+
+        # len = ceil( (stop - start) / step )
+        with cython.cdivision(True):
+            new_shape = (stop - start) // step
+
+            if (stop - start) - step * new_shape:
+                new_shape += 1
+
+        if new_shape < 0:
+            new_shape = 0
+
+        # shape/strides/suboffsets
+        dst.strides[new_ndim] = stride * step
+        dst.shape[new_ndim] = new_shape
+        dst.suboffsets[new_ndim] = suboffset
+
+    # Add the slicing or indexing offsets to the right suboffset or base data
+    if suboffset_dim[0] < 0:
+        dst.data += start * stride
+    else:
+        dst.suboffsets[suboffset_dim[0]] += start * stride
+
+    if suboffset >= 0:
+        if not is_slice:
+            if new_ndim == 0:
+                dst.data = (<char **> dst.data)[0] + suboffset
+            else:
+                _err_dim(IndexError, "All dimensions preceding dimension %d "
+                                     "must be indexed and not sliced", dim)
+        else:
+            suboffset_dim[0] = new_ndim
+
+    return 0
+
+#
+### Index a memoryview
+#
+@cname('__pyx_pybuffer_index')
+cdef char *pybuffer_index(Py_buffer *view, char *bufp, Py_ssize_t index,
+                          Py_ssize_t dim) except NULL:
+    cdef Py_ssize_t shape, stride, suboffset = -1
+    cdef Py_ssize_t itemsize = view.itemsize
+    cdef char *resultp
+
+    if view.ndim == 0:
+        shape = view.len / itemsize
+        stride = itemsize
+    else:
+        shape = view.shape[dim]
+        stride = view.strides[dim]
+        if view.suboffsets != NULL:
+            suboffset = view.suboffsets[dim]
+
+    if index < 0:
+        index += view.shape[dim]
+        if index < 0:
+            raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
+
+    if index >= shape:
+        raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
+
+    resultp = bufp + index * stride
+    if suboffset >= 0:
+        resultp = (<char **> resultp)[0] + suboffset
+
+    return resultp
+
+#
+### Transposing a memoryviewslice
+#
+@cname('__pyx_memslice_transpose')
+cdef int transpose_memslice({{memviewslice_name}} *memslice) nogil except 0:
+    cdef int ndim = memslice.memview.view.ndim
+
+    cdef Py_ssize_t *shape = memslice.shape
+    cdef Py_ssize_t *strides = memslice.strides
+
+    # reverse strides and shape
+    cdef int i, j
+    for i in range(ndim / 2):
+        j = ndim - 1 - i
+        strides[i], strides[j] = strides[j], strides[i]
+        shape[i], shape[j] = shape[j], shape[i]
+
+        if memslice.suboffsets[i] >= 0 or memslice.suboffsets[j] >= 0:
+            _err(ValueError, "Cannot transpose memoryview with indirect dimensions")
+
+    return 1
+
+#
+### Creating new memoryview objects from slices and memoryviews
+#
+@cname('__pyx_memoryviewslice')
+cdef class _memoryviewslice(memoryview):
+    "Internal class for passing memoryview slices to Python"
+
+    # We need this to keep our shape/strides/suboffset pointers valid
+    cdef {{memviewslice_name}} from_slice
+    # We need this only to print its class's name
+    cdef object from_object
+
+    cdef object (*to_object_func)(char *)
+    cdef int (*to_dtype_func)(char *, object) except 0
+
+    def __dealloc__(self):
+        __PYX_XDEC_MEMVIEW(&self.from_slice, 1)
+
+    cdef convert_item_to_object(self, char *itemp):
+        if self.to_object_func != NULL:
+            return self.to_object_func(itemp)
+        else:
+            return memoryview.convert_item_to_object(self, itemp)
+
+    cdef assign_item_from_object(self, char *itemp, object value):
+        if self.to_dtype_func != NULL:
+            self.to_dtype_func(itemp, value)
+        else:
+            memoryview.assign_item_from_object(self, itemp, value)
+
+    @property
+    def base(self):
+        return self.from_object
+
+    __pyx_getbuffer = capsule(<void *> &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)")
+
+
+@cname('__pyx_memoryview_fromslice')
+cdef memoryview_fromslice({{memviewslice_name}} memviewslice,
+                          int ndim,
+                          object (*to_object_func)(char *),
+                          int (*to_dtype_func)(char *, object) except 0,
+                          bint dtype_is_object):
+
+    cdef _memoryviewslice result
+
+    if <PyObject *> memviewslice.memview == Py_None:
+        return None
+
+    # assert 0 < ndim <= memviewslice.memview.view.ndim, (
+    #                 ndim, memviewslice.memview.view.ndim)
+
+    result = _memoryviewslice(None, 0, dtype_is_object)
+
+    result.from_slice = memviewslice
+    __PYX_INC_MEMVIEW(&memviewslice, 1)
+
+    result.from_object = (<memoryview> memviewslice.memview).base
+    result.typeinfo = memviewslice.memview.typeinfo
+
+    result.view = memviewslice.memview.view
+    result.view.buf = memviewslice.data
+    result.view.ndim = ndim
+    (<__pyx_buffer *> &result.view).obj = Py_None
+    Py_INCREF(Py_None)
+
+    if (<memoryview> memviewslice.memview).flags & PyBUF_WRITABLE:
+        result.flags = PyBUF_RECORDS
+    else:
+        result.flags = PyBUF_RECORDS_RO
+
+    result.view.shape = result.from_slice.shape
+    result.view.strides = result.from_slice.strides
+
+    # only set suboffsets if actually used, otherwise set to NULL to improve compatibility
+    result.view.suboffsets = NULL
+    for suboffset in result.from_slice.suboffsets[:ndim]:
+        if suboffset >= 0:
+            result.view.suboffsets = result.from_slice.suboffsets
+            break
+
+    result.view.len = result.view.itemsize
+    for length in result.view.shape[:ndim]:
+        result.view.len *= length
+
+    result.to_object_func = to_object_func
+    result.to_dtype_func = to_dtype_func
+
+    return result
+
+@cname('__pyx_memoryview_get_slice_from_memoryview')
+cdef {{memviewslice_name}} *get_slice_from_memview(memoryview memview,
+                                                   {{memviewslice_name}} *mslice) except NULL:
+    cdef _memoryviewslice obj
+    if isinstance(memview, _memoryviewslice):
+        obj = memview
+        return &obj.from_slice
+    else:
+        slice_copy(memview, mslice)
+        return mslice
+
+@cname('__pyx_memoryview_slice_copy')
+cdef void slice_copy(memoryview memview, {{memviewslice_name}} *dst):
+    cdef int dim
+    cdef (Py_ssize_t*) shape, strides, suboffsets
+
+    shape = memview.view.shape
+    strides = memview.view.strides
+    suboffsets = memview.view.suboffsets
+
+    dst.memview = <__pyx_memoryview *> memview
+    dst.data = <char *> memview.view.buf
+
+    for dim in range(memview.view.ndim):
+        dst.shape[dim] = shape[dim]
+        dst.strides[dim] = strides[dim]
+        dst.suboffsets[dim] = suboffsets[dim] if suboffsets else -1
+
+@cname('__pyx_memoryview_copy_object')
+cdef memoryview_copy(memoryview memview):
+    "Create a new memoryview object"
+    cdef {{memviewslice_name}} memviewslice
+    slice_copy(memview, &memviewslice)
+    return
memoryview_copy_from_slice(memview, &memviewslice) + +@cname('__pyx_memoryview_copy_object_from_slice') +cdef memoryview_copy_from_slice(memoryview memview, {{memviewslice_name}} *memviewslice): + """ + Create a new memoryview object from a given memoryview object and slice. + """ + cdef object (*to_object_func)(char *) + cdef int (*to_dtype_func)(char *, object) except 0 + + if isinstance(memview, _memoryviewslice): + to_object_func = (<_memoryviewslice> memview).to_object_func + to_dtype_func = (<_memoryviewslice> memview).to_dtype_func + else: + to_object_func = NULL + to_dtype_func = NULL + + return memoryview_fromslice(memviewslice[0], memview.view.ndim, + to_object_func, to_dtype_func, + memview.dtype_is_object) + + +# +### Copy the contents of a memoryview slices +# +cdef Py_ssize_t abs_py_ssize_t(Py_ssize_t arg) nogil: + if arg < 0: + return -arg + else: + return arg + +@cname('__pyx_get_best_slice_order') +cdef char get_best_order({{memviewslice_name}} *mslice, int ndim) nogil: + """ + Figure out the best memory access order for a given slice. + """ + cdef int i + cdef Py_ssize_t c_stride = 0 + cdef Py_ssize_t f_stride = 0 + + for i in range(ndim - 1, -1, -1): + if mslice.shape[i] > 1: + c_stride = mslice.strides[i] + break + + for i in range(ndim): + if mslice.shape[i] > 1: + f_stride = mslice.strides[i] + break + + if abs_py_ssize_t(c_stride) <= abs_py_ssize_t(f_stride): + return 'C' + else: + return 'F' + +@cython.cdivision(True) +cdef void _copy_strided_to_strided(char *src_data, Py_ssize_t *src_strides, + char *dst_data, Py_ssize_t *dst_strides, + Py_ssize_t *src_shape, Py_ssize_t *dst_shape, + int ndim, size_t itemsize) nogil: + # Note: src_extent is 1 if we're broadcasting + # dst_extent always >= src_extent as we don't do reductions + cdef Py_ssize_t i + cdef Py_ssize_t src_extent = src_shape[0] + cdef Py_ssize_t dst_extent = dst_shape[0] + cdef Py_ssize_t src_stride = src_strides[0] + cdef Py_ssize_t dst_stride = dst_strides[0] + + if ndim == 1: + if (src_stride > 0 and dst_stride > 0 and + src_stride == itemsize == dst_stride): + memcpy(dst_data, src_data, itemsize * dst_extent) + else: + for i in range(dst_extent): + memcpy(dst_data, src_data, itemsize) + src_data += src_stride + dst_data += dst_stride + else: + for i in range(dst_extent): + _copy_strided_to_strided(src_data, src_strides + 1, + dst_data, dst_strides + 1, + src_shape + 1, dst_shape + 1, + ndim - 1, itemsize) + src_data += src_stride + dst_data += dst_stride + +cdef void copy_strided_to_strided({{memviewslice_name}} *src, + {{memviewslice_name}} *dst, + int ndim, size_t itemsize) nogil: + _copy_strided_to_strided(src.data, src.strides, dst.data, dst.strides, + src.shape, dst.shape, ndim, itemsize) + +@cname('__pyx_memoryview_slice_get_size') +cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) nogil: + "Return the size of the memory occupied by the slice in number of bytes" + cdef Py_ssize_t shape, size = src.memview.view.itemsize + + for shape in src.shape[:ndim]: + size *= shape + + return size + +@cname('__pyx_fill_contig_strides_array') +cdef Py_ssize_t fill_contig_strides_array( + Py_ssize_t *shape, Py_ssize_t *strides, Py_ssize_t stride, + int ndim, char order) nogil: + """ + Fill the strides array for a slice with C or F contiguous strides. 
+ This is like PyBuffer_FillContiguousStrides, but compatible with py < 2.6 + """ + cdef int idx + + if order == 'F': + for idx in range(ndim): + strides[idx] = stride + stride *= shape[idx] + else: + for idx in range(ndim - 1, -1, -1): + strides[idx] = stride + stride *= shape[idx] + + return stride + +@cname('__pyx_memoryview_copy_data_to_temp') +cdef void *copy_data_to_temp({{memviewslice_name}} *src, + {{memviewslice_name}} *tmpslice, + char order, + int ndim) nogil except NULL: + """ + Copy a direct slice to temporary contiguous memory. The caller should free + the result when done. + """ + cdef int i + cdef void *result + + cdef size_t itemsize = src.memview.view.itemsize + cdef size_t size = slice_get_size(src, ndim) + + result = malloc(size) + if not result: + _err(MemoryError, NULL) + + # tmpslice[0] = src + tmpslice.data = result + tmpslice.memview = src.memview + for i in range(ndim): + tmpslice.shape[i] = src.shape[i] + tmpslice.suboffsets[i] = -1 + + fill_contig_strides_array(&tmpslice.shape[0], &tmpslice.strides[0], itemsize, + ndim, order) + + # We need to broadcast strides again + for i in range(ndim): + if tmpslice.shape[i] == 1: + tmpslice.strides[i] = 0 + + if slice_is_contig(src[0], order, ndim): + memcpy(result, src.data, size) + else: + copy_strided_to_strided(src, tmpslice, ndim, itemsize) + + return result + +# Use 'with gil' functions and avoid 'with gil' blocks, as the code within the blocks +# has temporaries that need the GIL to clean up +@cname('__pyx_memoryview_err_extents') +cdef int _err_extents(int i, Py_ssize_t extent1, + Py_ssize_t extent2) except -1 with gil: + raise ValueError("got differing extents in dimension %d (got %d and %d)" % + (i, extent1, extent2)) + +@cname('__pyx_memoryview_err_dim') +cdef int _err_dim(object error, char *msg, int dim) except -1 with gil: + raise error(msg.decode('ascii') % dim) + +@cname('__pyx_memoryview_err') +cdef int _err(object error, char *msg) except -1 with gil: + if msg != NULL: + raise error(msg.decode('ascii')) + else: + raise error + +@cname('__pyx_memoryview_copy_contents') +cdef int memoryview_copy_contents({{memviewslice_name}} src, + {{memviewslice_name}} dst, + int src_ndim, int dst_ndim, + bint dtype_is_object) nogil except -1: + """ + Copy memory from slice src to slice dst. + Check for overlapping memory and verify the shapes. + """ + cdef void *tmpdata = NULL + cdef size_t itemsize = src.memview.view.itemsize + cdef int i + cdef char order = get_best_order(&src, src_ndim) + cdef bint broadcasting = False + cdef bint direct_copy = False + cdef {{memviewslice_name}} tmp + + if src_ndim < dst_ndim: + broadcast_leading(&src, src_ndim, dst_ndim) + elif dst_ndim < src_ndim: + broadcast_leading(&dst, dst_ndim, src_ndim) + + cdef int ndim = max(src_ndim, dst_ndim) + + for i in range(ndim): + if src.shape[i] != dst.shape[i]: + if src.shape[i] == 1: + broadcasting = True + src.strides[i] = 0 + else: + _err_extents(i, dst.shape[i], src.shape[i]) + + if src.suboffsets[i] >= 0: + _err_dim(ValueError, "Dimension %d is not direct", i) + + if slices_overlap(&src, &dst, ndim, itemsize): + # slices overlap, copy to temp, copy temp to dst + if not slice_is_contig(src, order, ndim): + order = get_best_order(&dst, ndim) + + tmpdata = copy_data_to_temp(&src, &tmp, order, ndim) + src = tmp + + if not broadcasting: + # See if both slices have equal contiguity, in that case perform a + # direct copy. This only works when we are not broadcasting. 
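+        # Sketch of the fast path: matching contiguity means the payload is
+        # one flat run of slice_get_size() bytes, so a single memcpy replaces
+        # the recursive strided copy done by copy_strided_to_strided() below.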
+        if slice_is_contig(src, 'C', ndim):
+            direct_copy = slice_is_contig(dst, 'C', ndim)
+        elif slice_is_contig(src, 'F', ndim):
+            direct_copy = slice_is_contig(dst, 'F', ndim)
+
+        if direct_copy:
+            # Contiguous slices with same order
+            refcount_copying(&dst, dtype_is_object, ndim, False)
+            memcpy(dst.data, src.data, slice_get_size(&src, ndim))
+            refcount_copying(&dst, dtype_is_object, ndim, True)
+            free(tmpdata)
+            return 0
+
+    if order == 'F' == get_best_order(&dst, ndim):
+        # see if both slices have Fortran order, transpose them to match our
+        # C-style indexing order
+        transpose_memslice(&src)
+        transpose_memslice(&dst)
+
+    refcount_copying(&dst, dtype_is_object, ndim, False)
+    copy_strided_to_strided(&src, &dst, ndim, itemsize)
+    refcount_copying(&dst, dtype_is_object, ndim, True)
+
+    free(tmpdata)
+    return 0
+
+@cname('__pyx_memoryview_broadcast_leading')
+cdef void broadcast_leading({{memviewslice_name}} *mslice,
+                            int ndim,
+                            int ndim_other) nogil:
+    cdef int i
+    cdef int offset = ndim_other - ndim
+
+    for i in range(ndim - 1, -1, -1):
+        mslice.shape[i + offset] = mslice.shape[i]
+        mslice.strides[i + offset] = mslice.strides[i]
+        mslice.suboffsets[i + offset] = mslice.suboffsets[i]
+
+    for i in range(offset):
+        mslice.shape[i] = 1
+        mslice.strides[i] = mslice.strides[0]
+        mslice.suboffsets[i] = -1
+
+#
+### Take care of refcounting the objects in slices. Do this separately from any copying,
+### to minimize acquiring the GIL
+#
+
+@cname('__pyx_memoryview_refcount_copying')
+cdef void refcount_copying({{memviewslice_name}} *dst, bint dtype_is_object,
+                           int ndim, bint inc) nogil:
+    # incref or decref the objects in the destination slice if the dtype is
+    # object
+    if dtype_is_object:
+        refcount_objects_in_slice_with_gil(dst.data, dst.shape,
+                                           dst.strides, ndim, inc)
+
+@cname('__pyx_memoryview_refcount_objects_in_slice_with_gil')
+cdef void refcount_objects_in_slice_with_gil(char *data, Py_ssize_t *shape,
+                                             Py_ssize_t *strides, int ndim,
+                                             bint inc) with gil:
+    refcount_objects_in_slice(data, shape, strides, ndim, inc)
+
+@cname('__pyx_memoryview_refcount_objects_in_slice')
+cdef void refcount_objects_in_slice(char *data, Py_ssize_t *shape,
+                                    Py_ssize_t *strides, int ndim, bint inc):
+    cdef Py_ssize_t i
+
+    for i in range(shape[0]):
+        if ndim == 1:
+            if inc:
+                Py_INCREF((<PyObject **> data)[0])
+            else:
+                Py_DECREF((<PyObject **> data)[0])
+        else:
+            refcount_objects_in_slice(data, shape + 1, strides + 1,
+                                      ndim - 1, inc)
+
+        data += strides[0]
+
+#
+### Scalar to slice assignment
+#
+@cname('__pyx_memoryview_slice_assign_scalar')
+cdef void slice_assign_scalar({{memviewslice_name}} *dst, int ndim,
+                              size_t itemsize, void *item,
+                              bint dtype_is_object) nogil:
+    refcount_copying(dst, dtype_is_object, ndim, False)
+    _slice_assign_scalar(dst.data, dst.shape, dst.strides, ndim,
+                         itemsize, item)
+    refcount_copying(dst, dtype_is_object, ndim, True)
+
+
+@cname('__pyx_memoryview__slice_assign_scalar')
+cdef void _slice_assign_scalar(char *data, Py_ssize_t *shape,
+                               Py_ssize_t *strides, int ndim,
+                               size_t itemsize, void *item) nogil:
+    cdef Py_ssize_t i
+    cdef Py_ssize_t stride = strides[0]
+    cdef Py_ssize_t extent = shape[0]
+
+    if ndim == 1:
+        for i in range(extent):
+            memcpy(data, item, itemsize)
+            data += stride
+    else:
+        for i in range(extent):
+            _slice_assign_scalar(data, shape + 1, strides + 1,
+                                 ndim - 1, itemsize, item)
+            data += stride
+
+
+############### BufferFormatFromTypeInfo ###############
+cdef extern from *:
+    ctypedef struct __Pyx_StructField
+
+    cdef enum:
+        __PYX_BUF_FLAGS_PACKED_STRUCT
+        __PYX_BUF_FLAGS_INTEGER_COMPLEX
+
+    ctypedef struct __Pyx_TypeInfo:
+        char* name
+        __Pyx_StructField* fields
+        size_t size
+        size_t arraysize[8]
+        int ndim
+        char typegroup
+        char is_unsigned
+        int flags
+
+    ctypedef struct __Pyx_StructField:
+        __Pyx_TypeInfo* type
+        char* name
+        size_t offset
+
+    ctypedef struct __Pyx_BufFmt_StackElem:
+        __Pyx_StructField* field
+        size_t parent_offset
+
+    #ctypedef struct __Pyx_BufFmt_Context:
+    #  __Pyx_StructField root
+    #  __Pyx_BufFmt_StackElem* head
+
+    struct __pyx_typeinfo_string:
+        char string[3]
+
+    __pyx_typeinfo_string __Pyx_TypeInfoToFormat(__Pyx_TypeInfo *)
+
+
+@cname('__pyx_format_from_typeinfo')
+cdef bytes format_from_typeinfo(__Pyx_TypeInfo *type):
+    cdef __Pyx_StructField *field
+    cdef __pyx_typeinfo_string fmt
+    cdef bytes part, result
+
+    if type.typegroup == 'S':
+        assert type.fields != NULL and type.fields.type != NULL
+
+        if type.flags & __PYX_BUF_FLAGS_PACKED_STRUCT:
+            alignment = b'^'
+        else:
+            alignment = b''
+
+        parts = [b"T{"]
+        field = type.fields
+
+        while field.type:
+            part = format_from_typeinfo(field.type)
+            parts.append(part + b':' + field.name + b':')
+            field += 1
+
+        result = alignment.join(parts) + b'}'
+    else:
+        fmt = __Pyx_TypeInfoToFormat(type)
+        if type.arraysize[0]:
+            extents = [unicode(type.arraysize[i]) for i in range(type.ndim)]
+            result = (u"(%s)" % u','.join(extents)).encode('ascii') + fmt.string
+        else:
+            result = fmt.string
+
+    return result
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/MemoryView_C.c b/venv/lib/python3.8/site-packages/Cython/Utility/MemoryView_C.c
new file mode 100644
index 0000000..9f4828d
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/MemoryView_C.c
@@ -0,0 +1,941 @@
+////////// MemviewSliceStruct.proto //////////
+//@proto_block: utility_code_proto_before_types
+
+/* memoryview slice struct */
+struct {{memview_struct_name}};
+
+typedef struct {
+  struct {{memview_struct_name}} *memview;
+  char *data;
+  Py_ssize_t shape[{{max_dims}}];
+  Py_ssize_t strides[{{max_dims}}];
+  Py_ssize_t suboffsets[{{max_dims}}];
+} {{memviewslice_name}};
+
+// used for "len(memviewslice)"
+#define __Pyx_MemoryView_Len(m)  (m.shape[0])
+
+
+/////////// Atomics.proto /////////////
+//@proto_block: utility_code_proto_before_types
+
+#include <pythread.h>
+
+#ifndef CYTHON_ATOMICS
+    #define CYTHON_ATOMICS 1
+#endif
+
+#define __pyx_atomic_int_type int
+// todo: Portland pgcc, maybe OS X's OSAtomicIncrement32,
+//       libatomic + autotools-like distutils support? Such a pain...
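+// Summary of the ladder below: GNU __sync builtins are used for gcc >= 4.1.2
+// (except on i386); the MSVC and Intel branches are currently disabled by the
+// trailing "&& 0"; any other configuration forces CYTHON_ATOMICS to 0 and the
+// acquisition count falls back to the lock-protected *_locked() helpers.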
+#if CYTHON_ATOMICS && __GNUC__ >= 4 && (__GNUC_MINOR__ > 1 ||           \
+                    (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2)) && \
+                    !defined(__i386__)
+    /* gcc >= 4.1.2 */
+    #define __pyx_atomic_incr_aligned(value, lock) __sync_fetch_and_add(value, 1)
+    #define __pyx_atomic_decr_aligned(value, lock) __sync_fetch_and_sub(value, 1)
+
+    #ifdef __PYX_DEBUG_ATOMICS
+        #warning "Using GNU atomics"
+    #endif
+#elif CYTHON_ATOMICS && defined(_MSC_VER) && 0
+    /* msvc */
+    #include <Windows.h>
+    #undef __pyx_atomic_int_type
+    #define __pyx_atomic_int_type LONG
+    #define __pyx_atomic_incr_aligned(value, lock) InterlockedIncrement(value)
+    #define __pyx_atomic_decr_aligned(value, lock) InterlockedDecrement(value)
+
+    #ifdef __PYX_DEBUG_ATOMICS
+        #pragma message ("Using MSVC atomics")
+    #endif
+#elif CYTHON_ATOMICS && (defined(__ICC) || defined(__INTEL_COMPILER)) && 0
+    #define __pyx_atomic_incr_aligned(value, lock) _InterlockedIncrement(value)
+    #define __pyx_atomic_decr_aligned(value, lock) _InterlockedDecrement(value)
+
+    #ifdef __PYX_DEBUG_ATOMICS
+        #warning "Using Intel atomics"
+    #endif
+#else
+    #undef CYTHON_ATOMICS
+    #define CYTHON_ATOMICS 0
+
+    #ifdef __PYX_DEBUG_ATOMICS
+        #warning "Not using atomics"
+    #endif
+#endif
+
+typedef volatile __pyx_atomic_int_type __pyx_atomic_int;
+
+#if CYTHON_ATOMICS
+    #define __pyx_add_acquisition_count(memview) \
+             __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
+    #define __pyx_sub_acquisition_count(memview) \
+            __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
+#else
+    #define __pyx_add_acquisition_count(memview) \
+             __pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
+    #define __pyx_sub_acquisition_count(memview) \
+            __pyx_sub_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
+#endif
+
+
+/////////////// ObjectToMemviewSlice.proto ///////////////
+
+static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *, int writable_flag);
+
+
+////////// MemviewSliceInit.proto //////////
+
+#define __Pyx_BUF_MAX_NDIMS %(BUF_MAX_NDIMS)d
+
+#define __Pyx_MEMVIEW_DIRECT   1
+#define __Pyx_MEMVIEW_PTR      2
+#define __Pyx_MEMVIEW_FULL     4
+#define __Pyx_MEMVIEW_CONTIG   8
+#define __Pyx_MEMVIEW_STRIDED  16
+#define __Pyx_MEMVIEW_FOLLOW   32
+
+#define __Pyx_IS_C_CONTIG 1
+#define __Pyx_IS_F_CONTIG 2
+
+static int __Pyx_init_memviewslice(
+                struct __pyx_memoryview_obj *memview,
+                int ndim,
+                __Pyx_memviewslice *memviewslice,
+                int memview_is_new_reference);
+
+static CYTHON_INLINE int __pyx_add_acquisition_count_locked(
+    __pyx_atomic_int *acquisition_count, PyThread_type_lock lock);
+static CYTHON_INLINE int __pyx_sub_acquisition_count_locked(
+    __pyx_atomic_int *acquisition_count, PyThread_type_lock lock);
+
+#define __pyx_get_slice_count_pointer(memview) (memview->acquisition_count_aligned_p)
+#define __pyx_get_slice_count(memview) (*__pyx_get_slice_count_pointer(memview))
+#define __PYX_INC_MEMVIEW(slice, have_gil) __Pyx_INC_MEMVIEW(slice, have_gil, __LINE__)
+#define __PYX_XDEC_MEMVIEW(slice, have_gil) __Pyx_XDEC_MEMVIEW(slice, have_gil, __LINE__)
+static CYTHON_INLINE void __Pyx_INC_MEMVIEW({{memviewslice_name}} *, int, int);
+static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *, int, int);
+
+
+/////////////// MemviewSliceIndex.proto ///////////////
+
+static CYTHON_INLINE char *__pyx_memviewslice_index_full(
+    const char *bufp, Py_ssize_t idx, Py_ssize_t stride, Py_ssize_t suboffset);
+
+
+/////////////// ObjectToMemviewSlice ///////////////
+//@requires: MemviewSliceValidateAndInit + +static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *obj, int writable_flag) { + {{memviewslice_name}} result = {{memslice_init}}; + __Pyx_BufFmt_StackElem stack[{{struct_nesting_depth}}]; + int axes_specs[] = { {{axes_specs}} }; + int retcode; + + if (obj == Py_None) { + /* We don't bother to refcount None */ + result.memview = (struct __pyx_memoryview_obj *) Py_None; + return result; + } + + retcode = __Pyx_ValidateAndInit_memviewslice(axes_specs, {{c_or_f_flag}}, + {{buf_flag}} | writable_flag, {{ndim}}, + &{{dtype_typeinfo}}, stack, + &result, obj); + + if (unlikely(retcode == -1)) + goto __pyx_fail; + + return result; +__pyx_fail: + result.memview = NULL; + result.data = NULL; + return result; +} + + +/////////////// MemviewSliceValidateAndInit.proto /////////////// + +static int __Pyx_ValidateAndInit_memviewslice( + int *axes_specs, + int c_or_f_flag, + int buf_flags, + int ndim, + __Pyx_TypeInfo *dtype, + __Pyx_BufFmt_StackElem stack[], + __Pyx_memviewslice *memviewslice, + PyObject *original_obj); + +/////////////// MemviewSliceValidateAndInit /////////////// +//@requires: Buffer.c::TypeInfoCompare +//@requires: Buffer.c::BufferFormatStructs +//@requires: Buffer.c::BufferFormatCheck + +static int +__pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec) +{ + if (buf->shape[dim] <= 1) + return 1; + + if (buf->strides) { + if (spec & __Pyx_MEMVIEW_CONTIG) { + if (spec & (__Pyx_MEMVIEW_PTR|__Pyx_MEMVIEW_FULL)) { + if (unlikely(buf->strides[dim] != sizeof(void *))) { + PyErr_Format(PyExc_ValueError, + "Buffer is not indirectly contiguous " + "in dimension %d.", dim); + goto fail; + } + } else if (unlikely(buf->strides[dim] != buf->itemsize)) { + PyErr_SetString(PyExc_ValueError, + "Buffer and memoryview are not contiguous " + "in the same dimension."); + goto fail; + } + } + + if (spec & __Pyx_MEMVIEW_FOLLOW) { + Py_ssize_t stride = buf->strides[dim]; + if (stride < 0) + stride = -stride; + if (unlikely(stride < buf->itemsize)) { + PyErr_SetString(PyExc_ValueError, + "Buffer and memoryview are not contiguous " + "in the same dimension."); + goto fail; + } + } + } else { + if (unlikely(spec & __Pyx_MEMVIEW_CONTIG && dim != ndim - 1)) { + PyErr_Format(PyExc_ValueError, + "C-contiguous buffer is not contiguous in " + "dimension %d", dim); + goto fail; + } else if (unlikely(spec & (__Pyx_MEMVIEW_PTR))) { + PyErr_Format(PyExc_ValueError, + "C-contiguous buffer is not indirect in " + "dimension %d", dim); + goto fail; + } else if (unlikely(buf->suboffsets)) { + PyErr_SetString(PyExc_ValueError, + "Buffer exposes suboffsets but no strides"); + goto fail; + } + } + + return 1; +fail: + return 0; +} + +static int +__pyx_check_suboffsets(Py_buffer *buf, int dim, CYTHON_UNUSED int ndim, int spec) +{ + // Todo: without PyBUF_INDIRECT we may not have suboffset information, i.e., the + // ptr may not be set to NULL but may be uninitialized? 
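+    // In other words: a DIRECT axis must not carry a suboffset (suboffsets
+    // absent or < 0), while a PTR (indirect) axis requires one, because its
+    // element pointer is reached by dereferencing buf + offset first.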
+ if (spec & __Pyx_MEMVIEW_DIRECT) { + if (unlikely(buf->suboffsets && buf->suboffsets[dim] >= 0)) { + PyErr_Format(PyExc_ValueError, + "Buffer not compatible with direct access " + "in dimension %d.", dim); + goto fail; + } + } + + if (spec & __Pyx_MEMVIEW_PTR) { + if (unlikely(!buf->suboffsets || (buf->suboffsets[dim] < 0))) { + PyErr_Format(PyExc_ValueError, + "Buffer is not indirectly accessible " + "in dimension %d.", dim); + goto fail; + } + } + + return 1; +fail: + return 0; +} + +static int +__pyx_verify_contig(Py_buffer *buf, int ndim, int c_or_f_flag) +{ + int i; + + if (c_or_f_flag & __Pyx_IS_F_CONTIG) { + Py_ssize_t stride = 1; + for (i = 0; i < ndim; i++) { + if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) { + PyErr_SetString(PyExc_ValueError, + "Buffer not fortran contiguous."); + goto fail; + } + stride = stride * buf->shape[i]; + } + } else if (c_or_f_flag & __Pyx_IS_C_CONTIG) { + Py_ssize_t stride = 1; + for (i = ndim - 1; i >- 1; i--) { + if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) { + PyErr_SetString(PyExc_ValueError, + "Buffer not C contiguous."); + goto fail; + } + stride = stride * buf->shape[i]; + } + } + + return 1; +fail: + return 0; +} + +static int __Pyx_ValidateAndInit_memviewslice( + int *axes_specs, + int c_or_f_flag, + int buf_flags, + int ndim, + __Pyx_TypeInfo *dtype, + __Pyx_BufFmt_StackElem stack[], + __Pyx_memviewslice *memviewslice, + PyObject *original_obj) +{ + struct __pyx_memoryview_obj *memview, *new_memview; + __Pyx_RefNannyDeclarations + Py_buffer *buf; + int i, spec = 0, retval = -1; + __Pyx_BufFmt_Context ctx; + int from_memoryview = __pyx_memoryview_check(original_obj); + + __Pyx_RefNannySetupContext("ValidateAndInit_memviewslice", 0); + + if (from_memoryview && __pyx_typeinfo_cmp(dtype, ((struct __pyx_memoryview_obj *) + original_obj)->typeinfo)) { + /* We have a matching dtype, skip format parsing */ + memview = (struct __pyx_memoryview_obj *) original_obj; + new_memview = NULL; + } else { + memview = (struct __pyx_memoryview_obj *) __pyx_memoryview_new( + original_obj, buf_flags, 0, dtype); + new_memview = memview; + if (unlikely(!memview)) + goto fail; + } + + buf = &memview->view; + if (unlikely(buf->ndim != ndim)) { + PyErr_Format(PyExc_ValueError, + "Buffer has wrong number of dimensions (expected %d, got %d)", + ndim, buf->ndim); + goto fail; + } + + if (new_memview) { + __Pyx_BufFmt_Init(&ctx, stack, dtype); + if (unlikely(!__Pyx_BufFmt_CheckString(&ctx, buf->format))) goto fail; + } + + if (unlikely((unsigned) buf->itemsize != dtype->size)) { + PyErr_Format(PyExc_ValueError, + "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "u byte%s) " + "does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "u byte%s)", + buf->itemsize, + (buf->itemsize > 1) ? "s" : "", + dtype->name, + dtype->size, + (dtype->size > 1) ? 
"s" : ""); + goto fail; + } + + /* Check axes */ + for (i = 0; i < ndim; i++) { + spec = axes_specs[i]; + if (unlikely(!__pyx_check_strides(buf, i, ndim, spec))) + goto fail; + if (unlikely(!__pyx_check_suboffsets(buf, i, ndim, spec))) + goto fail; + } + + /* Check contiguity */ + if (unlikely(buf->strides && !__pyx_verify_contig(buf, ndim, c_or_f_flag))) + goto fail; + + /* Initialize */ + if (unlikely(__Pyx_init_memviewslice(memview, ndim, memviewslice, + new_memview != NULL) == -1)) { + goto fail; + } + + retval = 0; + goto no_fail; + +fail: + Py_XDECREF(new_memview); + retval = -1; + +no_fail: + __Pyx_RefNannyFinishContext(); + return retval; +} + + +////////// MemviewSliceInit ////////// + +static int +__Pyx_init_memviewslice(struct __pyx_memoryview_obj *memview, + int ndim, + {{memviewslice_name}} *memviewslice, + int memview_is_new_reference) +{ + __Pyx_RefNannyDeclarations + int i, retval=-1; + Py_buffer *buf = &memview->view; + __Pyx_RefNannySetupContext("init_memviewslice", 0); + + if (unlikely(memviewslice->memview || memviewslice->data)) { + PyErr_SetString(PyExc_ValueError, + "memviewslice is already initialized!"); + goto fail; + } + + if (buf->strides) { + for (i = 0; i < ndim; i++) { + memviewslice->strides[i] = buf->strides[i]; + } + } else { + Py_ssize_t stride = buf->itemsize; + for (i = ndim - 1; i >= 0; i--) { + memviewslice->strides[i] = stride; + stride *= buf->shape[i]; + } + } + + for (i = 0; i < ndim; i++) { + memviewslice->shape[i] = buf->shape[i]; + if (buf->suboffsets) { + memviewslice->suboffsets[i] = buf->suboffsets[i]; + } else { + memviewslice->suboffsets[i] = -1; + } + } + + memviewslice->memview = memview; + memviewslice->data = (char *)buf->buf; + if (__pyx_add_acquisition_count(memview) == 0 && !memview_is_new_reference) { + Py_INCREF(memview); + } + retval = 0; + goto no_fail; + +fail: + /* Don't decref, the memoryview may be borrowed. Let the caller do the cleanup */ + /* __Pyx_XDECREF(memviewslice->memview); */ + memviewslice->memview = 0; + memviewslice->data = 0; + retval = -1; +no_fail: + __Pyx_RefNannyFinishContext(); + return retval; +} + +#ifndef Py_NO_RETURN +// available since Py3.3 +#define Py_NO_RETURN +#endif + +static void __pyx_fatalerror(const char *fmt, ...) 
Py_NO_RETURN { + va_list vargs; + char msg[200]; + +#ifdef HAVE_STDARG_PROTOTYPES + va_start(vargs, fmt); +#else + va_start(vargs); +#endif + vsnprintf(msg, 200, fmt, vargs); + va_end(vargs); + + Py_FatalError(msg); +} + +static CYTHON_INLINE int +__pyx_add_acquisition_count_locked(__pyx_atomic_int *acquisition_count, + PyThread_type_lock lock) +{ + int result; + PyThread_acquire_lock(lock, 1); + result = (*acquisition_count)++; + PyThread_release_lock(lock); + return result; +} + +static CYTHON_INLINE int +__pyx_sub_acquisition_count_locked(__pyx_atomic_int *acquisition_count, + PyThread_type_lock lock) +{ + int result; + PyThread_acquire_lock(lock, 1); + result = (*acquisition_count)--; + PyThread_release_lock(lock); + return result; +} + + +static CYTHON_INLINE void +__Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno) +{ + int first_time; + struct {{memview_struct_name}} *memview = memslice->memview; + if (unlikely(!memview || (PyObject *) memview == Py_None)) + return; /* allow uninitialized memoryview assignment */ + + if (unlikely(__pyx_get_slice_count(memview) < 0)) + __pyx_fatalerror("Acquisition count is %d (line %d)", + __pyx_get_slice_count(memview), lineno); + + first_time = __pyx_add_acquisition_count(memview) == 0; + + if (unlikely(first_time)) { + if (have_gil) { + Py_INCREF((PyObject *) memview); + } else { + PyGILState_STATE _gilstate = PyGILState_Ensure(); + Py_INCREF((PyObject *) memview); + PyGILState_Release(_gilstate); + } + } +} + +static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *memslice, + int have_gil, int lineno) { + int last_time; + struct {{memview_struct_name}} *memview = memslice->memview; + + if (unlikely(!memview || (PyObject *) memview == Py_None)) { + // we do not ref-count None + memslice->memview = NULL; + return; + } + + if (unlikely(__pyx_get_slice_count(memview) <= 0)) + __pyx_fatalerror("Acquisition count is %d (line %d)", + __pyx_get_slice_count(memview), lineno); + + last_time = __pyx_sub_acquisition_count(memview) == 1; + memslice->data = NULL; + + if (unlikely(last_time)) { + if (have_gil) { + Py_CLEAR(memslice->memview); + } else { + PyGILState_STATE _gilstate = PyGILState_Ensure(); + Py_CLEAR(memslice->memview); + PyGILState_Release(_gilstate); + } + } else { + memslice->memview = NULL; + } +} + + +////////// MemviewSliceCopyTemplate.proto ////////// + +static {{memviewslice_name}} +__pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs, + const char *mode, int ndim, + size_t sizeof_dtype, int contig_flag, + int dtype_is_object); + + +////////// MemviewSliceCopyTemplate ////////// + +static {{memviewslice_name}} +__pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs, + const char *mode, int ndim, + size_t sizeof_dtype, int contig_flag, + int dtype_is_object) +{ + __Pyx_RefNannyDeclarations + int i; + __Pyx_memviewslice new_mvs = {{memslice_init}}; + struct __pyx_memoryview_obj *from_memview = from_mvs->memview; + Py_buffer *buf = &from_memview->view; + PyObject *shape_tuple = NULL; + PyObject *temp_int = NULL; + struct __pyx_array_obj *array_obj = NULL; + struct __pyx_memoryview_obj *memview_obj = NULL; + + __Pyx_RefNannySetupContext("__pyx_memoryview_copy_new_contig", 0); + + for (i = 0; i < ndim; i++) { + if (unlikely(from_mvs->suboffsets[i] >= 0)) { + PyErr_Format(PyExc_ValueError, "Cannot copy memoryview slice with " + "indirect dimensions (axis %d)", i); + goto fail; + } + } + + shape_tuple = PyTuple_New(ndim); + if (unlikely(!shape_tuple)) { + goto fail; + } + 
__Pyx_GOTREF(shape_tuple); + + + for(i = 0; i < ndim; i++) { + temp_int = PyInt_FromSsize_t(from_mvs->shape[i]); + if(unlikely(!temp_int)) { + goto fail; + } else { + PyTuple_SET_ITEM(shape_tuple, i, temp_int); + temp_int = NULL; + } + } + + array_obj = __pyx_array_new(shape_tuple, sizeof_dtype, buf->format, (char *) mode, NULL); + if (unlikely(!array_obj)) { + goto fail; + } + __Pyx_GOTREF(array_obj); + + memview_obj = (struct __pyx_memoryview_obj *) __pyx_memoryview_new( + (PyObject *) array_obj, contig_flag, + dtype_is_object, + from_mvs->memview->typeinfo); + if (unlikely(!memview_obj)) + goto fail; + + /* initialize new_mvs */ + if (unlikely(__Pyx_init_memviewslice(memview_obj, ndim, &new_mvs, 1) < 0)) + goto fail; + + if (unlikely(__pyx_memoryview_copy_contents(*from_mvs, new_mvs, ndim, ndim, + dtype_is_object) < 0)) + goto fail; + + goto no_fail; + +fail: + __Pyx_XDECREF(new_mvs.memview); + new_mvs.memview = NULL; + new_mvs.data = NULL; +no_fail: + __Pyx_XDECREF(shape_tuple); + __Pyx_XDECREF(temp_int); + __Pyx_XDECREF(array_obj); + __Pyx_RefNannyFinishContext(); + return new_mvs; +} + + +////////// CopyContentsUtility.proto ///////// + +#define {{func_cname}}(slice) \ + __pyx_memoryview_copy_new_contig(&slice, "{{mode}}", {{ndim}}, \ + sizeof({{dtype_decl}}), {{contig_flag}}, \ + {{dtype_is_object}}) + + +////////// OverlappingSlices.proto ////////// + +static int __pyx_slices_overlap({{memviewslice_name}} *slice1, + {{memviewslice_name}} *slice2, + int ndim, size_t itemsize); + + +////////// OverlappingSlices ////////// + +/* Based on numpy's core/src/multiarray/array_assign.c */ + +/* Gets a half-open range [start, end) which contains the array data */ +static void +__pyx_get_array_memory_extents({{memviewslice_name}} *slice, + void **out_start, void **out_end, + int ndim, size_t itemsize) +{ + char *start, *end; + int i; + + start = end = slice->data; + + for (i = 0; i < ndim; i++) { + Py_ssize_t stride = slice->strides[i]; + Py_ssize_t extent = slice->shape[i]; + + if (extent == 0) { + *out_start = *out_end = start; + return; + } else { + if (stride > 0) + end += stride * (extent - 1); + else + start += stride * (extent - 1); + } + } + + /* Return a half-open range */ + *out_start = start; + *out_end = end + itemsize; +} + +/* Returns 1 if the arrays have overlapping data, 0 otherwise */ +static int +__pyx_slices_overlap({{memviewslice_name}} *slice1, + {{memviewslice_name}} *slice2, + int ndim, size_t itemsize) +{ + void *start1, *end1, *start2, *end2; + + __pyx_get_array_memory_extents(slice1, &start1, &end1, ndim, itemsize); + __pyx_get_array_memory_extents(slice2, &start2, &end2, ndim, itemsize); + + return (start1 < end2) && (start2 < end1); +} + + +////////// MemviewSliceCheckContig.proto ////////// + +#define __pyx_memviewslice_is_contig_{{contig_type}}{{ndim}}(slice) \ + __pyx_memviewslice_is_contig(slice, '{{contig_type}}', {{ndim}}) + + +////////// MemviewSliceIsContig.proto ////////// + +static int __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim);/*proto*/ + + +////////// MemviewSliceIsContig ////////// + +static int +__pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim) +{ + int i, index, step, start; + Py_ssize_t itemsize = mvs.memview->view.itemsize; + + if (order == 'F') { + step = 1; + start = 0; + } else { + step = -1; + start = ndim - 1; + } + + for (i = 0; i < ndim; i++) { + index = start + step * i; + if (mvs.suboffsets[index] >= 0 || mvs.strides[index] != itemsize) + return 0; + + itemsize *= 
mvs.shape[index];
+    }
+
+    return 1;
+}
+
+
+/////////////// MemviewSliceIndex ///////////////
+
+static CYTHON_INLINE char *
+__pyx_memviewslice_index_full(const char *bufp, Py_ssize_t idx,
+                              Py_ssize_t stride, Py_ssize_t suboffset)
+{
+    bufp = bufp + idx * stride;
+    if (suboffset >= 0) {
+        bufp = *((char **) bufp) + suboffset;
+    }
+    return (char *) bufp;
+}
+
+
+/////////////// MemviewDtypeToObject.proto ///////////////
+
+{{if to_py_function}}
+static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp); /* proto */
+{{endif}}
+
+{{if from_py_function}}
+static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
+{{endif}}
+
+/////////////// MemviewDtypeToObject ///////////////
+
+{{#__pyx_memview_<dtype_name>_to_object}}
+
+/* Convert a dtype to or from a Python object */
+
+{{if to_py_function}}
+static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp) {
+    return (PyObject *) {{to_py_function}}(*({{dtype}} *) itemp);
+}
+{{endif}}
+
+{{if from_py_function}}
+static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj) {
+    {{dtype}} value = {{from_py_function}}(obj);
+    if ({{error_condition}})
+        return 0;
+    *({{dtype}} *) itemp = value;
+    return 1;
+}
+{{endif}}
+
+
+/////////////// MemviewObjectToObject.proto ///////////////
+
+/* Function callbacks (for memoryview object) for dtype object */
+static PyObject *{{get_function}}(const char *itemp); /* proto */
+static int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
+
+
+/////////////// MemviewObjectToObject ///////////////
+
+static PyObject *{{get_function}}(const char *itemp) {
+    PyObject *result = *(PyObject **) itemp;
+    Py_INCREF(result);
+    return result;
+}
+
+static int {{set_function}}(const char *itemp, PyObject *obj) {
+    Py_INCREF(obj);
+    Py_DECREF(*(PyObject **) itemp);
+    *(PyObject **) itemp = obj;
+    return 1;
+}
+
+/////////// ToughSlice //////////
+
+/* Dimension is indexed with 'start:stop:step' */
+
+if (unlikely(__pyx_memoryview_slice_memviewslice(
+    &{{dst}},
+    {{src}}.shape[{{dim}}], {{src}}.strides[{{dim}}], {{src}}.suboffsets[{{dim}}],
+    {{dim}},
+    {{new_ndim}},
+    &{{get_suboffset_dim()}},
+    {{start}},
+    {{stop}},
+    {{step}},
+    {{int(have_start)}},
+    {{int(have_stop)}},
+    {{int(have_step)}},
+    1) < 0))
+{
+    {{error_goto}}
+}
+
+
+////////// SimpleSlice //////////
+
+/* Dimension is indexed with ':' only */
+
+{{dst}}.shape[{{new_ndim}}] = {{src}}.shape[{{dim}}];
+{{dst}}.strides[{{new_ndim}}] = {{src}}.strides[{{dim}}];
+
+{{if access == 'direct'}}
+    {{dst}}.suboffsets[{{new_ndim}}] = -1;
+{{else}}
+    {{dst}}.suboffsets[{{new_ndim}}] = {{src}}.suboffsets[{{dim}}];
+    if ({{src}}.suboffsets[{{dim}}] >= 0)
+        {{get_suboffset_dim()}} = {{new_ndim}};
+{{endif}}
+
+
+////////// SliceIndex //////////
+
+// Dimension is indexed with an integer, we could use the ToughSlice
+// approach, but this is faster
+
+{
+    Py_ssize_t __pyx_tmp_idx = {{idx}};
+
+    {{if wraparound or boundscheck}}
+    Py_ssize_t __pyx_tmp_shape = {{src}}.shape[{{dim}}];
+    {{endif}}
+
+    Py_ssize_t __pyx_tmp_stride = {{src}}.strides[{{dim}}];
+    {{if wraparound}}
+    if (__pyx_tmp_idx < 0)
+        __pyx_tmp_idx += __pyx_tmp_shape;
+    {{endif}}
+
+    {{if boundscheck}}
+    if (unlikely(!__Pyx_is_valid_index(__pyx_tmp_idx, __pyx_tmp_shape))) {
+        {{if not have_gil}}
+        #ifdef WITH_THREAD
+        PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();
+        #endif
+        {{endif}}
+
+        PyErr_SetString(PyExc_IndexError,
+                        "Index out of bounds (axis {{dim}})");
+
+        {{if not have_gil}}
+        #ifdef WITH_THREAD
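+        // release the GIL state acquired above solely to raise the IndexError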
+        PyGILState_Release(__pyx_gilstate_save);
+        #endif
+        {{endif}}
+
+        {{error_goto}}
+    }
+    {{endif}}
+
+    {{if all_dimensions_direct}}
+    {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride;
+    {{else}}
+    if ({{get_suboffset_dim()}} < 0) {
+        {{dst}}.data += __pyx_tmp_idx * __pyx_tmp_stride;
+
+        /* This dimension is the first dimension, or is preceded by */
+        /* direct or indirect dimensions that are indexed away. */
+        /* Hence suboffset_dim must be less than zero, and we can have */
+        /* our data pointer refer to another block by dereferencing. */
+        /*   slice.data -> B -> C   becomes   slice.data -> C */
+
+        {{if indirect}}
+        {
+            Py_ssize_t __pyx_tmp_suboffset = {{src}}.suboffsets[{{dim}}];
+
+            {{if generic}}
+            if (__pyx_tmp_suboffset >= 0)
+            {{endif}}
+
+                {{dst}}.data = *((char **) {{dst}}.data) + __pyx_tmp_suboffset;
+        }
+        {{endif}}
+
+    } else {
+        {{dst}}.suboffsets[{{get_suboffset_dim()}}] += __pyx_tmp_idx * __pyx_tmp_stride;
+
+        /* Note: dimension can not be indirect, the compiler will have */
+        /* issued an error */
+    }
+
+    {{endif}}
+}
+
+
+////////// FillStrided1DScalar.proto //////////
+
+static void
+__pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride,
+                                size_t itemsize, void *itemp);
+
+////////// FillStrided1DScalar //////////
+
+/* Fill a slice with a scalar value. The dimension is direct and strided or contiguous */
+/* This can be used as a callback for the memoryview object to efficiently assign a scalar */
+/* Currently unused */
+static void
+__pyx_fill_slice_{{dtype_name}}({{type_decl}} *p, Py_ssize_t extent, Py_ssize_t stride,
+                                size_t itemsize, void *itemp)
+{
+    Py_ssize_t i;
+    {{type_decl}} item = *(({{type_decl}} *) itemp);
+    {{type_decl}} *endp;
+
+    stride /= sizeof({{type_decl}});
+    endp = p + stride * extent;
+
+    while (p < endp) {
+        *p = item;
+        p += stride;
+    }
+}
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/ModuleSetupCode.c b/venv/lib/python3.8/site-packages/Cython/Utility/ModuleSetupCode.c
new file mode 100644
index 0000000..4090185
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/ModuleSetupCode.c
@@ -0,0 +1,1496 @@
+/////////////// CModulePreamble ///////////////
+
+#include <stddef.h> /* For offsetof */
+#ifndef offsetof
+  #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+  #ifndef __stdcall
+    #define __stdcall
+  #endif
+  #ifndef __cdecl
+    #define __cdecl
+  #endif
+  #ifndef __fastcall
+    #define __fastcall
+  #endif
+#endif
+
+#ifndef DL_IMPORT
+  #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+  #define DL_EXPORT(t) t
+#endif
+
+// For use in DL_IMPORT/DL_EXPORT macros.
+#define __PYX_COMMA , + +#ifndef HAVE_LONG_LONG + // CPython has required PY_LONG_LONG support for years, even if HAVE_LONG_LONG is not defined for us + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif + +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif + +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif + +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + // looks like calling _PyType_Lookup() isn't safe in Py<=2.6/3.1 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define 
CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #ifndef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif +#endif + +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif + +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + /* These short defines can easily conflict with other code */ + #undef SHIFT + #undef BASE + #undef MASK + /* Compile-time sanity check that these are indeed equal. Github issue #2670. 
*/
+  #ifdef SIZEOF_VOID_P
+    enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
+  #endif
+#endif
+
+#ifndef __has_attribute
+  #define __has_attribute(x) 0
+#endif
+
+#ifndef __has_cpp_attribute
+  #define __has_cpp_attribute(x) 0
+#endif
+
+// restrict
+#ifndef CYTHON_RESTRICT
+  #if defined(__GNUC__)
+    #define CYTHON_RESTRICT __restrict__
+  #elif defined(_MSC_VER) && _MSC_VER >= 1400
+    #define CYTHON_RESTRICT __restrict
+  #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+    #define CYTHON_RESTRICT restrict
+  #else
+    #define CYTHON_RESTRICT
+  #endif
+#endif
+
+// unused attribute
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+#   if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+#     define CYTHON_UNUSED __attribute__ ((__unused__))
+#   else
+#     define CYTHON_UNUSED
+#   endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+#   define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+#   define CYTHON_UNUSED
+# endif
+#endif
+
+#ifndef CYTHON_MAYBE_UNUSED_VAR
+#  if defined(__cplusplus)
+     template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
+#  else
+#    define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
+#  endif
+#endif
+
+#ifndef CYTHON_NCP_UNUSED
+# if CYTHON_COMPILING_IN_CPYTHON
+#  define CYTHON_NCP_UNUSED
+# else
+#  define CYTHON_NCP_UNUSED CYTHON_UNUSED
+# endif
+#endif
+
+#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
+
+#ifdef _MSC_VER
+    #ifndef _MSC_STDINT_H_
+        #if _MSC_VER < 1300
+           typedef unsigned char     uint8_t;
+           typedef unsigned int      uint32_t;
+        #else
+           typedef unsigned __int8   uint8_t;
+           typedef unsigned __int32  uint32_t;
+        #endif
+    #endif
+#else
+   #include <stdint.h>
+#endif
+
+
+#ifndef CYTHON_FALLTHROUGH
+  #if defined(__cplusplus) && __cplusplus >= 201103L
+    #if __has_cpp_attribute(fallthrough)
+      #define CYTHON_FALLTHROUGH [[fallthrough]]
+    #elif __has_cpp_attribute(clang::fallthrough)
+      #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+    #elif __has_cpp_attribute(gnu::fallthrough)
+      #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+    #endif
+  #endif
+
+  #ifndef CYTHON_FALLTHROUGH
+    #if __has_attribute(fallthrough)
+      #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
+    #else
+      #define CYTHON_FALLTHROUGH
+    #endif
+  #endif
+
+  #if defined(__clang__ ) && defined(__apple_build_version__)
+    #if __apple_build_version__ < 7000000 /* Xcode < 7.0 */
+      #undef  CYTHON_FALLTHROUGH
+      #define CYTHON_FALLTHROUGH
+    #endif
+  #endif
+#endif
+
+/////////////// CInitCode ///////////////
+
+// inline attribute
+#ifndef CYTHON_INLINE
+  #if defined(__clang__)
+    #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+  #elif defined(__GNUC__)
+    #define CYTHON_INLINE __inline__
+  #elif defined(_MSC_VER)
+    #define CYTHON_INLINE __inline
+  #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+    #define CYTHON_INLINE inline
+  #else
+    #define CYTHON_INLINE
+  #endif
+#endif
+
+
+/////////////// CppInitCode ///////////////
+
+#ifndef __cplusplus
+  #error "Cython files generated with the C++ option must be compiled with a C++ compiler."
+#endif
+
+// inline attribute
+#ifndef CYTHON_INLINE
+  #if defined(__clang__)
+    #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+  #else
+    #define CYTHON_INLINE inline
+  #endif
+#endif
+
+// Work around clang bug http://stackoverflow.com/questions/21847816/c-invoke-nested-template-class-destructor
+template<typename T>
+void __Pyx_call_destructor(T& x) {
+    x.~T();
+}
+
+// Used for temporary variables of "reference" type.
+template<typename T>
+class __Pyx_FakeReference {
+  public:
+    __Pyx_FakeReference() : ptr(NULL) { }
+    // __Pyx_FakeReference(T& ref) : ptr(&ref) { }
+    // Const version needed as Cython doesn't know about const overloads (e.g. for stl containers).
+    __Pyx_FakeReference(const T& ref) : ptr(const_cast<T*>(&ref)) { }
+    T *operator->() { return ptr; }
+    T *operator&() { return ptr; }
+    operator T&() { return *ptr; }
+    // TODO(robertwb): Delegate all operators (or auto-generate unwrapping code where needed).
+    template<typename U> bool operator ==(U other) { return *ptr == other; }
+    template<typename U> bool operator !=(U other) { return *ptr != other; }
+  private:
+    T *ptr;
+};
+
+
+/////////////// PythonCompatibility ///////////////
+
+#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
+  #define Py_OptimizeFlag 0
+#endif
+
+#define __PYX_BUILD_PY_SSIZE_T "n"
+#define CYTHON_FORMAT_SSIZE_T "z"
+
+#if PY_MAJOR_VERSION < 3
+  #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+  #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
+          PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+  #define __Pyx_DefaultClassType PyClass_Type
+#else
+  #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
+  #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
+          PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#else
+  #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
+          PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#endif
+  #define __Pyx_DefaultClassType PyType_Type
+#endif
+
+#ifndef Py_TPFLAGS_CHECKTYPES
+  #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+  #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+  #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_FINALIZE
+  #define Py_TPFLAGS_HAVE_FINALIZE 0
+#endif
+
+#ifndef METH_STACKLESS
+  // already defined for Stackless Python (all versions) and C-Python >= 3.7
+  // value if defined: Stackless Python < 3.6: 0x80 else 0x100
+  #define METH_STACKLESS 0
+#endif
+#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
+  // new in CPython 3.6, but changed in 3.7 - see
+  // positional-only parameters:
+  //   https://bugs.python.org/issue29464
+  // const args:
+  //   https://bugs.python.org/issue32240
+  #ifndef METH_FASTCALL
+     #define METH_FASTCALL 0x80
+  #endif
+  typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
+  // new in CPython 3.7, used to be old signature of _PyCFunctionFast() in 3.6
+  typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
+                                                          Py_ssize_t nargs, PyObject *kwnames);
+#else
+  #define __Pyx_PyCFunctionFast _PyCFunctionFast
+  #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
+#endif
+#if CYTHON_FAST_PYCCALL
+#define __Pyx_PyFastCFunction_Check(func) \
+    ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
+#else
+#define __Pyx_PyFastCFunction_Check(func) 0
+#endif
+
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
+  #define PyObject_Malloc(s)   PyMem_Malloc(s)
+  #define PyObject_Free(p)     PyMem_Free(p)
+  #define PyObject_Realloc(p)  PyMem_Realloc(p)
+#endif
+
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
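+  // PyMem_RawMalloc() and friends were only added in CPython 3.4; on older
+  // versions, fall back to the plain PyMem allocator.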
+  #define PyMem_RawMalloc(n)           PyMem_Malloc(n)
+  #define PyMem_RawRealloc(p, n)       PyMem_Realloc(p, n)
+  #define PyMem_RawFree(p)             PyMem_Free(p)
+#endif
+
+#if CYTHON_COMPILING_IN_PYSTON
+  // special C-API functions only in Pyston
+  #define __Pyx_PyCode_HasFreeVars(co)  PyCode_HasFreeVars(co)
+  #define __Pyx_PyFrame_SetLineNumber(frame, lineno)  PyFrame_SetLineNumber(frame, lineno)
+#else
+  #define __Pyx_PyCode_HasFreeVars(co)  (PyCode_GetNumFree(co) > 0)
+  #define __Pyx_PyFrame_SetLineNumber(frame, lineno)  (frame)->f_lineno = (lineno)
+#endif
+
+#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
+  #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#elif PY_VERSION_HEX >= 0x03060000
+  //#elif PY_VERSION_HEX >= 0x03050200
+  // Actually added in 3.5.2, but compiling against that does not guarantee that we get imported there.
+  #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
+#elif PY_VERSION_HEX >= 0x03000000
+  #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#else
+  #define __Pyx_PyThreadState_Current _PyThreadState_Current
+#endif
+
+// TSS (Thread Specific Storage) API
+#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
+#include "pythread.h"
+#define Py_tss_NEEDS_INIT 0
+typedef int Py_tss_t;
+static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
+  *key = PyThread_create_key();
+  return 0; /* PyThread_create_key reports success always */
+}
+static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
+  Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
+  *key = Py_tss_NEEDS_INIT;
+  return key;
+}
+static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
+  PyObject_Free(key);
+}
+static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
+  return *key != Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
+  PyThread_delete_key(*key);
+  *key = Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
+  return PyThread_set_key_value(*key, value);
+}
+static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+  return PyThread_get_key_value(*key);
+}
+// PyThread_delete_key_value(key) is equivalent to PyThread_set_key_value(key, NULL)
+// PyThread_ReInitTLS() is a no-op
+#endif /* TSS (Thread Specific Storage) API */
+
+#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
+#define __Pyx_PyDict_NewPresized(n)  ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
+#else
+#define __Pyx_PyDict_NewPresized(n)  PyDict_New()
+#endif
+
+#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
+  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_TrueDivide(x,y)
+  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceTrueDivide(x,y)
+#else
+  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_Divide(x,y)
+  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceDivide(x,y)
+#endif
+
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
+#define __Pyx_PyDict_GetItemStr(dict, name)  _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
+#else
+#define __Pyx_PyDict_GetItemStr(dict, name)  PyDict_GetItem(dict, name)
+#endif
+
+/* new Py3.3 unicode type (PEP 393) */
+#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
+  #define CYTHON_PEP393_ENABLED 1
+  #define __Pyx_PyUnicode_READY(op)       (likely(PyUnicode_IS_READY(op)) ?
\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + /* (void)(k) => avoid unused variable warning due to macro: */ + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif + +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ? \ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif + +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif + +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif + +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif + +// ("..." % x) must call PyNumber_Remainder() if x is a string subclass that implements "__rmod__()". +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif + +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif + +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact + // PyPy3 used to define "PyObject_Unicode" +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif + +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif + +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + // NOTE: might fail with exception => check for -1 + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif + +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif + +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif + +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif + +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : (Py_INCREF(func), func))
+#else
+  #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
+#endif
+
+// backport of PyAsyncMethods from Py3.5 to older Py3.x versions
+// (mis-)using the "tp_reserved" type slot which is re-activated as "tp_as_async" in Py3.5
+#if CYTHON_USE_ASYNC_SLOTS
+  #if PY_VERSION_HEX >= 0x030500B1
+    #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
+    #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
+  #else
+    #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
+  #endif
+#else
+  #define __Pyx_PyType_AsAsync(obj) NULL
+#endif
+#ifndef __Pyx_PyAsyncMethodsStruct
+    typedef struct {
+        unaryfunc am_await;
+        unaryfunc am_aiter;
+        unaryfunc am_anext;
+    } __Pyx_PyAsyncMethodsStruct;
+#endif
+
+
+/////////////// SmallCodeConfig.proto ///////////////
+
+#ifndef CYTHON_SMALL_CODE
+#if defined(__clang__)
+    #define CYTHON_SMALL_CODE
+#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
+    #define CYTHON_SMALL_CODE __attribute__((cold))
+#else
+    #define CYTHON_SMALL_CODE
+#endif
+#endif
+
+
+/////////////// PyModInitFuncType.proto ///////////////
+
+#ifndef CYTHON_NO_PYINIT_EXPORT
+#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
+
+#elif PY_MAJOR_VERSION < 3
+// Py2: define this to void manually because PyMODINIT_FUNC adds __declspec(dllexport) to its definition.
+#ifdef __cplusplus
+#define __Pyx_PyMODINIT_FUNC extern "C" void
+#else
+#define __Pyx_PyMODINIT_FUNC void
+#endif
+
+#else
+// Py3+: define this to PyObject * manually because PyMODINIT_FUNC adds __declspec(dllexport) to its definition.
+#ifdef __cplusplus
+#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
+#else
+#define __Pyx_PyMODINIT_FUNC PyObject *
+#endif
+#endif
+
+
+/////////////// FastTypeChecks.proto ///////////////
+
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);/*proto*/
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);/*proto*/
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);/*proto*/
+#else
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
+#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
+#endif
+
+#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
+
+/////////////// FastTypeChecks ///////////////
+//@requires: Exceptions.c::PyThreadStateGet
+//@requires: Exceptions.c::PyErrFetchRestore
+
+#if CYTHON_COMPILING_IN_CPYTHON
+static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
+    while (a) {
+        a = a->tp_base;
+        if (a == b)
+            return 1;
+    }
+    return b == &PyBaseObject_Type;
+}
+
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
+    PyObject *mro;
+    if (a == b) return 1;
+    mro = a->tp_mro;
+    if (likely(mro)) {
+        Py_ssize_t i, n;
+        n = PyTuple_GET_SIZE(mro);
+        for (i = 0; i < n; i++) {
+            if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
+                return 1;
+        }
+        return 0;
+    }
+    // should only get here for incompletely initialised types, i.e. never under normal usage patterns
+    return __Pyx_InBases(a, b);
+}
+
+
+#if PY_MAJOR_VERSION == 2
+static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
+    // PyObject_IsSubclass() can recurse and therefore is not safe
+    PyObject *exception, *value, *tb;
+    int res;
+    __Pyx_PyThreadState_declare
+    __Pyx_PyThreadState_assign
+    __Pyx_ErrFetch(&exception, &value, &tb);
+
+    res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
+    // This function must not fail, so print the error here (which also clears it)
+    if (unlikely(res == -1)) {
+        PyErr_WriteUnraisable(err);
+        res = 0;
+    }
+    if (!res) {
+        res = PyObject_IsSubclass(err, exc_type2);
+        // This function must not fail, so print the error here (which also clears it)
+        if (unlikely(res == -1)) {
+            PyErr_WriteUnraisable(err);
+            res = 0;
+        }
+    }
+
+    __Pyx_ErrRestore(exception, value, tb);
+    return res;
+}
+#else
+static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
+    int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
+    if (!res) {
+        res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
+    }
+    return res;
+}
+#endif
+
+// so far, we only call PyErr_GivenExceptionMatches() with an exception type (not instance) as first argument
+// => optimise for that case
+
+static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+    Py_ssize_t i, n;
+    assert(PyExceptionClass_Check(exc_type));
+    n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+    // the tighter subtype checking in Py3 allows faster out-of-order comparison
+    for (i=0; i<n; i++) {
+        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+    }
+#endif
+    for (i=0; i<n; i++) {
+        PyObject *t = PyTuple_GET_ITEM(tuple, i);
+        #if PY_MAJOR_VERSION < 3
+        if (likely(exc_type == t)) return 1;
+        #endif
+        if (likely(PyExceptionClass_Check(t))) {
+            if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1;
+        }
+    }
+    return 0;
+}
+
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
+    if (likely(err == exc_type)) return 1;
+    if (likely(PyExceptionClass_Check(err))) {
+        if (likely(PyExceptionClass_Check(exc_type))) {
+            return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
+        } else if (likely(PyTuple_Check(exc_type))) {
+            return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type);
+        }
+    }
+    return PyErr_GivenExceptionMatches(err, exc_type);
+}
+
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
+    // Only used internally with known exception types => pure safety check assertions.
+    assert(PyExceptionClass_Check(exc_type1));
+    assert(PyExceptionClass_Check(exc_type2));
+    if (likely(err == exc_type1 || err == exc_type2)) return 1;
+    if (likely(PyExceptionClass_Check(err))) {
+        return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
+    }
+    return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
+}
+
+#endif
+
+
+/////////////// MathInitCode ///////////////
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+  #define _USE_MATH_DEFINES
+#endif
+#include <math.h>
+
+#ifdef NAN
+#define __PYX_NAN() ((float) NAN)
+#else
+static CYTHON_INLINE float __PYX_NAN() {
+  // Initialize NaN. The sign is irrelevant, an exponent with all bits 1 and
+  // a nonzero mantissa means NaN. If the first bit in the mantissa is 1, it is
+  // a quiet NaN.
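+  // memset()ing every byte to 0xFF sets all exponent and mantissa bits,
+  // which yields one such quiet NaN on IEEE-754 platforms.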
+ float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif + +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +/////////////// UtilityFunctionPredeclarations.proto /////////////// + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ + +/////////////// ForceInitThreads.proto /////////////// +//@proto_block: utility_code_proto_before_types + +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + +/////////////// InitThreads.init /////////////// + +#ifdef WITH_THREAD +PyEval_InitThreads(); +#endif + + +/////////////// ModuleCreationPEP489 /////////////// +//@substitute: naming + +//#if CYTHON_PEP489_MULTI_PHASE_INIT +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} + +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} + +static CYTHON_SMALL_CODE PyObject* ${pymodule_create_func_cname}(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + + // For now, we only have exactly one module instance. 
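+    // A repeated import therefore just returns a new reference to the
+    // existing module, and loading into a second interpreter is rejected
+    // by the check below.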
+ if (__Pyx_check_single_interpreter()) + return NULL; + if (${module_cname}) + return __Pyx_NewRef(${module_cname}); + + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + // moddict is a borrowed reference + + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + + return module; +bad: + Py_XDECREF(module); + return NULL; +} +//#endif + + +/////////////// CodeObjectCache.proto /////////////// + +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; + +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; + +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; + +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/////////////// CodeObjectCache /////////////// +// Note that errors are simply ignored in the code below. +// This is just a cache, if a lookup or insertion fails - so what? + +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} + +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} + +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = 
entries[pos].code_object;
+        entries[pos].code_object = code_object;
+        Py_DECREF(tmp);
+        return;
+    }
+    if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
+        int new_max = __pyx_code_cache.max_count + 64;
+        entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
+            __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
+        if (unlikely(!entries)) {
+            return;
+        }
+        __pyx_code_cache.entries = entries;
+        __pyx_code_cache.max_count = new_max;
+    }
+    for (i=__pyx_code_cache.count; i>pos; i--) {
+        entries[i] = entries[i-1];
+    }
+    entries[pos].code_line = code_line;
+    entries[pos].code_object = code_object;
+    __pyx_code_cache.count++;
+    Py_INCREF(code_object);
+}
+
+/////////////// CodeObjectCache.cleanup ///////////////
+
+  if (__pyx_code_cache.entries) {
+      __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
+      int i, count = __pyx_code_cache.count;
+      __pyx_code_cache.count = 0;
+      __pyx_code_cache.max_count = 0;
+      __pyx_code_cache.entries = NULL;
+      for (i=0; i<count; i++) {
+          Py_DECREF(entries[i].code_object);
+      }
+      PyMem_Free(entries);
+  }
+
+/////////////// Refnanny.proto ///////////////
+
+#ifndef CYTHON_REFNANNY
+  #define CYTHON_REFNANNY 0
+#endif
+
+#if CYTHON_REFNANNY
+  typedef struct {
+    void (*INCREF)(void*, PyObject*, int);
+    void (*DECREF)(void*, PyObject*, int);
+    void (*GOTREF)(void*, PyObject*, int);
+    void (*GIVEREF)(void*, PyObject*, int);
+    void* (*SetupContext)(const char*, int, const char*);
+    void (*FinishContext)(void**);
+  } __Pyx_RefNannyAPIStruct;
+  static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+  static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/
+  #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+#ifdef WITH_THREAD
+  #define __Pyx_RefNannySetupContext(name, acquire_gil) \
+          if (acquire_gil) { \
+              PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); \
+              __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \
+              PyGILState_Release(__pyx_gilstate_save); \
+          } else { \
+              __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \
+          }
+#else
+  #define __Pyx_RefNannySetupContext(name, acquire_gil) \
+          __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+#endif
+  #define __Pyx_RefNannyFinishContext() \
+          __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+  #define __Pyx_INCREF(r)  __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_DECREF(r)  __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_GOTREF(r)  __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_XINCREF(r)  do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+  #define __Pyx_XDECREF(r)  do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+  #define __Pyx_XGOTREF(r)  do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+  #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+  #define __Pyx_RefNannyDeclarations
+  #define __Pyx_RefNannySetupContext(name, acquire_gil)
+  #define __Pyx_RefNannyFinishContext()
+  #define __Pyx_INCREF(r) Py_INCREF(r)
+  #define __Pyx_DECREF(r) Py_DECREF(r)
+  #define __Pyx_GOTREF(r)
+  #define __Pyx_GIVEREF(r)
+  #define __Pyx_XINCREF(r) Py_XINCREF(r)
+  #define __Pyx_XDECREF(r) Py_XDECREF(r)
+  #define __Pyx_XGOTREF(r)
+  #define __Pyx_XGIVEREF(r)
+#endif /* CYTHON_REFNANNY */
+
+#define __Pyx_XDECREF_SET(r, v) do {                            \
+        PyObject *tmp = (PyObject *) r;                         \
+        r = v; __Pyx_XDECREF(tmp);                              \
+    } while (0)
+#define __Pyx_DECREF_SET(r, v) do {                             \
+        PyObject *tmp = (PyObject *) r;                         \
+        r = v; __Pyx_DECREF(tmp);                               \
+    } while (0)
+
+#define __Pyx_CLEAR(r)    do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
+#define __Pyx_XCLEAR(r)   do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
+
+/////////////// Refnanny ///////////////
+
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+    PyObject *m = NULL, *p = NULL;
+    void *r = NULL;
+    m = PyImport_ImportModule(modname);
+    if (!m) goto end;
+    p = PyObject_GetAttrString(m, "RefNannyAPI");
+    if (!p) goto end;
+    r = PyLong_AsVoidPtr(p);
+end:
+    Py_XDECREF(p);
+    Py_XDECREF(m);
+    return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif /* CYTHON_REFNANNY */
+
+
+/////////////// ImportRefnannyAPI ///////////////
+
+#if
CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + + +/////////////// RegisterModuleCleanup.proto /////////////// +//@substitute: naming + +static void ${cleanup_cname}(PyObject *self); /*proto*/ + +#if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY +static int __Pyx_RegisterCleanup(void); /*proto*/ +#else +#define __Pyx_RegisterCleanup() (0) +#endif + +/////////////// RegisterModuleCleanup /////////////// +//@substitute: naming + +#if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY +static PyObject* ${cleanup_cname}_atexit(PyObject *module, CYTHON_UNUSED PyObject *unused) { + ${cleanup_cname}(module); + Py_INCREF(Py_None); return Py_None; +} + +static int __Pyx_RegisterCleanup(void) { + // Don't use Py_AtExit because that has a 32-call limit and is called + // after python finalization. + // Also, we try to prepend the cleanup function to "atexit._exithandlers" + // in Py2 because CPython runs them last-to-first. Being run last allows + // user exit code to run before us that may depend on the globals + // and cached objects that we are about to clean up. + + static PyMethodDef cleanup_def = { + "__cleanup", (PyCFunction)${cleanup_cname}_atexit, METH_NOARGS, 0}; + + PyObject *cleanup_func = 0; + PyObject *atexit = 0; + PyObject *reg = 0; + PyObject *args = 0; + PyObject *res = 0; + int ret = -1; + + cleanup_func = PyCFunction_New(&cleanup_def, 0); + if (!cleanup_func) + goto bad; + + atexit = PyImport_ImportModule("atexit"); + if (!atexit) + goto bad; + reg = PyObject_GetAttrString(atexit, "_exithandlers"); + if (reg && PyList_Check(reg)) { + PyObject *a, *kw; + a = PyTuple_New(0); + kw = PyDict_New(); + if (!a || !kw) { + Py_XDECREF(a); + Py_XDECREF(kw); + goto bad; + } + args = PyTuple_Pack(3, cleanup_func, a, kw); + Py_DECREF(a); + Py_DECREF(kw); + if (!args) + goto bad; + ret = PyList_Insert(reg, 0, args); + } else { + if (!reg) + PyErr_Clear(); + Py_XDECREF(reg); + reg = PyObject_GetAttrString(atexit, "register"); + if (!reg) + goto bad; + args = PyTuple_Pack(1, cleanup_func); + if (!args) + goto bad; + res = PyObject_CallObject(reg, args); + if (!res) + goto bad; + ret = 0; + } +bad: + Py_XDECREF(cleanup_func); + Py_XDECREF(atexit); + Py_XDECREF(reg); + Py_XDECREF(args); + Py_XDECREF(res); + return ret; +} +#endif + +/////////////// FastGil.init /////////////// +#ifdef WITH_THREAD +__Pyx_FastGilFuncInit(); +#endif + +/////////////// NoFastGil.proto /////////////// +//@proto_block: utility_code_proto_before_types + +#define __Pyx_PyGILState_Ensure PyGILState_Ensure +#define __Pyx_PyGILState_Release PyGILState_Release +#define __Pyx_FastGIL_Remember() +#define __Pyx_FastGIL_Forget() +#define __Pyx_FastGilFuncInit() + +/////////////// FastGil.proto /////////////// +//@proto_block: utility_code_proto_before_types + +struct __Pyx_FastGilVtab { + PyGILState_STATE (*Fast_PyGILState_Ensure)(void); + void (*Fast_PyGILState_Release)(PyGILState_STATE oldstate); + void (*FastGIL_Remember)(void); + void (*FastGIL_Forget)(void); +}; + +static void __Pyx_FastGIL_Noop(void) {} +static struct __Pyx_FastGilVtab __Pyx_FastGilFuncs = { + PyGILState_Ensure, + PyGILState_Release, + __Pyx_FastGIL_Noop, + __Pyx_FastGIL_Noop +}; + +static void __Pyx_FastGilFuncInit(void); + +#define __Pyx_PyGILState_Ensure __Pyx_FastGilFuncs.Fast_PyGILState_Ensure +#define __Pyx_PyGILState_Release 
__Pyx_FastGilFuncs.Fast_PyGILState_Release
+#define __Pyx_FastGIL_Remember   __Pyx_FastGilFuncs.FastGIL_Remember
+#define __Pyx_FastGIL_Forget     __Pyx_FastGilFuncs.FastGIL_Forget
+
+#ifdef WITH_THREAD
+  #ifndef CYTHON_THREAD_LOCAL
+    #if __STDC_VERSION__ >= 201112
+      #define CYTHON_THREAD_LOCAL _Thread_local
+    #elif defined(__GNUC__)
+      #define CYTHON_THREAD_LOCAL __thread
+    #elif defined(_MSC_VER)
+      #define CYTHON_THREAD_LOCAL __declspec(thread)
+    #endif
+  #endif
+#endif
+
+/////////////// FastGil ///////////////
+//@requires: CommonStructures.c::FetchCommonPointer
+// The implementations of PyGILState_Ensure/Release call PyThread_get_key_value
+// several times, which turns out to be quite slow (slower in fact than
+// acquiring the GIL itself). Simply storing it in a thread local for the
+// common case is much faster.
+// To make optimal use of this thread local, we attempt to share it between
+// modules.
+
+#define __Pyx_FastGIL_ABI_module "_cython_" CYTHON_ABI
+#define __Pyx_FastGIL_PyCapsuleName "FastGilFuncs"
+#define __Pyx_FastGIL_PyCapsule \
+    __Pyx_FastGIL_ABI_module "." __Pyx_FastGIL_PyCapsuleName
+
+#if PY_VERSION_HEX < 0x02070000
+  #undef CYTHON_THREAD_LOCAL
+#endif
+
+#ifdef CYTHON_THREAD_LOCAL
+
+#include "pythread.h"
+#include "pystate.h"
+
+static CYTHON_THREAD_LOCAL PyThreadState *__Pyx_FastGil_tcur = NULL;
+static CYTHON_THREAD_LOCAL int __Pyx_FastGil_tcur_depth = 0;
+static int __Pyx_FastGil_autoTLSkey = -1;
+
+static CYTHON_INLINE void __Pyx_FastGIL_Remember0(void) {
+  ++__Pyx_FastGil_tcur_depth;
+}
+
+static CYTHON_INLINE void __Pyx_FastGIL_Forget0(void) {
+  if (--__Pyx_FastGil_tcur_depth == 0) {
+    __Pyx_FastGil_tcur = NULL;
+  }
+}
+
+static CYTHON_INLINE PyThreadState *__Pyx_FastGil_get_tcur(void) {
+  PyThreadState *tcur = __Pyx_FastGil_tcur;
+  if (tcur == NULL) {
+    tcur = __Pyx_FastGil_tcur = (PyThreadState*)PyThread_get_key_value(__Pyx_FastGil_autoTLSkey);
+  }
+  return tcur;
+}
+
+static PyGILState_STATE __Pyx_FastGil_PyGILState_Ensure(void) {
+  int current;
+  PyThreadState *tcur;
+  __Pyx_FastGIL_Remember0();
+  tcur = __Pyx_FastGil_get_tcur();
+  if (tcur == NULL) {
+    // Uninitialized, need to initialize now.
+    return PyGILState_Ensure();
+  }
+  current = tcur == __Pyx_PyThreadState_Current;
+  if (current == 0) {
+    PyEval_RestoreThread(tcur);
+  }
+  ++tcur->gilstate_counter;
+  return current ? PyGILState_LOCKED : PyGILState_UNLOCKED;
+}
+
+static void __Pyx_FastGil_PyGILState_Release(PyGILState_STATE oldstate) {
+  PyThreadState *tcur = __Pyx_FastGil_get_tcur();
+  __Pyx_FastGIL_Forget0();
+  if (tcur->gilstate_counter == 1) {
+    // This is the last lock, do all the cleanup as well.
+    PyGILState_Release(oldstate);
+  } else {
+    --tcur->gilstate_counter;
+    if (oldstate == PyGILState_UNLOCKED) {
+      PyEval_SaveThread();
+    }
+  }
+}
+
+static void __Pyx_FastGilFuncInit0(void) {
+  /* Try to detect autoTLSkey.
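+     CPython stores the current PyThreadState in one of its thread-local
+     storage slots ("autoTLSkey") but does not expose the slot index, so the
+     loop below probes the first 100 keys for the value returned by
+     PyGILState_GetThisThreadState().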
*/ + int key; + void* this_thread_state = (void*) PyGILState_GetThisThreadState(); + for (key = 0; key < 100; key++) { + if (PyThread_get_key_value(key) == this_thread_state) { + __Pyx_FastGil_autoTLSkey = key; + break; + } + } + if (__Pyx_FastGil_autoTLSkey != -1) { + PyObject* capsule = NULL; + PyObject* abi_module = NULL; + __Pyx_PyGILState_Ensure = __Pyx_FastGil_PyGILState_Ensure; + __Pyx_PyGILState_Release = __Pyx_FastGil_PyGILState_Release; + __Pyx_FastGIL_Remember = __Pyx_FastGIL_Remember0; + __Pyx_FastGIL_Forget = __Pyx_FastGIL_Forget0; + capsule = PyCapsule_New(&__Pyx_FastGilFuncs, __Pyx_FastGIL_PyCapsule, NULL); + abi_module = PyImport_AddModule(__Pyx_FastGIL_ABI_module); + if (capsule && abi_module) { + PyObject_SetAttrString(abi_module, __Pyx_FastGIL_PyCapsuleName, capsule); + } + Py_XDECREF(capsule); + } +} + +#else + +static void __Pyx_FastGilFuncInit0(void) { + CYTHON_UNUSED void* force_use = (void*)&__Pyx_FetchCommonPointer; +} + +#endif + +static void __Pyx_FastGilFuncInit(void) { +#if PY_VERSION_HEX >= 0x02070000 + struct __Pyx_FastGilVtab* shared = (struct __Pyx_FastGilVtab*)PyCapsule_Import(__Pyx_FastGIL_PyCapsule, 1); +#else + struct __Pyx_FastGilVtab* shared = NULL; +#endif + if (shared) { + __Pyx_FastGilFuncs = *shared; + } else { + PyErr_Clear(); + __Pyx_FastGilFuncInit0(); + } +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/ObjectHandling.c b/venv/lib/python3.8/site-packages/Cython/Utility/ObjectHandling.c new file mode 100644 index 0000000..fbb22be --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/ObjectHandling.c @@ -0,0 +1,2495 @@ +/* + * General object operations and protocol implementations, + * including their specialisations for certain builtins. + * + * Optional optimisations for builtins are in Optimize.c. + * + * Required replacements of builtins are in Builtins.c. + */ + +/////////////// RaiseNoneIterError.proto /////////////// + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/////////////// RaiseNoneIterError /////////////// + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +/////////////// RaiseTooManyValuesToUnpack.proto /////////////// + +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/////////////// RaiseTooManyValuesToUnpack /////////////// + +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/////////////// RaiseNeedMoreValuesToUnpack.proto /////////////// + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/////////////// RaiseNeedMoreValuesToUnpack /////////////// + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? 
"" : "s"); +} + +/////////////// UnpackTupleError.proto /////////////// + +static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); /*proto*/ + +/////////////// UnpackTupleError /////////////// +//@requires: RaiseNoneIterError +//@requires: RaiseNeedMoreValuesToUnpack +//@requires: RaiseTooManyValuesToUnpack + +static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) { + if (t == Py_None) { + __Pyx_RaiseNoneNotIterableError(); + } else if (PyTuple_GET_SIZE(t) < index) { + __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t)); + } else { + __Pyx_RaiseTooManyValuesError(index); + } +} + +/////////////// UnpackItemEndCheck.proto /////////////// + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/ + +/////////////// UnpackItemEndCheck /////////////// +//@requires: RaiseTooManyValuesToUnpack +//@requires: IterFinish + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/////////////// UnpackTuple2.proto /////////////// + +#define __Pyx_unpack_tuple2(tuple, value1, value2, is_tuple, has_known_size, decref_tuple) \ + (likely(is_tuple || PyTuple_Check(tuple)) ? \ + (likely(has_known_size || PyTuple_GET_SIZE(tuple) == 2) ? \ + __Pyx_unpack_tuple2_exact(tuple, value1, value2, decref_tuple) : \ + (__Pyx_UnpackTupleError(tuple, 2), -1)) : \ + __Pyx_unpack_tuple2_generic(tuple, value1, value2, has_known_size, decref_tuple)) + +static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( + PyObject* tuple, PyObject** value1, PyObject** value2, int decref_tuple); +static int __Pyx_unpack_tuple2_generic( + PyObject* tuple, PyObject** value1, PyObject** value2, int has_known_size, int decref_tuple); + +/////////////// UnpackTuple2 /////////////// +//@requires: UnpackItemEndCheck +//@requires: UnpackTupleError +//@requires: RaiseNeedMoreValuesToUnpack + +static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( + PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) { + PyObject *value1 = NULL, *value2 = NULL; +#if CYTHON_COMPILING_IN_PYPY + value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad; + value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad; +#else + value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1); + value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2); +#endif + if (decref_tuple) { + Py_DECREF(tuple); + } + + *pvalue1 = value1; + *pvalue2 = value2; + return 0; +#if CYTHON_COMPILING_IN_PYPY +bad: + Py_XDECREF(value1); + Py_XDECREF(value2); + if (decref_tuple) { Py_XDECREF(tuple); } + return -1; +#endif +} + +static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, + int has_known_size, int decref_tuple) { + Py_ssize_t index; + PyObject *value1 = NULL, *value2 = NULL, *iter = NULL; + iternextfunc iternext; + + iter = PyObject_GetIter(tuple); + if (unlikely(!iter)) goto bad; + if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; } + + iternext = Py_TYPE(iter)->tp_iternext; + value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; } + value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; } + if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad; + + Py_DECREF(iter); + *pvalue1 = value1; + *pvalue2 = value2; + return 0; + +unpacking_failed: + if (!has_known_size && 
__Pyx_IterFinish() == 0) + __Pyx_RaiseNeedMoreValuesError(index); +bad: + Py_XDECREF(iter); + Py_XDECREF(value1); + Py_XDECREF(value2); + if (decref_tuple) { Py_XDECREF(tuple); } + return -1; +} + + +/////////////// IterNext.proto /////////////// + +#define __Pyx_PyIter_Next(obj) __Pyx_PyIter_Next2(obj, NULL) +static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject *, PyObject *); /*proto*/ + +/////////////// IterNext /////////////// +//@requires: Exceptions.c::PyThreadStateGet +//@requires: Exceptions.c::PyErrFetchRestore + +static PyObject *__Pyx_PyIter_Next2Default(PyObject* defval) { + PyObject* exc_type; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + exc_type = __Pyx_PyErr_Occurred(); + if (unlikely(exc_type)) { + if (!defval || unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(defval); + return defval; + } + if (defval) { + Py_INCREF(defval); + return defval; + } + __Pyx_PyErr_SetNone(PyExc_StopIteration); + return NULL; +} + +static void __Pyx_PyIter_Next_ErrorNoIterator(PyObject *iterator) { + PyErr_Format(PyExc_TypeError, + "%.200s object is not an iterator", Py_TYPE(iterator)->tp_name); +} + +// originally copied from Py3's builtin_next() +static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject* iterator, PyObject* defval) { + PyObject* next; + // We always do a quick slot check because calling PyIter_Check() is so wasteful. + iternextfunc iternext = Py_TYPE(iterator)->tp_iternext; + if (likely(iternext)) { +#if CYTHON_USE_TYPE_SLOTS + next = iternext(iterator); + if (likely(next)) + return next; + #if PY_VERSION_HEX >= 0x02070000 + if (unlikely(iternext == &_PyObject_NextNotImplemented)) + return NULL; + #endif +#else + // Since the slot was set, assume that PyIter_Next() will likely succeed, and properly fail otherwise. + // Note: PyIter_Next() crashes in CPython if "tp_iternext" is NULL. + next = PyIter_Next(iterator); + if (likely(next)) + return next; +#endif + } else if (CYTHON_USE_TYPE_SLOTS || unlikely(!PyIter_Check(iterator))) { + // If CYTHON_USE_TYPE_SLOTS, then the slot was not set and we don't have an iterable. + // Otherwise, don't trust "tp_iternext" and rely on PyIter_Check(). + __Pyx_PyIter_Next_ErrorNoIterator(iterator); + return NULL; + } +#if !CYTHON_USE_TYPE_SLOTS + else { + // We have an iterator with an empty "tp_iternext", but didn't call next() on it yet. + next = PyIter_Next(iterator); + if (likely(next)) + return next; + } +#endif + return __Pyx_PyIter_Next2Default(defval); +} + +/////////////// IterFinish.proto /////////////// + +static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/ + +/////////////// IterFinish /////////////// + +// When PyIter_Next(iter) has returned NULL in order to signal termination, +// this function does the right cleanup and returns 0 on success. If it +// detects an error that occurred in the iterator, it returns -1. 
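+// In other words: a pending StopIteration is swallowed (the normal end of
+// iteration), while any other pending exception is left set for the caller
+// to propagate.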
+ +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + + +/////////////// ObjectGetItem.proto /////////////// + +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key);/*proto*/ +#else +#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key) +#endif + +/////////////// ObjectGetItem /////////////// +// //@requires: GetItemInt - added in IndexNode as it uses templating. + +#if CYTHON_USE_TYPE_SLOTS +static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) { + PyObject *runerr; + Py_ssize_t key_value; + PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence; + if (unlikely(!(m && m->sq_item))) { + PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name); + return NULL; + } + + key_value = __Pyx_PyIndex_AsSsize_t(index); + if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) { + return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1); + } + + // Error handling code -- only manage OverflowError differently. + if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { + PyErr_Clear(); + PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name); + } + return NULL; +} + +static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) { + PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping; + if (likely(m && m->mp_subscript)) { + return m->mp_subscript(obj, key); + } + return __Pyx_PyObject_GetIndex(obj, key); +} +#endif + + +/////////////// DictGetItem.proto /////////////// + +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key);/*proto*/ + +#define __Pyx_PyObject_Dict_GetItem(obj, name) \ + (likely(PyDict_CheckExact(obj)) ? \ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) + +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/////////////// DictGetItem /////////////// + +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + if (unlikely(PyTuple_Check(key))) { + // CPython interprets tuples as separate arguments => must wrap them in another tuple. + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) { + PyErr_SetObject(PyExc_KeyError, args); + Py_DECREF(args); + } + } else { + // Avoid tuple packing if possible. 
+ PyErr_SetObject(PyExc_KeyError, key); + } + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/////////////// GetItemInt.proto /////////////// + +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) : \ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) : \ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) + +{{for type in ['List', 'Tuple']}} +#define __Pyx_GetItemInt_{{type}}(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ + __Pyx_GetItemInt_{{type}}_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) : \ + (PyErr_SetString(PyExc_IndexError, "{{ type.lower() }} index out of range"), (PyObject*)NULL)) + +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_{{type}}_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +{{endfor}} + +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/////////////// GetItemInt /////////////// + +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} + +{{for type in ['List', 'Tuple']}} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_{{type}}_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += Py{{type}}_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, Py{{type}}_GET_SIZE(o)))) { + PyObject *r = Py{{type}}_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +{{endfor}} + +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + // inlined PySequence_GetItem() + special cased length overflow + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + // if length > max(Py_ssize_t), maybe the object can wrap around itself? 
+ if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/////////////// SetItemInt.proto /////////////// + +#define __Pyx_SetItemInt(o, i, v, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ + __Pyx_SetItemInt_Fast(o, (Py_ssize_t)i, v, is_list, wraparound, boundscheck) : \ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) : \ + __Pyx_SetItemInt_Generic(o, to_py_func(i), v))) + +static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v); +static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v, + int is_list, int wraparound, int boundscheck); + +/////////////// SetItemInt /////////////// + +static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v) { + int r; + if (!j) return -1; + r = PyObject_SetItem(o, j, v); + Py_DECREF(j); + return r; +} + +static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v, int is_list, + CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = (!wraparound) ? i : ((likely(i >= 0)) ? i : i + PyList_GET_SIZE(o)); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o)))) { + PyObject* old = PyList_GET_ITEM(o, n); + Py_INCREF(v); + PyList_SET_ITEM(o, n, v); + Py_DECREF(old); + return 1; + } + } else { + // inlined PySequence_SetItem() + special cased length overflow + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_ass_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + // if length > max(Py_ssize_t), maybe the object can wrap around itself? + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return -1; + PyErr_Clear(); + } + } + return m->sq_ass_item(o, i, v); + } + } +#else +#if CYTHON_COMPILING_IN_PYPY + if (is_list || (PySequence_Check(o) && !PyDict_Check(o))) +#else + if (is_list || PySequence_Check(o)) +#endif + { + return PySequence_SetItem(o, i, v); + } +#endif + return __Pyx_SetItemInt_Generic(o, PyInt_FromSsize_t(i), v); +} + + +/////////////// DelItemInt.proto /////////////// + +#define __Pyx_DelItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ + __Pyx_DelItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound) : \ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) : \ + __Pyx_DelItem_Generic(o, to_py_func(i)))) + +static int __Pyx_DelItem_Generic(PyObject *o, PyObject *j); +static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound); + +/////////////// DelItemInt /////////////// + +static int __Pyx_DelItem_Generic(PyObject *o, PyObject *j) { + int r; + if (!j) return -1; + r = PyObject_DelItem(o, j); + Py_DECREF(j); + return r; +} + +static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i, + CYTHON_UNUSED int is_list, CYTHON_NCP_UNUSED int wraparound) { +#if !CYTHON_USE_TYPE_SLOTS + if (is_list || PySequence_Check(o)) { + return PySequence_DelItem(o, i); + } +#else + // inlined PySequence_DelItem() + special cased length overflow + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_ass_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + // if length > max(Py_ssize_t), maybe the object can wrap around itself? + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return -1; + PyErr_Clear(); + } + } + return m->sq_ass_item(o, i, (PyObject *)NULL); + } +#endif + return __Pyx_DelItem_Generic(o, PyInt_FromSsize_t(i)); +} + + +/////////////// SliceObject.proto /////////////// + +// we pass pointer addresses to show the C compiler what is NULL and what isn't +{{if access == 'Get'}} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice( + PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** py_start, PyObject** py_stop, PyObject** py_slice, + int has_cstart, int has_cstop, int wraparound); +{{else}} +#define __Pyx_PyObject_DelSlice(obj, cstart, cstop, py_start, py_stop, py_slice, has_cstart, has_cstop, wraparound) \ + __Pyx_PyObject_SetSlice(obj, (PyObject*)NULL, cstart, cstop, py_start, py_stop, py_slice, has_cstart, has_cstop, wraparound) + +// we pass pointer addresses to show the C compiler what is NULL and what isn't +static CYTHON_INLINE int __Pyx_PyObject_SetSlice( + PyObject* obj, PyObject* value, Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** py_start, PyObject** py_stop, PyObject** py_slice, + int has_cstart, int has_cstop, int wraparound); +{{endif}} + +/////////////// SliceObject /////////////// + +{{if access == 'Get'}} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice(PyObject* obj, +{{else}} +static CYTHON_INLINE int __Pyx_PyObject_SetSlice(PyObject* obj, PyObject* value, +{{endif}} + Py_ssize_t cstart, Py_ssize_t cstop, + PyObject** _py_start, PyObject** _py_stop, PyObject** _py_slice, + int has_cstart, int has_cstop, CYTHON_UNUSED int wraparound) { +#if CYTHON_USE_TYPE_SLOTS + PyMappingMethods* mp; +#if PY_MAJOR_VERSION < 3 + PySequenceMethods* ms = Py_TYPE(obj)->tp_as_sequence; + if (likely(ms && ms->sq_{{if access == 'Set'}}ass_{{endif}}slice)) { + if (!has_cstart) { + if (_py_start && (*_py_start != Py_None)) { + cstart = __Pyx_PyIndex_AsSsize_t(*_py_start); + if ((cstart == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; + } else + cstart = 0; + } + if (!has_cstop) { + if (_py_stop && (*_py_stop != Py_None)) { + cstop = __Pyx_PyIndex_AsSsize_t(*_py_stop); + if ((cstop == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; + } else + cstop = PY_SSIZE_T_MAX; + } + if (wraparound && unlikely((cstart < 0) | (cstop < 0)) && likely(ms->sq_length)) { + Py_ssize_t l = ms->sq_length(obj); + if (likely(l >= 0)) { + if (cstop < 0) { + cstop += l; + if (cstop < 0) cstop = 
0; + } + if (cstart < 0) { + cstart += l; + if (cstart < 0) cstart = 0; + } + } else { + // if length > max(Py_ssize_t), maybe the object can wrap around itself? + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + goto bad; + PyErr_Clear(); + } + } +{{if access == 'Get'}} + return ms->sq_slice(obj, cstart, cstop); +{{else}} + return ms->sq_ass_slice(obj, cstart, cstop, value); +{{endif}} + } +#endif + + mp = Py_TYPE(obj)->tp_as_mapping; +{{if access == 'Get'}} + if (likely(mp && mp->mp_subscript)) +{{else}} + if (likely(mp && mp->mp_ass_subscript)) +{{endif}} +#endif + { + {{if access == 'Get'}}PyObject*{{else}}int{{endif}} result; + PyObject *py_slice, *py_start, *py_stop; + if (_py_slice) { + py_slice = *_py_slice; + } else { + PyObject* owned_start = NULL; + PyObject* owned_stop = NULL; + if (_py_start) { + py_start = *_py_start; + } else { + if (has_cstart) { + owned_start = py_start = PyInt_FromSsize_t(cstart); + if (unlikely(!py_start)) goto bad; + } else + py_start = Py_None; + } + if (_py_stop) { + py_stop = *_py_stop; + } else { + if (has_cstop) { + owned_stop = py_stop = PyInt_FromSsize_t(cstop); + if (unlikely(!py_stop)) { + Py_XDECREF(owned_start); + goto bad; + } + } else + py_stop = Py_None; + } + py_slice = PySlice_New(py_start, py_stop, Py_None); + Py_XDECREF(owned_start); + Py_XDECREF(owned_stop); + if (unlikely(!py_slice)) goto bad; + } +#if CYTHON_USE_TYPE_SLOTS +{{if access == 'Get'}} + result = mp->mp_subscript(obj, py_slice); +#else + result = PyObject_GetItem(obj, py_slice); +{{else}} + result = mp->mp_ass_subscript(obj, py_slice, value); +#else + result = value ? PyObject_SetItem(obj, py_slice, value) : PyObject_DelItem(obj, py_slice); +{{endif}} +#endif + if (!_py_slice) { + Py_DECREF(py_slice); + } + return result; + } + PyErr_Format(PyExc_TypeError, +{{if access == 'Get'}} + "'%.200s' object is unsliceable", Py_TYPE(obj)->tp_name); +{{else}} + "'%.200s' object does not support slice %.10s", + Py_TYPE(obj)->tp_name, value ? 
"assignment" : "deletion"); +{{endif}} + +bad: + return {{if access == 'Get'}}NULL{{else}}-1{{endif}}; +} + + +/////////////// SliceTupleAndList.proto /////////////// + +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyList_GetSlice(PyObject* src, Py_ssize_t start, Py_ssize_t stop); +static CYTHON_INLINE PyObject* __Pyx_PyTuple_GetSlice(PyObject* src, Py_ssize_t start, Py_ssize_t stop); +#else +#define __Pyx_PyList_GetSlice(seq, start, stop) PySequence_GetSlice(seq, start, stop) +#define __Pyx_PyTuple_GetSlice(seq, start, stop) PySequence_GetSlice(seq, start, stop) +#endif + +/////////////// SliceTupleAndList /////////////// + +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE void __Pyx_crop_slice(Py_ssize_t* _start, Py_ssize_t* _stop, Py_ssize_t* _length) { + Py_ssize_t start = *_start, stop = *_stop, length = *_length; + if (start < 0) { + start += length; + if (start < 0) + start = 0; + } + + if (stop < 0) + stop += length; + else if (stop > length) + stop = length; + + *_length = stop - start; + *_start = start; + *_stop = stop; +} + +static CYTHON_INLINE void __Pyx_copy_object_array(PyObject** CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { + PyObject *v; + Py_ssize_t i; + for (i = 0; i < length; i++) { + v = dest[i] = src[i]; + Py_INCREF(v); + } +} + +{{for type in ['List', 'Tuple']}} +static CYTHON_INLINE PyObject* __Pyx_Py{{type}}_GetSlice( + PyObject* src, Py_ssize_t start, Py_ssize_t stop) { + PyObject* dest; + Py_ssize_t length = Py{{type}}_GET_SIZE(src); + __Pyx_crop_slice(&start, &stop, &length); + if (unlikely(length <= 0)) + return Py{{type}}_New(0); + + dest = Py{{type}}_New(length); + if (unlikely(!dest)) + return NULL; + __Pyx_copy_object_array( + ((Py{{type}}Object*)src)->ob_item + start, + ((Py{{type}}Object*)dest)->ob_item, + length); + return dest; +} +{{endfor}} +#endif + + +/////////////// CalculateMetaclass.proto /////////////// + +static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases); + +/////////////// CalculateMetaclass /////////////// + +static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases) { + Py_ssize_t i, nbases = PyTuple_GET_SIZE(bases); + for (i=0; i < nbases; i++) { + PyTypeObject *tmptype; + PyObject *tmp = PyTuple_GET_ITEM(bases, i); + tmptype = Py_TYPE(tmp); +#if PY_MAJOR_VERSION < 3 + if (tmptype == &PyClass_Type) + continue; +#endif + if (!metaclass) { + metaclass = tmptype; + continue; + } + if (PyType_IsSubtype(metaclass, tmptype)) + continue; + if (PyType_IsSubtype(tmptype, metaclass)) { + metaclass = tmptype; + continue; + } + // else: + PyErr_SetString(PyExc_TypeError, + "metaclass conflict: " + "the metaclass of a derived class " + "must be a (non-strict) subclass " + "of the metaclasses of all its bases"); + return NULL; + } + if (!metaclass) { +#if PY_MAJOR_VERSION < 3 + metaclass = &PyClass_Type; +#else + metaclass = &PyType_Type; +#endif + } + // make owned reference + Py_INCREF((PyObject*) metaclass); + return (PyObject*) metaclass; +} + + +/////////////// FindInheritedMetaclass.proto /////////////// + +static PyObject *__Pyx_FindInheritedMetaclass(PyObject *bases); /*proto*/ + +/////////////// FindInheritedMetaclass /////////////// +//@requires: PyObjectGetAttrStr +//@requires: CalculateMetaclass + +static PyObject *__Pyx_FindInheritedMetaclass(PyObject *bases) { + PyObject *metaclass; + if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) { + PyTypeObject *metatype; +#if CYTHON_ASSUME_SAFE_MACROS && 
!CYTHON_AVOID_BORROWED_REFS + PyObject *base = PyTuple_GET_ITEM(bases, 0); +#else + PyObject *base = PySequence_ITEM(bases, 0); +#endif +#if PY_MAJOR_VERSION < 3 + PyObject* basetype = __Pyx_PyObject_GetAttrStr(base, PYIDENT("__class__")); + if (basetype) { + metatype = (PyType_Check(basetype)) ? ((PyTypeObject*) basetype) : NULL; + } else { + PyErr_Clear(); + metatype = Py_TYPE(base); + basetype = (PyObject*) metatype; + Py_INCREF(basetype); + } +#else + metatype = Py_TYPE(base); +#endif + metaclass = __Pyx_CalculateMetaclass(metatype, bases); +#if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(base); +#endif +#if PY_MAJOR_VERSION < 3 + Py_DECREF(basetype); +#endif + } else { + // no bases => use default metaclass +#if PY_MAJOR_VERSION < 3 + metaclass = (PyObject *) &PyClass_Type; +#else + metaclass = (PyObject *) &PyType_Type; +#endif + Py_INCREF(metaclass); + } + return metaclass; +} + +/////////////// Py3MetaclassGet.proto /////////////// + +static PyObject *__Pyx_Py3MetaclassGet(PyObject *bases, PyObject *mkw); /*proto*/ + +/////////////// Py3MetaclassGet /////////////// +//@requires: FindInheritedMetaclass +//@requires: CalculateMetaclass + +static PyObject *__Pyx_Py3MetaclassGet(PyObject *bases, PyObject *mkw) { + PyObject *metaclass = mkw ? __Pyx_PyDict_GetItemStr(mkw, PYIDENT("metaclass")) : NULL; + if (metaclass) { + Py_INCREF(metaclass); + if (PyDict_DelItem(mkw, PYIDENT("metaclass")) < 0) { + Py_DECREF(metaclass); + return NULL; + } + if (PyType_Check(metaclass)) { + PyObject* orig = metaclass; + metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); + Py_DECREF(orig); + } + return metaclass; + } + return __Pyx_FindInheritedMetaclass(bases); +} + +/////////////// CreateClass.proto /////////////// + +static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, + PyObject *qualname, PyObject *modname); /*proto*/ + +/////////////// CreateClass /////////////// +//@requires: FindInheritedMetaclass +//@requires: CalculateMetaclass + +static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, + PyObject *qualname, PyObject *modname) { + PyObject *result; + PyObject *metaclass; + + if (PyDict_SetItem(dict, PYIDENT("__module__"), modname) < 0) + return NULL; + if (PyDict_SetItem(dict, PYIDENT("__qualname__"), qualname) < 0) + return NULL; + + /* Python2 __metaclass__ */ + metaclass = __Pyx_PyDict_GetItemStr(dict, PYIDENT("__metaclass__")); + if (metaclass) { + Py_INCREF(metaclass); + if (PyType_Check(metaclass)) { + PyObject* orig = metaclass; + metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); + Py_DECREF(orig); + } + } else { + metaclass = __Pyx_FindInheritedMetaclass(bases); + } + if (unlikely(!metaclass)) + return NULL; + result = PyObject_CallFunctionObjArgs(metaclass, name, bases, dict, NULL); + Py_DECREF(metaclass); + return result; +} + +/////////////// Py3ClassCreate.proto /////////////// + +static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, PyObject *qualname, + PyObject *mkw, PyObject *modname, PyObject *doc); /*proto*/ +static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, PyObject *dict, + PyObject *mkw, int calculate_metaclass, int allow_py2_metaclass); /*proto*/ + +/////////////// Py3ClassCreate /////////////// +//@requires: PyObjectGetAttrStr +//@requires: CalculateMetaclass + +static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, + 
PyObject *qualname, PyObject *mkw, PyObject *modname, PyObject *doc) { + PyObject *ns; + if (metaclass) { + PyObject *prep = __Pyx_PyObject_GetAttrStr(metaclass, PYIDENT("__prepare__")); + if (prep) { + PyObject *pargs = PyTuple_Pack(2, name, bases); + if (unlikely(!pargs)) { + Py_DECREF(prep); + return NULL; + } + ns = PyObject_Call(prep, pargs, mkw); + Py_DECREF(prep); + Py_DECREF(pargs); + } else { + if (unlikely(!PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + PyErr_Clear(); + ns = PyDict_New(); + } + } else { + ns = PyDict_New(); + } + + if (unlikely(!ns)) + return NULL; + + /* Required here to emulate assignment order */ + if (unlikely(PyObject_SetItem(ns, PYIDENT("__module__"), modname) < 0)) goto bad; + if (unlikely(PyObject_SetItem(ns, PYIDENT("__qualname__"), qualname) < 0)) goto bad; + if (unlikely(doc && PyObject_SetItem(ns, PYIDENT("__doc__"), doc) < 0)) goto bad; + return ns; +bad: + Py_DECREF(ns); + return NULL; +} + +static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, + PyObject *dict, PyObject *mkw, + int calculate_metaclass, int allow_py2_metaclass) { + PyObject *result, *margs; + PyObject *owned_metaclass = NULL; + if (allow_py2_metaclass) { + /* honour Python2 __metaclass__ for backward compatibility */ + owned_metaclass = PyObject_GetItem(dict, PYIDENT("__metaclass__")); + if (owned_metaclass) { + metaclass = owned_metaclass; + } else if (likely(PyErr_ExceptionMatches(PyExc_KeyError))) { + PyErr_Clear(); + } else { + return NULL; + } + } + if (calculate_metaclass && (!metaclass || PyType_Check(metaclass))) { + metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); + Py_XDECREF(owned_metaclass); + if (unlikely(!metaclass)) + return NULL; + owned_metaclass = metaclass; + } + margs = PyTuple_Pack(3, name, bases, dict); + if (unlikely(!margs)) { + result = NULL; + } else { + result = PyObject_Call(metaclass, margs, mkw); + Py_DECREF(margs); + } + Py_XDECREF(owned_metaclass); + return result; +} + +/////////////// ExtTypeTest.proto /////////////// + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/ + +/////////////// ExtTypeTest /////////////// + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/////////////// CallableCheck.proto /////////////// + +#if CYTHON_USE_TYPE_SLOTS && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyCallable_Check(obj) ((obj)->ob_type->tp_call != NULL) +#else +#define __Pyx_PyCallable_Check(obj) PyCallable_Check(obj) +#endif + +/////////////// PyDictContains.proto /////////////// + +static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { + int result = PyDict_Contains(dict, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/////////////// PySetContains.proto /////////////// + +static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq); /* proto */ + +/////////////// PySetContains /////////////// +//@requires: Builtins.c::pyfrozenset_new + +static int __Pyx_PySet_ContainsUnhashable(PyObject *set, PyObject *key) { + int result = -1; + if (PySet_Check(key) && PyErr_ExceptionMatches(PyExc_TypeError)) { + /* Convert key to frozenset */ + PyObject *tmpkey; + PyErr_Clear(); + tmpkey = __Pyx_PyFrozenSet_New(key); + if (tmpkey != NULL) { + result = PySet_Contains(set, tmpkey); + Py_DECREF(tmpkey); + } + } + return result; +} + +static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq) { + int result = PySet_Contains(set, key); + + if (unlikely(result < 0)) { + result = __Pyx_PySet_ContainsUnhashable(set, key); + } + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/////////////// PySequenceContains.proto /////////////// + +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/////////////// PyBoolOrNullFromLong.proto /////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyBoolOrNull_FromLong(long b) { + return unlikely(b < 0) ? NULL : __Pyx_PyBool_FromLong(b); +} + +/////////////// GetBuiltinName.proto /////////////// + +static PyObject *__Pyx_GetBuiltinName(PyObject *name); /*proto*/ + +/////////////// GetBuiltinName /////////////// +//@requires: PyObjectGetAttrStr +//@substitute: naming + +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr($builtins_cname, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/////////////// GetNameInClass.proto /////////////// + +#define __Pyx_GetNameInClass(var, nmspace, name) (var) = __Pyx__GetNameInClass(nmspace, name) +static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name); /*proto*/ + +/////////////// GetNameInClass /////////////// +//@requires: PyObjectGetAttrStr +//@requires: GetModuleGlobalName +//@requires: Exceptions.c::PyThreadStateGet +//@requires: Exceptions.c::PyErrFetchRestore +//@requires: Exceptions.c::PyErrExceptionMatches + +static PyObject *__Pyx_GetGlobalNameAfterAttributeLookup(PyObject *name) { + PyObject *result; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + __Pyx_GetModuleGlobalNameUncached(result, name); + return result; +} + +static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name) { + PyObject *result; + result = __Pyx_PyObject_GetAttrStr(nmspace, name); + if (!result) { + result = __Pyx_GetGlobalNameAfterAttributeLookup(name); + } + return result; +} + + +/////////////// SetNameInClass.proto /////////////// + +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 +// Identifier names are always interned and have a pre-calculated hash value. +#define __Pyx_SetNameInClass(ns, name, value) \ + (likely(PyDict_CheckExact(ns)) ? 
_PyDict_SetItem_KnownHash(ns, name, value, ((PyASCIIObject *) name)->hash) : PyObject_SetItem(ns, name, value)) +#elif CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_SetNameInClass(ns, name, value) \ + (likely(PyDict_CheckExact(ns)) ? PyDict_SetItem(ns, name, value) : PyObject_SetItem(ns, name, value)) +#else +#define __Pyx_SetNameInClass(ns, name, value) PyObject_SetItem(ns, name, value) +#endif + + +/////////////// GetModuleGlobalName.proto /////////////// +//@requires: PyDictVersioning +//@substitute: naming + +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) { \ + static PY_UINT64_T __pyx_dict_version = 0; \ + static PyObject *__pyx_dict_cached_value = NULL; \ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION($moddict_cname))) ? \ + (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) : \ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value); \ +} +#define __Pyx_GetModuleGlobalNameUncached(var, name) { \ + PY_UINT64_T __pyx_dict_version; \ + PyObject *__pyx_dict_cached_value; \ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value); \ +} +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); /*proto*/ +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); /*proto*/ +#endif + + +/////////////// GetModuleGlobalName /////////////// +//@requires: GetBuiltinName +//@substitute: naming + +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + // Identifier names are always interned and have a pre-calculated hash value. 
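+    // (Sketch of the caching contract: on CPython 3.6+, __PYX_GET_DICT_VERSION
+    // reads the dict's ma_version_tag (PEP 509), which changes on every
+    // mutation of the module dict. A stale tag in the caching macro above
+    // falls through to this function, which re-reads the value and refreshes
+    // the cached pointer and version via __PYX_UPDATE_DICT_CACHE.)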
+ result = _PyDict_GetItem_KnownHash($moddict_cname, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE($moddict_cname, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem($moddict_cname, name); + __PYX_UPDATE_DICT_CACHE($moddict_cname, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem($moddict_cname, name); + __PYX_UPDATE_DICT_CACHE($moddict_cname, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +//////////////////// GetAttr.proto //////////////////// + +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); /*proto*/ + +//////////////////// GetAttr //////////////////// +//@requires: PyObjectGetAttrStr + +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/////////////// PyObjectLookupSpecial.proto /////////////// +//@requires: PyObjectGetAttrStr + +#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name) { + PyObject *res; + PyTypeObject *tp = Py_TYPE(obj); +#if PY_MAJOR_VERSION < 3 + if (unlikely(PyInstance_Check(obj))) + return __Pyx_PyObject_GetAttrStr(obj, attr_name); +#endif + // adapted from CPython's special_lookup() in ceval.c + res = _PyType_Lookup(tp, attr_name); + if (likely(res)) { + descrgetfunc f = Py_TYPE(res)->tp_descr_get; + if (!f) { + Py_INCREF(res); + } else { + res = f(res, obj, (PyObject *)tp); + } + } else { + PyErr_SetObject(PyExc_AttributeError, attr_name); + } + return res; +} +#else +#define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n) +#endif + + +/////////////// PyObject_GenericGetAttrNoDict.proto /////////////// + +// Setting "tp_getattro" to anything but "PyObject_GenericGetAttr" disables fast method calls in Py3.7. +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +// No-args macro to allow function pointer assignment. +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/////////////// PyObject_GenericGetAttrNoDict /////////////// + +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 + +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} + +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + // Copied and adapted from _PyObject_GenericGetAttrWithDict() in CPython 2.6/3.7. + // To be used in the "tp_getattro" slot of extension types that have no instance dict and cannot be subclassed. 
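+    // Illustrative wiring (the type name is hypothetical): a generated
+    // extension type without an instance dict would set
+    //     SomeExtType.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
+    // so that attribute lookup reduces to _PyType_Lookup() plus an optional
+    // descriptor call, with no instance-dict probing at all.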
+ PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + + Py_INCREF(descr); + + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + // Optimise for the non-descriptor case because it is faster. + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + + +/////////////// PyObject_GenericGetAttr.proto /////////////// + +// Setting "tp_getattro" to anything but "PyObject_GenericGetAttr" disables fast method calls in Py3.7. +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +// No-args macro to allow function pointer assignment. +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/////////////// PyObject_GenericGetAttr /////////////// +//@requires: PyObject_GenericGetAttrNoDict + +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + + +/////////////// PyObjectGetAttrStrNoError.proto /////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);/*proto*/ + +/////////////// PyObjectGetAttrStrNoError /////////////// +//@requires: PyObjectGetAttrStr +//@requires: Exceptions.c::PyThreadStateGet +//@requires: Exceptions.c::PyErrFetchRestore +//@requires: Exceptions.c::PyErrExceptionMatches + +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} + +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + // _PyObject_GenericGetAttrWithDict() in CPython 3.7+ can avoid raising the AttributeError. 
+    // See https://bugs.python.org/issue32544
+    PyTypeObject* tp = Py_TYPE(obj);
+    if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) {
+        return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1);
+    }
+#endif
+    result = __Pyx_PyObject_GetAttrStr(obj, attr_name);
+    if (unlikely(!result)) {
+        __Pyx_PyObject_GetAttrStr_ClearAttributeError();
+    }
+    return result;
+}
+
+
+/////////////// PyObjectGetAttrStr.proto ///////////////
+
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);/*proto*/
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/////////////// PyObjectGetAttrStr ///////////////
+
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
+    PyTypeObject* tp = Py_TYPE(obj);
+    if (likely(tp->tp_getattro))
+        return tp->tp_getattro(obj, attr_name);
+#if PY_MAJOR_VERSION < 3
+    if (likely(tp->tp_getattr))
+        return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
+#endif
+    return PyObject_GetAttr(obj, attr_name);
+}
+#endif
+
+
+/////////////// PyObjectSetAttrStr.proto ///////////////
+
+#if CYTHON_USE_TYPE_SLOTS
+#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL)
+static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value);/*proto*/
+#else
+#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n)
+#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v)
+#endif
+
+/////////////// PyObjectSetAttrStr ///////////////
+
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) {
+    PyTypeObject* tp = Py_TYPE(obj);
+    if (likely(tp->tp_setattro))
+        return tp->tp_setattro(obj, attr_name, value);
+#if PY_MAJOR_VERSION < 3
+    if (likely(tp->tp_setattr))
+        return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value);
+#endif
+    return PyObject_SetAttr(obj, attr_name, value);
+}
+#endif
+
+
+/////////////// PyObjectGetMethod.proto ///////////////
+
+static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method);/*proto*/
+
+/////////////// PyObjectGetMethod ///////////////
+//@requires: PyObjectGetAttrStr
+
+static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) {
+    PyObject *attr;
+#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP
+    // Copied from _PyObject_GetMethod() in CPython 3.7
+    PyTypeObject *tp = Py_TYPE(obj);
+    PyObject *descr;
+    descrgetfunc f = NULL;
+    PyObject **dictptr, *dict;
+    int meth_found = 0;
+
+    assert (*method == NULL);
+
+    if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) {
+        attr = __Pyx_PyObject_GetAttrStr(obj, name);
+        goto try_unpack;
+    }
+    if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) {
+        return 0;
+    }
+
+    descr = _PyType_Lookup(tp, name);
+    if (likely(descr != NULL)) {
+        Py_INCREF(descr);
+        // Repeating the condition below accommodates MSVC's inability to test macros inside macro expansions.
+#if PY_MAJOR_VERSION >= 3
+        #ifdef __Pyx_CyFunction_USED
+        if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr)))
+        #else
+        if (likely(PyFunction_Check(descr) || (Py_TYPE(descr) == &PyMethodDescr_Type)))
+        #endif
+#else
+        // "PyMethodDescr_Type" is not part of the C-API in Py2.
+        #ifdef __Pyx_CyFunction_USED
+        if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr)))
+        #else
+        if (likely(PyFunction_Check(descr)))
+        #endif
+#endif
+        {
+            meth_found = 1;
+        } else {
+            f = Py_TYPE(descr)->tp_descr_get;
+            if (f != NULL && PyDescr_IsData(descr)) {
+                attr = f(descr, obj, (PyObject *)Py_TYPE(obj));
+                Py_DECREF(descr);
+                goto try_unpack;
+            }
+        }
+    }
+
+    dictptr = _PyObject_GetDictPtr(obj);
+    if (dictptr != NULL && (dict = *dictptr) != NULL) {
+        Py_INCREF(dict);
+        attr = __Pyx_PyDict_GetItemStr(dict, name);
+        if (attr != NULL) {
+            Py_INCREF(attr);
+            Py_DECREF(dict);
+            Py_XDECREF(descr);
+            goto try_unpack;
+        }
+        Py_DECREF(dict);
+    }
+
+    if (meth_found) {
+        *method = descr;
+        return 1;
+    }
+
+    if (f != NULL) {
+        attr = f(descr, obj, (PyObject *)Py_TYPE(obj));
+        Py_DECREF(descr);
+        goto try_unpack;
+    }
+
+    if (descr != NULL) {
+        *method = descr;
+        return 0;
+    }
+
+    PyErr_Format(PyExc_AttributeError,
+#if PY_MAJOR_VERSION >= 3
+                 "'%.50s' object has no attribute '%U'",
+                 tp->tp_name, name);
+#else
+                 "'%.50s' object has no attribute '%.400s'",
+                 tp->tp_name, PyString_AS_STRING(name));
+#endif
+    return 0;
+
+// Generic fallback implementation using normal attribute lookup.
+#else
+    attr = __Pyx_PyObject_GetAttrStr(obj, name);
+    goto try_unpack;
+#endif
+
+try_unpack:
+#if CYTHON_UNPACK_METHODS
+    // Even if we failed to avoid creating a bound method object, it's still worth unpacking it now, if possible.
+    if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) {
+        PyObject *function = PyMethod_GET_FUNCTION(attr);
+        Py_INCREF(function);
+        Py_DECREF(attr);
+        *method = function;
+        return 1;
+    }
+#endif
+    *method = attr;
+    return 0;
+}
+
+
+/////////////// UnpackUnboundCMethod.proto ///////////////
+
+typedef struct {
+    PyObject *type;
+    PyObject **method_name;
+    // "func" is set on first access (direct C function pointer)
+    PyCFunction func;
+    // "method" is set on first access (fallback)
+    PyObject *method;
+    int flag;
+} __Pyx_CachedCFunction;
+
+/////////////// UnpackUnboundCMethod ///////////////
+//@requires: PyObjectGetAttrStr
+
+static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) {
+    PyObject *method;
+    method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name);
+    if (unlikely(!method))
+        return -1;
+    target->method = method;
+#if CYTHON_COMPILING_IN_CPYTHON
+    #if PY_MAJOR_VERSION >= 3
+    // The method descriptor type isn't exported in Py2.x, so we cannot easily check the type there.
+    if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type)))
+    #endif
+    {
+        PyMethodDescrObject *descr = (PyMethodDescrObject*) method;
+        target->func = descr->d_method->ml_meth;
+        target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS);
+    }
+#endif
+    return 0;
+}
+
+
+/////////////// CallUnboundCMethod0.proto ///////////////
+//@substitute: naming
+
+static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self); /*proto*/
+#if CYTHON_COMPILING_IN_CPYTHON
+// FASTCALL methods receive "&empty_tuple" as simple "PyObject[0]*"
+#define __Pyx_CallUnboundCMethod0(cfunc, self) \
+    (likely((cfunc)->func) ? \
+        (likely((cfunc)->flag == METH_NOARGS) ? (*((cfunc)->func))(self, NULL) : \
+         (PY_VERSION_HEX >= 0x030600B1 && likely((cfunc)->flag == METH_FASTCALL) ? \
+            (PY_VERSION_HEX >= 0x030700A0 ? 
\ + (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)(cfunc)->func)(self, &$empty_tuple, 0) : \ + (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &$empty_tuple, 0, NULL)) : \ + (PY_VERSION_HEX >= 0x030700A0 && (cfunc)->flag == (METH_FASTCALL | METH_KEYWORDS) ? \ + (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &$empty_tuple, 0, NULL) : \ + (likely((cfunc)->flag == (METH_VARARGS | METH_KEYWORDS)) ? ((*(PyCFunctionWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, $empty_tuple, NULL)) : \ + ((cfunc)->flag == METH_VARARGS ? (*((cfunc)->func))(self, $empty_tuple) : \ + __Pyx__CallUnboundCMethod0(cfunc, self)))))) : \ + __Pyx__CallUnboundCMethod0(cfunc, self)) +#else +#define __Pyx_CallUnboundCMethod0(cfunc, self) __Pyx__CallUnboundCMethod0(cfunc, self) +#endif + +/////////////// CallUnboundCMethod0 /////////////// +//@requires: UnpackUnboundCMethod +//@requires: PyObjectCall + +static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self) { + PyObject *args, *result = NULL; + if (unlikely(!cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_ASSUME_SAFE_MACROS + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); +#else + args = PyTuple_Pack(1, self); + if (unlikely(!args)) goto bad; +#endif + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + Py_DECREF(args); +bad: + return result; +} + + +/////////////// CallUnboundCMethod1.proto /////////////// + +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg);/*proto*/ + +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg);/*proto*/ +#else +#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) +#endif + +/////////////// CallUnboundCMethod1 /////////////// +//@requires: UnpackUnboundCMethod +//@requires: PyObjectCall + +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { + if (likely(cfunc->func)) { + int flag = cfunc->flag; + // Not using #ifdefs for PY_VERSION_HEX to avoid C compiler warnings about unused functions. 
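+        // Dispatch sketch: METH_O receives its single argument directly,
+        // while METH_FASTCALL receives a C array plus an argument count.
+        // On 3.6 the FASTCALL signature still carried a kwnames parameter
+        // (hence the "WithKeywords" cast there); 3.7+ reserves that
+        // signature for METH_FASTCALL | METH_KEYWORDS.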
+ if (flag == METH_O) { + return (*(cfunc->func))(self, arg); + } else if (PY_VERSION_HEX >= 0x030600B1 && flag == METH_FASTCALL) { + if (PY_VERSION_HEX >= 0x030700A0) { + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); + } else { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } else if (PY_VERSION_HEX >= 0x030700A0 && flag == (METH_FASTCALL | METH_KEYWORDS)) { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } + return __Pyx__CallUnboundCMethod1(cfunc, self, arg); +} +#endif + +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(2, self, arg); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + + +/////////////// CallUnboundCMethod2.proto /////////////// + +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); /*proto*/ + +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); /*proto*/ +#else +#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2) +#endif + +/////////////// CallUnboundCMethod2 /////////////// +//@requires: UnpackUnboundCMethod +//@requires: PyObjectCall + +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2) { + if (likely(cfunc->func)) { + PyObject *args[2] = {arg1, arg2}; + if (cfunc->flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, args, 2); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif + } + #if PY_VERSION_HEX >= 0x030700A0 + if (cfunc->flag == (METH_FASTCALL | METH_KEYWORDS)) + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif + } + return __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2); +} +#endif + +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(2); + 
if (unlikely(!args)) goto bad; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(3); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 1, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 2, arg2); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(3, self, arg1, arg2); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + + +/////////////// PyObjectCallMethod0.proto /////////////// + +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); /*proto*/ + +/////////////// PyObjectCallMethod0 /////////////// +//@requires: PyObjectGetMethod +//@requires: PyObjectCallOneArg +//@requires: PyObjectCallNoArg + +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { + PyObject *method = NULL, *result = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_CallOneArg(method, obj); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) goto bad; + result = __Pyx_PyObject_CallNoArg(method); + Py_DECREF(method); +bad: + return result; +} + + +/////////////// PyObjectCallMethod1.proto /////////////// + +static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); /*proto*/ + +/////////////// PyObjectCallMethod1 /////////////// +//@requires: PyObjectGetMethod +//@requires: PyObjectCallOneArg +//@requires: PyObjectCall2Args + +static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) { + // Separate function to avoid excessive inlining. 
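+    // (The common case in __Pyx_PyObject_CallMethod1() below is the unpacked
+    // bound method, dispatched through __Pyx_PyObject_Call2Args; keeping this
+    // plain-callable fallback out of line keeps that hot path small.)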
+    PyObject *result = __Pyx_PyObject_CallOneArg(method, arg);
+    Py_DECREF(method);
+    return result;
+}
+
+static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) {
+    PyObject *method = NULL, *result;
+    int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method);
+    if (likely(is_method)) {
+        result = __Pyx_PyObject_Call2Args(method, obj, arg);
+        Py_DECREF(method);
+        return result;
+    }
+    if (unlikely(!method)) return NULL;
+    return __Pyx__PyObject_CallMethod1(method, arg);
+}
+
+
+/////////////// PyObjectCallMethod2.proto ///////////////
+
+static PyObject* __Pyx_PyObject_CallMethod2(PyObject* obj, PyObject* method_name, PyObject* arg1, PyObject* arg2); /*proto*/
+
+/////////////// PyObjectCallMethod2 ///////////////
+//@requires: PyObjectCall
+//@requires: PyFunctionFastCall
+//@requires: PyCFunctionFastCall
+//@requires: PyObjectCall2Args
+
+static PyObject* __Pyx_PyObject_Call3Args(PyObject* function, PyObject* arg1, PyObject* arg2, PyObject* arg3) {
+    // Declared up front for the tuple-based slow path; the fast paths below return early.
+    PyObject *args, *result;
+    #if CYTHON_FAST_PYCALL
+    if (PyFunction_Check(function)) {
+        PyObject *args[3] = {arg1, arg2, arg3};
+        return __Pyx_PyFunction_FastCall(function, args, 3);
+    }
+    #endif
+    #if CYTHON_FAST_PYCCALL
+    if (__Pyx_PyFastCFunction_Check(function)) {
+        PyObject *args[3] = {arg1, arg2, arg3};
+        // Builtin C functions go through the C fast-call helper, not the Python-function one.
+        return __Pyx_PyCFunction_FastCall(function, args, 3);
+    }
+    #endif
+
+    args = PyTuple_New(3);
+    if (unlikely(!args)) return NULL;
+    Py_INCREF(arg1);
+    PyTuple_SET_ITEM(args, 0, arg1);
+    Py_INCREF(arg2);
+    PyTuple_SET_ITEM(args, 1, arg2);
+    Py_INCREF(arg3);
+    PyTuple_SET_ITEM(args, 2, arg3);
+
+    result = __Pyx_PyObject_Call(function, args, NULL);
+    Py_DECREF(args);
+    return result;
+}
+
+static PyObject* __Pyx_PyObject_CallMethod2(PyObject* obj, PyObject* method_name, PyObject* arg1, PyObject* arg2) {
+    PyObject *method = NULL, *result = NULL;
+    int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method);
+    if (likely(is_method)) {
+        result = __Pyx_PyObject_Call3Args(method, obj, arg1, arg2);
+        Py_DECREF(method);
+        return result;
+    }
+    if (unlikely(!method)) return NULL;
+    result = __Pyx_PyObject_Call2Args(method, arg1, arg2);
+    Py_DECREF(method);
+    return result;
+}
+
+
+/////////////// tp_new.proto ///////////////
+
+#define __Pyx_tp_new(type_obj, args) __Pyx_tp_new_kwargs(type_obj, args, NULL)
+static CYTHON_INLINE PyObject* __Pyx_tp_new_kwargs(PyObject* type_obj, PyObject* args, PyObject* kwargs) {
+    return (PyObject*) (((PyTypeObject*)type_obj)->tp_new((PyTypeObject*)type_obj, args, kwargs));
+}
+
+
+/////////////// PyObjectCall.proto ///////////////
+
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); /*proto*/
+#else
+#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)
+#endif
+
+/////////////// PyObjectCall ///////////////
+
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
+    PyObject *result;
+    ternaryfunc call = func->ob_type->tp_call;
+
+    if (unlikely(!call))
+        return PyObject_Call(func, arg, kw);
+    if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
+        return NULL;
+    result = (*call)(func, arg, kw);
+    Py_LeaveRecursiveCall();
+    if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
+        PyErr_SetString(
+            PyExc_SystemError,
+            "NULL result without error in PyObject_Call");
+    }
+    return result;
+}
+#endif
+
+
+/////////////// PyObjectCallMethO.proto ///////////////
+
+#if 
CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); /*proto*/ +#endif + +/////////////// PyObjectCallMethO /////////////// + +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + + +/////////////// PyFunctionFastCall.proto /////////////// + +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs) \ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) + +// let's assume that the non-public C-API function might still change during the 3.6 beta phase +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif + +// Backport from Python 3 +// Assert a build-time dependency, as an expression. +// Your compile will fail if the condition isn't true, or can't be evaluated +// by the compiler. This can be used in an expression: its value is 0. +// Example: +// #define foo_to_char(foo) \ +// ((char *)(foo) \ +// + Py_BUILD_ASSERT_EXPR(offsetof(struct foo, string) == 0)) +// +// Written by Rusty Russell, public domain, http://ccodearchive.net/ +#define __Pyx_BUILD_ASSERT_EXPR(cond) \ + (sizeof(char [1 - 2*!(cond)]) - 1) + +#ifndef Py_MEMBER_SIZE +// Get the size of a structure member in bytes +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif + + // Initialised by module init code. + static size_t __pyx_pyframe_localsplus_offset = 0; + + #include "frameobject.h" + // This is the long runtime version of + // #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) + // offsetof(PyFrameObject, f_localsplus) differs between regular C-Python and Stackless Python. + // Therefore the offset is computed at run time from PyFrame_type.tp_basicsize. That is feasible, + // because f_localsplus is the last field of PyFrameObject (checked by Py_BUILD_ASSERT_EXPR below). 
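+    // Worked example with made-up sizes: if PyFrame_Type.tp_basicsize were
+    // 368 bytes and f_localsplus were declared as a one-pointer trailing
+    // array (8 bytes on a 64-bit build), the computed offset would be
+    // 368 - 8 = 360, i.e. the value offsetof(PyFrameObject, f_localsplus)
+    // yields on a regular CPython build.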
+ #define __Pxy_PyFrame_Initialize_Offsets() \ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)), \ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame) \ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif + + +/////////////// PyFunctionFastCall /////////////// +// copied from CPython 3.6 ceval.c + +#if CYTHON_FAST_PYCALL + +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + + return result; +} + + +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; + //#if PY_VERSION_HEX >= 0x03050000 + //PyObject *name, *qualname; + //#endif +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + /* Fast paths */ + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); + //#if PY_VERSION_HEX >= 0x03050000 + //name = ((PyFunctionObject *)func) -> func_name; + //qualname = ((PyFunctionObject *)func) -> func_qualname; + //#endif +#endif + + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } + + //#if PY_VERSION_HEX >= 0x03050000 + //return _PyEval_EvalCodeWithName((PyObject*)co, globals, (PyObject *)NULL, + // args, nargs, + // NULL, 0, + // d, nd, kwdefs, + // closure, name, qualname); + //#elif PY_MAJOR_VERSION >= 3 +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); + +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif /* CPython < 3.6 */ +#endif /* CYTHON_FAST_PYCALL */ + + +/////////////// PyCFunctionFastCall.proto /////////////// + +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/////////////// PyCFunctionFastCall /////////////// + +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return 
(*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif /* CYTHON_FAST_PYCCALL */ + + +/////////////// PyObjectCall2Args.proto /////////////// + +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); /*proto*/ + +/////////////// PyObjectCall2Args /////////////// +//@requires: PyObjectCall +//@requires: PyFunctionFastCall +//@requires: PyCFunctionFastCall + +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + + +/////////////// PyObjectCallOneArg.proto /////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); /*proto*/ + +/////////////// PyObjectCallOneArg /////////////// +//@requires: PyObjectCallMethO +//@requires: PyObjectCall +//@requires: PyFunctionFastCall +//@requires: PyCFunctionFastCall + +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} + +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + // fast and simple case that we are optimising for + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + + +/////////////// PyObjectCallNoArg.proto /////////////// +//@requires: PyObjectCall +//@substitute: naming + +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); /*proto*/ +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, $empty_tuple, NULL) +#endif + +/////////////// PyObjectCallNoArg /////////////// +//@requires: PyObjectCallMethO +//@requires: PyObjectCall +//@requires: PyFunctionFastCall +//@substitute: naming + +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } 
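+    // (Editorial note: this mirrors __Pyx_PyObject_CallOneArg above -- try
+    // the PyFunction fast path first, then the METH_NOARGS fast path below,
+    // and only then fall back to the generic tuple-based __Pyx_PyObject_Call.)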
+#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) +#else + if (likely(PyCFunction_Check(func))) +#endif + { + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + // fast and simple case that we are optimising for + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, $empty_tuple, NULL); +} +#endif + + +/////////////// MatrixMultiply.proto /////////////// + +#if PY_VERSION_HEX >= 0x03050000 + #define __Pyx_PyNumber_MatrixMultiply(x,y) PyNumber_MatrixMultiply(x,y) + #define __Pyx_PyNumber_InPlaceMatrixMultiply(x,y) PyNumber_InPlaceMatrixMultiply(x,y) +#else +#define __Pyx_PyNumber_MatrixMultiply(x,y) __Pyx__PyNumber_MatrixMultiply(x, y, "@") +static PyObject* __Pyx__PyNumber_MatrixMultiply(PyObject* x, PyObject* y, const char* op_name); +static PyObject* __Pyx_PyNumber_InPlaceMatrixMultiply(PyObject* x, PyObject* y); +#endif + +/////////////// MatrixMultiply /////////////// +//@requires: PyObjectGetAttrStr +//@requires: PyObjectCallOneArg +//@requires: PyFunctionFastCall +//@requires: PyCFunctionFastCall + +#if PY_VERSION_HEX < 0x03050000 +static PyObject* __Pyx_PyObject_CallMatrixMethod(PyObject* method, PyObject* arg) { + // NOTE: eats the method reference + PyObject *result = NULL; +#if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(method))) { + PyObject *self = PyMethod_GET_SELF(method); + if (likely(self)) { + PyObject *args; + PyObject *function = PyMethod_GET_FUNCTION(method); + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {self, arg}; + result = __Pyx_PyFunction_FastCall(function, args, 2); + goto done; + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {self, arg}; + result = __Pyx_PyCFunction_FastCall(function, args, 2); + goto done; + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + Py_INCREF(function); + Py_DECREF(method); method = NULL; + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); + return result; + } + } +#endif + result = __Pyx_PyObject_CallOneArg(method, arg); +done: + Py_DECREF(method); + return result; +} + +#define __Pyx_TryMatrixMethod(x, y, py_method_name) { \ + PyObject *func = __Pyx_PyObject_GetAttrStr(x, py_method_name); \ + if (func) { \ + PyObject *result = __Pyx_PyObject_CallMatrixMethod(func, y); \ + if (result != Py_NotImplemented) \ + return result; \ + Py_DECREF(result); \ + } else { \ + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) \ + return NULL; \ + PyErr_Clear(); \ + } \ +} + +static PyObject* __Pyx__PyNumber_MatrixMultiply(PyObject* x, PyObject* y, const char* op_name) { + int right_is_subtype = PyObject_IsSubclass((PyObject*)Py_TYPE(y), (PyObject*)Py_TYPE(x)); + if (unlikely(right_is_subtype == -1)) + return NULL; + if (right_is_subtype) { + // to allow subtypes to override parent behaviour, try reversed operation first + // see note at https://docs.python.org/3/reference/datamodel.html#emulating-numeric-types + __Pyx_TryMatrixMethod(y, x, PYIDENT("__rmatmul__")) + } + __Pyx_TryMatrixMethod(x, y, PYIDENT("__matmul__")) + if (!right_is_subtype) { + __Pyx_TryMatrixMethod(y, x, PYIDENT("__rmatmul__")) + } + PyErr_Format(PyExc_TypeError, + "unsupported operand type(s) for %.2s: '%.100s' and '%.100s'", + op_name, + Py_TYPE(x)->tp_name, + Py_TYPE(y)->tp_name); + return NULL; +} + 
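+// Editorial illustration of the dispatch implemented above (a paraphrase of
+// the code, not additional behaviour): for "x @ y" on Python < 3.5,
+//   1. if type(y) is a proper subclass of type(x), y.__rmatmul__(x) is tried
+//      first so that subclasses can override their parents;
+//   2. otherwise x.__matmul__(y) is tried, then y.__rmatmul__(x);
+//   3. a Py_NotImplemented result (or a missing attribute) means "try the
+//      next candidate"; only when all candidates fail is TypeError raised.
+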
+static PyObject* __Pyx_PyNumber_InPlaceMatrixMultiply(PyObject* x, PyObject* y) { + __Pyx_TryMatrixMethod(x, y, PYIDENT("__imatmul__")) + return __Pyx__PyNumber_MatrixMultiply(x, y, "@="); +} + +#undef __Pyx_TryMatrixMethod +#endif + + +/////////////// PyDictVersioning.proto /////////////// + +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) \ + (version_var) = __PYX_GET_DICT_VERSION(dict); \ + (cache_var) = (value); + +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) { \ + static PY_UINT64_T __pyx_dict_version = 0; \ + static PyObject *__pyx_dict_cached_value = NULL; \ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) { \ + (VAR) = __pyx_dict_cached_value; \ + } else { \ + (VAR) = __pyx_dict_cached_value = (LOOKUP); \ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT); \ + } \ +} + +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); /*proto*/ +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); /*proto*/ +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); /*proto*/ + +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/////////////// PyDictVersioning /////////////// + +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} + +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; +} + +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Optimize.c b/venv/lib/python3.8/site-packages/Cython/Utility/Optimize.c new file mode 100644 index 0000000..d6c32ac --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Optimize.c @@ -0,0 +1,1195 @@ +/* + * Optional optimisations of built-in functions and methods. + * + * Required replacements of builtins are in Builtins.c. + * + * General object operations and protocols are in ObjectHandling.c. 
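+ *
+ * (Editorial note: Cython's code generator substitutes these helpers for
+ * common call patterns -- e.g. an "obj.append(x)" call may compile to
+ * __Pyx_PyObject_Append below, which special-cases exact lists and falls
+ * back to a generic method call otherwise.)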
+ */ + +/////////////// append.proto /////////////// + +static CYTHON_INLINE int __Pyx_PyObject_Append(PyObject* L, PyObject* x); /*proto*/ + +/////////////// append /////////////// +//@requires: ListAppend +//@requires: ObjectHandling.c::PyObjectCallMethod1 + +static CYTHON_INLINE int __Pyx_PyObject_Append(PyObject* L, PyObject* x) { + if (likely(PyList_CheckExact(L))) { + if (unlikely(__Pyx_PyList_Append(L, x) < 0)) return -1; + } else { + PyObject* retval = __Pyx_PyObject_CallMethod1(L, PYIDENT("append"), x); + if (unlikely(!retval)) + return -1; + Py_DECREF(retval); + } + return 0; +} + +/////////////// ListAppend.proto /////////////// + +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { + Py_INCREF(x); + PyList_SET_ITEM(list, len, x); + Py_SIZE(list) = len+1; + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) +#endif + +/////////////// ListCompAppend.proto /////////////// + +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len)) { + Py_INCREF(x); + PyList_SET_ITEM(list, len, x); + Py_SIZE(list) = len+1; + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_ListComp_Append(L,x) PyList_Append(L,x) +#endif + +//////////////////// ListExtend.proto //////////////////// + +static CYTHON_INLINE int __Pyx_PyList_Extend(PyObject* L, PyObject* v) { +#if CYTHON_COMPILING_IN_CPYTHON + PyObject* none = _PyList_Extend((PyListObject*)L, v); + if (unlikely(!none)) + return -1; + Py_DECREF(none); + return 0; +#else + return PyList_SetSlice(L, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, v); +#endif +} + +/////////////// pop.proto /////////////// + +static CYTHON_INLINE PyObject* __Pyx__PyObject_Pop(PyObject* L); /*proto*/ + +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE PyObject* __Pyx_PyList_Pop(PyObject* L); /*proto*/ +#define __Pyx_PyObject_Pop(L) (likely(PyList_CheckExact(L)) ? \ + __Pyx_PyList_Pop(L) : __Pyx__PyObject_Pop(L)) + +#else +#define __Pyx_PyList_Pop(L) __Pyx__PyObject_Pop(L) +#define __Pyx_PyObject_Pop(L) __Pyx__PyObject_Pop(L) +#endif + +/////////////// pop /////////////// +//@requires: ObjectHandling.c::PyObjectCallMethod0 + +static CYTHON_INLINE PyObject* __Pyx__PyObject_Pop(PyObject* L) { + if (Py_TYPE(L) == &PySet_Type) { + return PySet_Pop(L); + } + return __Pyx_PyObject_CallMethod0(L, PYIDENT("pop")); +} + +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE PyObject* __Pyx_PyList_Pop(PyObject* L) { + /* Check that both the size is positive and no reallocation shrinking needs to be done. 
*/ + if (likely(PyList_GET_SIZE(L) > (((PyListObject*)L)->allocated >> 1))) { + Py_SIZE(L) -= 1; + return PyList_GET_ITEM(L, PyList_GET_SIZE(L)); + } + return CALL_UNBOUND_METHOD(PyList_Type, "pop", L); +} +#endif + + +/////////////// pop_index.proto /////////////// + +static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix); /*proto*/ +static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix); /*proto*/ + +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix); /*proto*/ + +#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) ( \ + (likely(PyList_CheckExact(L) && __Pyx_fits_Py_ssize_t(ix, type, is_signed))) ? \ + __Pyx__PyList_PopIndex(L, py_ix, ix) : ( \ + (unlikely((py_ix) == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) : \ + __Pyx__PyObject_PopIndex(L, py_ix))) + +#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) ( \ + __Pyx_fits_Py_ssize_t(ix, type, is_signed) ? \ + __Pyx__PyList_PopIndex(L, py_ix, ix) : ( \ + (unlikely((py_ix) == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) : \ + __Pyx__PyObject_PopIndex(L, py_ix))) + +#else + +#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) \ + __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) + +#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) ( \ + (unlikely((py_ix) == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) : \ + __Pyx__PyObject_PopIndex(L, py_ix)) +#endif + +/////////////// pop_index /////////////// +//@requires: ObjectHandling.c::PyObjectCallMethod1 + +static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix) { + PyObject *r; + if (unlikely(!py_ix)) return NULL; + r = __Pyx__PyObject_PopIndex(L, py_ix); + Py_DECREF(py_ix); + return r; +} + +static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix) { + return __Pyx_PyObject_CallMethod1(L, PYIDENT("pop"), py_ix); +} + +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix) { + Py_ssize_t size = PyList_GET_SIZE(L); + if (likely(size > (((PyListObject*)L)->allocated >> 1))) { + Py_ssize_t cix = ix; + if (cix < 0) { + cix += size; + } + if (likely(__Pyx_is_valid_index(cix, size))) { + PyObject* v = PyList_GET_ITEM(L, cix); + Py_SIZE(L) -= 1; + size -= 1; + memmove(&PyList_GET_ITEM(L, cix), &PyList_GET_ITEM(L, cix+1), (size_t)(size-cix)*sizeof(PyObject*)); + return v; + } + } + if (py_ix == Py_None) { + return __Pyx__PyObject_PopNewIndex(L, PyInt_FromSsize_t(ix)); + } else { + return __Pyx__PyObject_PopIndex(L, py_ix); + } +} +#endif + + +/////////////// dict_getitem_default.proto /////////////// + +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); /*proto*/ + +/////////////// dict_getitem_default /////////////// + +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) { + PyObject* value; +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (unlikely(PyErr_Occurred())) + return NULL; + value = default_value; + } + Py_INCREF(value); + // avoid C compiler warning about unused utility functions + if ((1)); +#else + if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) { + /* these presumably have safe hash 
functions */ + value = PyDict_GetItem(d, key); + if (unlikely(!value)) { + value = default_value; + } + Py_INCREF(value); + } +#endif + else { + if (default_value == Py_None) + value = CALL_UNBOUND_METHOD(PyDict_Type, "get", d, key); + else + value = CALL_UNBOUND_METHOD(PyDict_Type, "get", d, key, default_value); + } + return value; +} + + +/////////////// dict_setdefault.proto /////////////// + +static CYTHON_INLINE PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *default_value, int is_safe_type); /*proto*/ + +/////////////// dict_setdefault /////////////// + +static CYTHON_INLINE PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *default_value, + CYTHON_UNUSED int is_safe_type) { + PyObject* value; +#if PY_VERSION_HEX >= 0x030400A0 + // we keep the method call at the end to avoid "unused" C compiler warnings + if ((1)) { + value = PyDict_SetDefault(d, key, default_value); + if (unlikely(!value)) return NULL; + Py_INCREF(value); +#else + if (is_safe_type == 1 || (is_safe_type == -1 && + /* the following builtins presumably have repeatably safe and fast hash functions */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY + (PyUnicode_CheckExact(key) || PyString_CheckExact(key) || PyLong_CheckExact(key)))) { + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (unlikely(PyErr_Occurred())) + return NULL; + if (unlikely(PyDict_SetItem(d, key, default_value) == -1)) + return NULL; + value = default_value; + } + Py_INCREF(value); +#else + (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key) || PyLong_CheckExact(key)))) { + value = PyDict_GetItem(d, key); + if (unlikely(!value)) { + if (unlikely(PyDict_SetItem(d, key, default_value) == -1)) + return NULL; + value = default_value; + } + Py_INCREF(value); +#endif +#endif + } else { + value = CALL_UNBOUND_METHOD(PyDict_Type, "setdefault", d, key, default_value); + } + return value; +} + + +/////////////// py_dict_clear.proto /////////////// + +#define __Pyx_PyDict_Clear(d) (PyDict_Clear(d), 0) + + +/////////////// py_dict_pop.proto /////////////// + +static CYTHON_INLINE PyObject *__Pyx_PyDict_Pop(PyObject *d, PyObject *key, PyObject *default_value); /*proto*/ + +/////////////// py_dict_pop /////////////// + +static CYTHON_INLINE PyObject *__Pyx_PyDict_Pop(PyObject *d, PyObject *key, PyObject *default_value) { +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B3 + if ((1)) { + return _PyDict_Pop(d, key, default_value); + } else + // avoid "function unused" warnings +#endif + if (default_value) { + return CALL_UNBOUND_METHOD(PyDict_Type, "pop", d, key, default_value); + } else { + return CALL_UNBOUND_METHOD(PyDict_Type, "pop", d, key); + } +} + + +/////////////// dict_iter.proto /////////////// + +static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name, + Py_ssize_t* p_orig_length, int* p_is_dict); +static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos, + PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict); + +/////////////// dict_iter /////////////// +//@requires: ObjectHandling.c::UnpackTuple2 +//@requires: ObjectHandling.c::IterFinish +//@requires: ObjectHandling.c::PyObjectCallMethod0 + +static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name, + Py_ssize_t* p_orig_length, int* p_source_is_dict) { + is_dict = is_dict || likely(PyDict_CheckExact(iterable)); + *p_source_is_dict 
= is_dict; + if (is_dict) { +#if !CYTHON_COMPILING_IN_PYPY + *p_orig_length = PyDict_Size(iterable); + Py_INCREF(iterable); + return iterable; +#elif PY_MAJOR_VERSION >= 3 + // On PyPy3, we need to translate manually a few method names. + // This logic is not needed on CPython thanks to the fast case above. + static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL; + PyObject **pp = NULL; + if (method_name) { + const char *name = PyUnicode_AsUTF8(method_name); + if (strcmp(name, "iteritems") == 0) pp = &py_items; + else if (strcmp(name, "iterkeys") == 0) pp = &py_keys; + else if (strcmp(name, "itervalues") == 0) pp = &py_values; + if (pp) { + if (!*pp) { + *pp = PyUnicode_FromString(name + 4); + if (!*pp) + return NULL; + } + method_name = *pp; + } + } +#endif + } + *p_orig_length = 0; + if (method_name) { + PyObject* iter; + iterable = __Pyx_PyObject_CallMethod0(iterable, method_name); + if (!iterable) + return NULL; +#if !CYTHON_COMPILING_IN_PYPY + if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable)) + return iterable; +#endif + iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + return iter; + } + return PyObject_GetIter(iterable); +} + +static CYTHON_INLINE int __Pyx_dict_iter_next( + PyObject* iter_obj, CYTHON_NCP_UNUSED Py_ssize_t orig_length, CYTHON_NCP_UNUSED Py_ssize_t* ppos, + PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) { + PyObject* next_item; +#if !CYTHON_COMPILING_IN_PYPY + if (source_is_dict) { + PyObject *key, *value; + if (unlikely(orig_length != PyDict_Size(iter_obj))) { + PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); + return -1; + } + if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) { + return 0; + } + if (pitem) { + PyObject* tuple = PyTuple_New(2); + if (unlikely(!tuple)) { + return -1; + } + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(tuple, 0, key); + PyTuple_SET_ITEM(tuple, 1, value); + *pitem = tuple; + } else { + if (pkey) { + Py_INCREF(key); + *pkey = key; + } + if (pvalue) { + Py_INCREF(value); + *pvalue = value; + } + } + return 1; + } else if (PyTuple_CheckExact(iter_obj)) { + Py_ssize_t pos = *ppos; + if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0; + *ppos = pos + 1; + next_item = PyTuple_GET_ITEM(iter_obj, pos); + Py_INCREF(next_item); + } else if (PyList_CheckExact(iter_obj)) { + Py_ssize_t pos = *ppos; + if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0; + *ppos = pos + 1; + next_item = PyList_GET_ITEM(iter_obj, pos); + Py_INCREF(next_item); + } else +#endif + { + next_item = PyIter_Next(iter_obj); + if (unlikely(!next_item)) { + return __Pyx_IterFinish(); + } + } + if (pitem) { + *pitem = next_item; + } else if (pkey && pvalue) { + if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1)) + return -1; + } else if (pkey) { + *pkey = next_item; + } else { + *pvalue = next_item; + } + return 1; +} + + +/////////////// set_iter.proto /////////////// + +static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set, + Py_ssize_t* p_orig_length, int* p_source_is_set); /*proto*/ +static CYTHON_INLINE int __Pyx_set_iter_next( + PyObject* iter_obj, Py_ssize_t orig_length, + Py_ssize_t* ppos, PyObject **value, + int source_is_set); /*proto*/ + +/////////////// set_iter /////////////// +//@requires: ObjectHandling.c::IterFinish + +static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set, + Py_ssize_t* p_orig_length, int* p_source_is_set) { +#if 
CYTHON_COMPILING_IN_CPYTHON + is_set = is_set || likely(PySet_CheckExact(iterable) || PyFrozenSet_CheckExact(iterable)); + *p_source_is_set = is_set; + if (likely(is_set)) { + *p_orig_length = PySet_Size(iterable); + Py_INCREF(iterable); + return iterable; + } +#else + (void)is_set; + *p_source_is_set = 0; +#endif + *p_orig_length = 0; + return PyObject_GetIter(iterable); +} + +static CYTHON_INLINE int __Pyx_set_iter_next( + PyObject* iter_obj, Py_ssize_t orig_length, + Py_ssize_t* ppos, PyObject **value, + int source_is_set) { + if (!CYTHON_COMPILING_IN_CPYTHON || unlikely(!source_is_set)) { + *value = PyIter_Next(iter_obj); + if (unlikely(!*value)) { + return __Pyx_IterFinish(); + } + (void)orig_length; + (void)ppos; + return 1; + } +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(PySet_GET_SIZE(iter_obj) != orig_length)) { + PyErr_SetString( + PyExc_RuntimeError, + "set changed size during iteration"); + return -1; + } + { + Py_hash_t hash; + int ret = _PySet_NextEntry(iter_obj, ppos, value, &hash); + // CPython does not raise errors here, only if !isinstance(iter_obj, set/frozenset) + assert (ret != -1); + if (likely(ret)) { + Py_INCREF(*value); + return 1; + } + } +#endif + return 0; +} + +/////////////// py_set_discard_unhashable /////////////// +//@requires: Builtins.c::pyfrozenset_new + +static int __Pyx_PySet_DiscardUnhashable(PyObject *set, PyObject *key) { + PyObject *tmpkey; + int rv; + + if (likely(!PySet_Check(key) || !PyErr_ExceptionMatches(PyExc_TypeError))) + return -1; + PyErr_Clear(); + tmpkey = __Pyx_PyFrozenSet_New(key); + if (tmpkey == NULL) + return -1; + rv = PySet_Discard(set, tmpkey); + Py_DECREF(tmpkey); + return rv; +} + + +/////////////// py_set_discard.proto /////////////// + +static CYTHON_INLINE int __Pyx_PySet_Discard(PyObject *set, PyObject *key); /*proto*/ + +/////////////// py_set_discard /////////////// +//@requires: py_set_discard_unhashable + +static CYTHON_INLINE int __Pyx_PySet_Discard(PyObject *set, PyObject *key) { + int found = PySet_Discard(set, key); + // Convert *key* to frozenset if necessary + if (unlikely(found < 0)) { + found = __Pyx_PySet_DiscardUnhashable(set, key); + } + // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works + return found; +} + + +/////////////// py_set_remove.proto /////////////// + +static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key); /*proto*/ + +/////////////// py_set_remove /////////////// +//@requires: py_set_discard_unhashable + +static int __Pyx_PySet_RemoveNotFound(PyObject *set, PyObject *key, int found) { + // Convert *key* to frozenset if necessary + if (unlikely(found < 0)) { + found = __Pyx_PySet_DiscardUnhashable(set, key); + } + if (likely(found == 0)) { + // Not found + PyObject *tup; + tup = PyTuple_Pack(1, key); + if (!tup) + return -1; + PyErr_SetObject(PyExc_KeyError, tup); + Py_DECREF(tup); + return -1; + } + // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works + return found; +} + +static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key) { + int found = PySet_Discard(set, key); + if (unlikely(found != 1)) { + // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works + return __Pyx_PySet_RemoveNotFound(set, key, found); + } + return 0; +} + + +/////////////// unicode_iter.proto /////////////// + +static CYTHON_INLINE int __Pyx_init_unicode_iteration( + PyObject* ustring, Py_ssize_t *length, void** data, int *kind); /* 
proto */
+
+/////////////// unicode_iter ///////////////
+
+static CYTHON_INLINE int __Pyx_init_unicode_iteration(
+    PyObject* ustring, Py_ssize_t *length, void** data, int *kind) {
+#if CYTHON_PEP393_ENABLED
+    if (unlikely(__Pyx_PyUnicode_READY(ustring) < 0)) return -1;
+    *kind = PyUnicode_KIND(ustring);
+    *length = PyUnicode_GET_LENGTH(ustring);
+    *data = PyUnicode_DATA(ustring);
+#else
+    *kind = 0;
+    *length = PyUnicode_GET_SIZE(ustring);
+    *data = (void*)PyUnicode_AS_UNICODE(ustring);
+#endif
+    return 0;
+}
+
+/////////////// pyobject_as_double.proto ///////////////
+
+static double __Pyx__PyObject_AsDouble(PyObject* obj); /* proto */
+
+#if CYTHON_COMPILING_IN_PYPY
+#define __Pyx_PyObject_AsDouble(obj) \
+(likely(PyFloat_CheckExact(obj)) ? PyFloat_AS_DOUBLE(obj) : \
+ likely(PyInt_CheckExact(obj)) ? \
+ PyFloat_AsDouble(obj) : __Pyx__PyObject_AsDouble(obj))
+#else
+#define __Pyx_PyObject_AsDouble(obj) \
+((likely(PyFloat_CheckExact(obj))) ? \
+ PyFloat_AS_DOUBLE(obj) : __Pyx__PyObject_AsDouble(obj))
+#endif
+
+/////////////// pyobject_as_double ///////////////
+
+static double __Pyx__PyObject_AsDouble(PyObject* obj) {
+    PyObject* float_value;
+#if !CYTHON_USE_TYPE_SLOTS
+    float_value = PyNumber_Float(obj);  if ((0)) goto bad;
+#else
+    PyNumberMethods *nb = Py_TYPE(obj)->tp_as_number;
+    if (likely(nb) && likely(nb->nb_float)) {
+        float_value = nb->nb_float(obj);
+        if (likely(float_value) && unlikely(!PyFloat_Check(float_value))) {
+            PyErr_Format(PyExc_TypeError,
+                "__float__ returned non-float (type %.200s)",
+                Py_TYPE(float_value)->tp_name);
+            Py_DECREF(float_value);
+            goto bad;
+        }
+    } else if (PyUnicode_CheckExact(obj) || PyBytes_CheckExact(obj)) {
+#if PY_MAJOR_VERSION >= 3
+        float_value = PyFloat_FromString(obj);
+#else
+        float_value = PyFloat_FromString(obj, 0);
+#endif
+    } else {
+        PyObject* args = PyTuple_New(1);
+        if (unlikely(!args)) goto bad;
+        PyTuple_SET_ITEM(args, 0, obj);
+        float_value = PyObject_Call((PyObject*)&PyFloat_Type, args, 0);
+        PyTuple_SET_ITEM(args, 0, 0);
+        Py_DECREF(args);
+    }
+#endif
+    if (likely(float_value)) {
+        double value = PyFloat_AS_DOUBLE(float_value);
+        Py_DECREF(float_value);
+        return value;
+    }
+bad:
+    return (double)-1;
+}
+
+
+/////////////// PyNumberPow2.proto ///////////////
+
+#define __Pyx_PyNumber_InPlacePowerOf2(a, b, c) __Pyx__PyNumber_PowerOf2(a, b, c, 1)
+#define __Pyx_PyNumber_PowerOf2(a, b, c) __Pyx__PyNumber_PowerOf2(a, b, c, 0)
+
+static PyObject* __Pyx__PyNumber_PowerOf2(PyObject *two, PyObject *exp, PyObject *none, int inplace); /*proto*/
+
+/////////////// PyNumberPow2 ///////////////
+
+static PyObject* __Pyx__PyNumber_PowerOf2(PyObject *two, PyObject *exp, PyObject *none, int inplace) {
+// in CPython, 1<<N is substantially faster than 2**N
+// see http://bugs.python.org/issue21420
+#if !CYTHON_COMPILING_IN_PYPY
+    Py_ssize_t shiftby;
+#if PY_MAJOR_VERSION < 3
+    if (likely(PyInt_CheckExact(exp))) {
+        shiftby = PyInt_AS_LONG(exp);
+    } else
+#endif
+    if (likely(PyLong_CheckExact(exp))) {
+        #if CYTHON_USE_PYLONG_INTERNALS
+        const Py_ssize_t size = Py_SIZE(exp);
+        // tuned to optimise branch prediction
+        if (likely(size == 1)) {
+            shiftby = ((PyLongObject*)exp)->ob_digit[0];
+        } else if (size == 0) {
+            return PyInt_FromLong(1L);
+        } else if (unlikely(size < 0)) {
+            goto fallback;
+        } else {
+            shiftby = PyLong_AsSsize_t(exp);
+        }
+        #else
+        shiftby = PyLong_AsSsize_t(exp);
+        #endif
+    } else {
+        goto fallback;
+    }
+    if (likely(shiftby >= 0)) {
+        if ((size_t)shiftby <= sizeof(long) * 8 - 2) {
+            long value = 1L << shiftby;
+            return PyInt_FromLong(value);
+#ifdef HAVE_LONG_LONG
+        } else if ((size_t)shiftby <= sizeof(unsigned PY_LONG_LONG) * 8 - 1) {
+            unsigned PY_LONG_LONG value = ((unsigned PY_LONG_LONG)1) << shiftby;
+            return PyLong_FromUnsignedLongLong(value);
+#endif
+        } else {
+            PyObject *result, *one = PyInt_FromLong(1L);
+            if (unlikely(!one)) return NULL;
+            result = PyNumber_Lshift(one, exp);
+            Py_DECREF(one);
+            return result;
+        }
+    } else if (shiftby == -1 && PyErr_Occurred()) {
+        
PyErr_Clear(); + } +fallback: +#endif + return (inplace ? PyNumber_InPlacePower : PyNumber_Power)(two, exp, none); +} + + +/////////////// PyIntCompare.proto /////////////// + +{{py: c_ret_type = 'PyObject*' if ret_type.is_pyobject else 'int'}} +static CYTHON_INLINE {{c_ret_type}} __Pyx_PyInt_{{'' if ret_type.is_pyobject else 'Bool'}}{{op}}{{order}}(PyObject *op1, PyObject *op2, long intval, long inplace); /*proto*/ + +/////////////// PyIntCompare /////////////// + +{{py: pyval, ival = ('op2', 'b') if order == 'CObj' else ('op1', 'a') }} +{{py: c_ret_type = 'PyObject*' if ret_type.is_pyobject else 'int'}} +{{py: return_true = 'Py_RETURN_TRUE' if ret_type.is_pyobject else 'return 1'}} +{{py: return_false = 'Py_RETURN_FALSE' if ret_type.is_pyobject else 'return 0'}} +{{py: slot_name = op.lower() }} +{{py: c_op = {'Eq': '==', 'Ne': '!='}[op] }} +{{py: +return_compare = ( + (lambda a,b,c_op, return_true=return_true, return_false=return_false: "if ({a} {c_op} {b}) {return_true}; else {return_false};".format( + a=a, b=b, c_op=c_op, return_true=return_true, return_false=return_false)) + if ret_type.is_pyobject else + (lambda a,b,c_op: "return ({a} {c_op} {b});".format(a=a, b=b, c_op=c_op)) + ) +}} + +static CYTHON_INLINE {{c_ret_type}} __Pyx_PyInt_{{'' if ret_type.is_pyobject else 'Bool'}}{{op}}{{order}}(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED long inplace) { + if (op1 == op2) { + {{return_true if op == 'Eq' else return_false}}; + } + + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact({{pyval}}))) { + const long {{'a' if order == 'CObj' else 'b'}} = intval; + long {{ival}} = PyInt_AS_LONG({{pyval}}); + {{return_compare('a', 'b', c_op)}} + } + #endif + + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact({{pyval}}))) { + int unequal; + unsigned long uintval; + Py_ssize_t size = Py_SIZE({{pyval}}); + const digit* digits = ((PyLongObject*){{pyval}})->ob_digit; + if (intval == 0) { + // == 0 => Py_SIZE(pyval) == 0 + {{return_compare('size', '0', c_op)}} + } else if (intval < 0) { + // < 0 => Py_SIZE(pyval) < 0 + if (size >= 0) + {{return_false if op == 'Eq' else return_true}}; + // both are negative => can use absolute values now. + intval = -intval; + size = -size; + } else { + // > 0 => Py_SIZE(pyval) > 0 + if (size <= 0) + {{return_false if op == 'Eq' else return_true}}; + } + // After checking that the sign is the same (and excluding 0), now compare the absolute values. + // When inlining, the C compiler should select exactly one line from this unrolled loop. + uintval = (unsigned long) intval; + {{for _size in range(4, 0, -1)}} +#if PyLong_SHIFT * {{_size}} < SIZEOF_LONG*8 + if (uintval >> (PyLong_SHIFT * {{_size}})) { + // The C integer value is between (PyLong_BASE ** _size) and MIN(PyLong_BASE ** _size, LONG_MAX). 
+ unequal = (size != {{_size+1}}) || (digits[0] != (uintval & (unsigned long) PyLong_MASK)) + {{for _i in range(1, _size+1)}} | (digits[{{_i}}] != ((uintval >> ({{_i}} * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)){{endfor}}; + } else +#endif + {{endfor}} + unequal = (size != 1) || (((unsigned long) digits[0]) != (uintval & (unsigned long) PyLong_MASK)); + + {{return_compare('unequal', '0', c_op)}} + } + #endif + + if (PyFloat_CheckExact({{pyval}})) { + const long {{'a' if order == 'CObj' else 'b'}} = intval; + double {{ival}} = PyFloat_AS_DOUBLE({{pyval}}); + {{return_compare('(double)a', '(double)b', c_op)}} + } + + return {{'' if ret_type.is_pyobject else '__Pyx_PyObject_IsTrueAndDecref'}}( + PyObject_RichCompare(op1, op2, Py_{{op.upper()}})); +} + + +/////////////// PyIntBinop.proto /////////////// + +{{py: c_ret_type = 'PyObject*' if ret_type.is_pyobject else 'int'}} +#if !CYTHON_COMPILING_IN_PYPY +static {{c_ret_type}} __Pyx_PyInt_{{'' if ret_type.is_pyobject else 'Bool'}}{{op}}{{order}}(PyObject *op1, PyObject *op2, long intval, int inplace, int zerodivision_check); /*proto*/ +#else +#define __Pyx_PyInt_{{'' if ret_type.is_pyobject else 'Bool'}}{{op}}{{order}}(op1, op2, intval, inplace, zerodivision_check) \ + {{if op in ('Eq', 'Ne')}}{{'' if ret_type.is_pyobject else '__Pyx_PyObject_IsTrueAndDecref'}}(PyObject_RichCompare(op1, op2, Py_{{op.upper()}})) + {{else}}(inplace ? PyNumber_InPlace{{op}}(op1, op2) : PyNumber_{{op}}(op1, op2)) + {{endif}} +#endif + +/////////////// PyIntBinop /////////////// + +#if !CYTHON_COMPILING_IN_PYPY +{{py: from Cython.Utility import pylong_join }} +{{py: pyval, ival = ('op2', 'b') if order == 'CObj' else ('op1', 'a') }} +{{py: c_ret_type = 'PyObject*' if ret_type.is_pyobject else 'int'}} +{{py: return_true = 'Py_RETURN_TRUE' if ret_type.is_pyobject else 'return 1'}} +{{py: return_false = 'Py_RETURN_FALSE' if ret_type.is_pyobject else 'return 0'}} +{{py: slot_name = {'TrueDivide': 'true_divide', 'FloorDivide': 'floor_divide'}.get(op, op.lower()) }} +{{py: cfunc_name = '__Pyx_PyInt_%s%s%s' % ('' if ret_type.is_pyobject else 'Bool', op, order)}} +{{py: zerodiv_check = lambda operand, _cfunc_name=cfunc_name: '%s_ZeroDivisionError(%s)' % (_cfunc_name, operand)}} +{{py: +c_op = { + 'Add': '+', 'Subtract': '-', 'Remainder': '%', 'TrueDivide': '/', 'FloorDivide': '/', + 'Or': '|', 'Xor': '^', 'And': '&', 'Rshift': '>>', 'Lshift': '<<', + 'Eq': '==', 'Ne': '!=', + }[op] +}} + +{{if op in ('TrueDivide', 'FloorDivide', 'Remainder')}} +#if PY_MAJOR_VERSION < 3 || CYTHON_USE_PYLONG_INTERNALS +#define {{zerodiv_check('operand')}} \ + if (unlikely(zerodivision_check && ((operand) == 0))) { \ + PyErr_SetString(PyExc_ZeroDivisionError, "integer division{{if op == 'Remainder'}} or modulo{{endif}} by zero"); \ + return NULL; \ + } +#endif +{{endif}} + +static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, int inplace, int zerodivision_check) { + // Prevent "unused" warnings. 
+ (void)inplace; + (void)zerodivision_check; + + {{if op in ('Eq', 'Ne')}} + if (op1 == op2) { + {{return_true if op == 'Eq' else return_false}}; + } + {{endif}} + + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact({{pyval}}))) { + const long {{'a' if order == 'CObj' else 'b'}} = intval; + {{if c_op in '+-%' or op == 'FloorDivide'}} + long x; + {{endif}} + long {{ival}} = PyInt_AS_LONG({{pyval}}); + + {{if op in ('Eq', 'Ne')}} + if (a {{c_op}} b) { + {{return_true}}; + } else { + {{return_false}}; + } + {{elif c_op in '+-'}} + // adapted from intobject.c in Py2.7: + // casts in the line below avoid undefined behaviour on overflow + x = (long)((unsigned long)a {{c_op}} b); + if (likely((x^a) >= 0 || (x^{{ '~' if op == 'Subtract' else '' }}b) >= 0)) + return PyInt_FromLong(x); + return PyLong_Type.tp_as_number->nb_{{slot_name}}(op1, op2); + {{elif c_op == '%'}} + {{zerodiv_check('b')}} + // see ExprNodes.py :: mod_int_utility_code + x = a % b; + x += ((x != 0) & ((x ^ b) < 0)) * b; + return PyInt_FromLong(x); + {{elif op == 'TrueDivide'}} + {{zerodiv_check('b')}} + if (8 * sizeof(long) <= 53 || likely(labs({{ival}}) <= ((PY_LONG_LONG)1 << 53))) { + return PyFloat_FromDouble((double)a / (double)b); + } + // let Python do the rounding + return PyInt_Type.tp_as_number->nb_{{slot_name}}(op1, op2); + {{elif op == 'FloorDivide'}} + // INT_MIN / -1 is the only case that overflows, b == 0 is an error case + {{zerodiv_check('b')}} + if (unlikely(b == -1 && ((unsigned long)a) == 0-(unsigned long)a)) + return PyInt_Type.tp_as_number->nb_{{slot_name}}(op1, op2); + else { + long q, r; + // see ExprNodes.py :: div_int_utility_code + q = a / b; + r = a - q*b; + q -= ((r != 0) & ((r ^ b) < 0)); + x = q; + } + return PyInt_FromLong(x); + {{elif op == 'Lshift'}} + if (likely(b < (long) (sizeof(long)*8) && a == (a << b) >> b) || !a) { + return PyInt_FromLong(a {{c_op}} b); + } + {{else}} + // other operations are safe, no overflow + return PyInt_FromLong(a {{c_op}} b); + {{endif}} + } + #endif + + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact({{pyval}}))) { + const long {{'a' if order == 'CObj' else 'b'}} = intval; + long {{ival}}{{if op not in ('Eq', 'Ne')}}, x{{endif}}; + {{if op not in ('Eq', 'Ne', 'TrueDivide')}} +#ifdef HAVE_LONG_LONG + const PY_LONG_LONG ll{{'a' if order == 'CObj' else 'b'}} = intval; + PY_LONG_LONG ll{{ival}}, llx; +#endif + {{endif}} + const digit* digits = ((PyLongObject*){{pyval}})->ob_digit; + const Py_ssize_t size = Py_SIZE({{pyval}}); + // handle most common case first to avoid indirect branch and optimise branch prediction + if (likely(__Pyx_sst_abs(size) <= 1)) { + {{ival}} = likely(size) ? 
digits[0] : 0; + if (size == -1) {{ival}} = -{{ival}}; + } else { + switch (size) { + {{for _size in range(2, 5)}} + {{for _case in (-_size, _size)}} + case {{_case}}: + if (8 * sizeof(long) - 1 > {{_size}} * PyLong_SHIFT{{if op == 'TrueDivide'}} && {{_size-1}} * PyLong_SHIFT < 53{{endif}}) { + {{ival}} = {{'-' if _case < 0 else ''}}(long) {{pylong_join(_size, 'digits')}}; + break; + {{if op not in ('Eq', 'Ne', 'TrueDivide')}} +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > {{_size}} * PyLong_SHIFT) { + ll{{ival}} = {{'-' if _case < 0 else ''}}(PY_LONG_LONG) {{pylong_join(_size, 'digits', 'unsigned PY_LONG_LONG')}}; + goto long_long; +#endif + {{endif}} + } + // if size doesn't fit into a long or PY_LONG_LONG anymore, fall through to default + CYTHON_FALLTHROUGH; + {{endfor}} + {{endfor}} + + {{if op in ('Eq', 'Ne')}} + #if PyLong_SHIFT < 30 && PyLong_SHIFT != 15 + // unusual setup - your fault + default: return {{'' if ret_type.is_pyobject else '__Pyx_PyObject_IsTrueAndDecref'}}( + PyLong_Type.tp_richcompare({{'op1, op2' if order == 'ObjC' else 'op2, op1'}}, Py_{{op.upper()}})); + #else + // too large for the long values we allow => definitely not equal + default: {{return_false if op == 'Eq' else return_true}}; + #endif + {{else}} + default: return PyLong_Type.tp_as_number->nb_{{slot_name}}(op1, op2); + {{endif}} + } + } + {{if op in ('Eq', 'Ne')}} + if (a {{c_op}} b) { + {{return_true}}; + } else { + {{return_false}}; + } + {{else}} + {{if c_op == '%'}} + {{zerodiv_check('b')}} + // see ExprNodes.py :: mod_int_utility_code + x = a % b; + x += ((x != 0) & ((x ^ b) < 0)) * b; + {{elif op == 'TrueDivide'}} + {{zerodiv_check('b')}} + if ((8 * sizeof(long) <= 53 || likely(labs({{ival}}) <= ((PY_LONG_LONG)1 << 53))) + || __Pyx_sst_abs(size) <= 52 / PyLong_SHIFT) { + return PyFloat_FromDouble((double)a / (double)b); + } + return PyLong_Type.tp_as_number->nb_{{slot_name}}(op1, op2); + {{elif op == 'FloorDivide'}} + {{zerodiv_check('b')}} + { + long q, r; + // see ExprNodes.py :: div_int_utility_code + q = a / b; + r = a - q*b; + q -= ((r != 0) & ((r ^ b) < 0)); + x = q; + } + {{else}} + x = a {{c_op}} b; + {{if op == 'Lshift'}} +#ifdef HAVE_LONG_LONG + if (unlikely(!(b < (long) (sizeof(long)*8) && a == x >> b)) && a) { + ll{{ival}} = {{ival}}; + goto long_long; + } +#else + if (likely(b < (long) (sizeof(long)*8) && a == x >> b) || !a) /* execute return statement below */ +#endif + {{endif}} + {{endif}} + return PyLong_FromLong(x); + + {{if op != 'TrueDivide'}} +#ifdef HAVE_LONG_LONG + long_long: + {{if c_op == '%'}} + // see ExprNodes.py :: mod_int_utility_code + llx = lla % llb; + llx += ((llx != 0) & ((llx ^ llb) < 0)) * llb; + {{elif op == 'FloorDivide'}} + { + PY_LONG_LONG q, r; + // see ExprNodes.py :: div_int_utility_code + q = lla / llb; + r = lla - q*llb; + q -= ((r != 0) & ((r ^ llb) < 0)); + llx = q; + } + {{else}} + llx = lla {{c_op}} llb; + {{if op == 'Lshift'}} + if (likely(lla == llx >> llb)) /* then execute 'return' below */ + {{endif}} + {{endif}} + return PyLong_FromLongLong(llx); +#endif + {{endif}}{{# if op != 'TrueDivide' #}} + {{endif}}{{# if op in ('Eq', 'Ne') #}} + } + #endif + + {{if c_op in '+-' or op in ('TrueDivide', 'Eq', 'Ne')}} + if (PyFloat_CheckExact({{pyval}})) { + const long {{'a' if order == 'CObj' else 'b'}} = intval; + double {{ival}} = PyFloat_AS_DOUBLE({{pyval}}); + {{if op in ('Eq', 'Ne')}} + if ((double)a {{c_op}} (double)b) { + {{return_true}}; + } else { + {{return_false}}; + } + {{else}} + double result; + {{if op == 'TrueDivide'}} 
+ if (unlikely(zerodivision_check && b == 0)) { + PyErr_SetString(PyExc_ZeroDivisionError, "float division by zero"); + return NULL; + } + {{endif}} + // copied from floatobject.c in Py3.5: + PyFPE_START_PROTECT("{{op.lower() if not op.endswith('Divide') else 'divide'}}", return NULL) + result = ((double)a) {{c_op}} (double)b; + PyFPE_END_PROTECT(result) + return PyFloat_FromDouble(result); + {{endif}} + } + {{endif}} + + {{if op in ('Eq', 'Ne')}} + return {{'' if ret_type.is_pyobject else '__Pyx_PyObject_IsTrueAndDecref'}}( + PyObject_RichCompare(op1, op2, Py_{{op.upper()}})); + {{else}} + return (inplace ? PyNumber_InPlace{{op}} : PyNumber_{{op}})(op1, op2); + {{endif}} +} +#endif + +/////////////// PyFloatBinop.proto /////////////// + +{{py: c_ret_type = 'PyObject*' if ret_type.is_pyobject else 'int'}} +#if !CYTHON_COMPILING_IN_PYPY +static {{c_ret_type}} __Pyx_PyFloat_{{'' if ret_type.is_pyobject else 'Bool'}}{{op}}{{order}}(PyObject *op1, PyObject *op2, double floatval, int inplace, int zerodivision_check); /*proto*/ +#else +#define __Pyx_PyFloat_{{'' if ret_type.is_pyobject else 'Bool'}}{{op}}{{order}}(op1, op2, floatval, inplace, zerodivision_check) \ + {{if op in ('Eq', 'Ne')}}{{'' if ret_type.is_pyobject else '__Pyx_PyObject_IsTrueAndDecref'}}(PyObject_RichCompare(op1, op2, Py_{{op.upper()}})) + {{elif op == 'Divide'}}((inplace ? __Pyx_PyNumber_InPlaceDivide(op1, op2) : __Pyx_PyNumber_Divide(op1, op2))) + {{else}}(inplace ? PyNumber_InPlace{{op}}(op1, op2) : PyNumber_{{op}}(op1, op2)) + {{endif}} +#endif + +/////////////// PyFloatBinop /////////////// + +#if !CYTHON_COMPILING_IN_PYPY +{{py: from Cython.Utility import pylong_join }} +{{py: c_ret_type = 'PyObject*' if ret_type.is_pyobject else 'int'}} +{{py: return_true = 'Py_RETURN_TRUE' if ret_type.is_pyobject else 'return 1'}} +{{py: return_false = 'Py_RETURN_FALSE' if ret_type.is_pyobject else 'return 0'}} +{{py: pyval, fval = ('op2', 'b') if order == 'CObj' else ('op1', 'a') }} +{{py: cfunc_name = '__Pyx_PyFloat_%s%s%s' % ('' if ret_type.is_pyobject else 'Bool', op, order) }} +{{py: zerodiv_check = lambda operand, _cfunc_name=cfunc_name: '%s_ZeroDivisionError(%s)' % (_cfunc_name, operand)}} +{{py: +c_op = { + 'Add': '+', 'Subtract': '-', 'TrueDivide': '/', 'Divide': '/', 'Remainder': '%', + 'Eq': '==', 'Ne': '!=', + }[op] +}} + +{{if order == 'CObj' and c_op in '%/'}} +#define {{zerodiv_check('operand')}} if (unlikely(zerodivision_check && ((operand) == 0))) { \ + PyErr_SetString(PyExc_ZeroDivisionError, "float division{{if op == 'Remainder'}} or modulo{{endif}} by zero"); \ + return NULL; \ +} +{{endif}} + +static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, double floatval, int inplace, int zerodivision_check) { + const double {{'a' if order == 'CObj' else 'b'}} = floatval; + double {{fval}}{{if op not in ('Eq', 'Ne')}}, result{{endif}}; + // Prevent "unused" warnings. 
+ (void)inplace; + (void)zerodivision_check; + + {{if op in ('Eq', 'Ne')}} + if (op1 == op2) { + {{return_true if op == 'Eq' else return_false}}; + } + {{endif}} + + if (likely(PyFloat_CheckExact({{pyval}}))) { + {{fval}} = PyFloat_AS_DOUBLE({{pyval}}); + {{if order == 'CObj' and c_op in '%/'}}{{zerodiv_check(fval)}}{{endif}} + } else + + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact({{pyval}}))) { + {{fval}} = (double) PyInt_AS_LONG({{pyval}}); + {{if order == 'CObj' and c_op in '%/'}}{{zerodiv_check(fval)}}{{endif}} + } else + #endif + + if (likely(PyLong_CheckExact({{pyval}}))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*){{pyval}})->ob_digit; + const Py_ssize_t size = Py_SIZE({{pyval}}); + switch (size) { + case 0: {{if order == 'CObj' and c_op in '%/'}}{{zerodiv_check('0')}}{{else}}{{fval}} = 0.0;{{endif}} break; + case -1: {{fval}} = -(double) digits[0]; break; + case 1: {{fval}} = (double) digits[0]; break; + {{for _size in (2, 3, 4)}} + case -{{_size}}: + case {{_size}}: + if (8 * sizeof(unsigned long) > {{_size}} * PyLong_SHIFT && ((8 * sizeof(unsigned long) < 53) || ({{_size-1}} * PyLong_SHIFT < 53))) { + {{fval}} = (double) {{pylong_join(_size, 'digits')}}; + // let CPython do its own float rounding from 2**53 on (max. consecutive integer in double float) + if ((8 * sizeof(unsigned long) < 53) || ({{_size}} * PyLong_SHIFT < 53) || ({{fval}} < (double) ((PY_LONG_LONG)1 << 53))) { + if (size == {{-_size}}) + {{fval}} = -{{fval}}; + break; + } + } + // Fall through if size doesn't fit safely into a double anymore. + // It may not be obvious that this is a safe fall-through given the "fval < 2**53" + // check above. However, the number of digits that CPython uses for a given PyLong + // value is minimal, and together with the "(size-1) * SHIFT < 53" check above, + // this should make it safe. + CYTHON_FALLTHROUGH; + {{endfor}} + default: + #else + { + #endif + {{if op in ('Eq', 'Ne')}} + return {{'' if ret_type.is_pyobject else '__Pyx_PyObject_IsTrueAndDecref'}}( + PyFloat_Type.tp_richcompare({{'op1, op2' if order == 'CObj' else 'op2, op1'}}, Py_{{op.upper()}})); + {{else}} + {{fval}} = PyLong_AsDouble({{pyval}}); + if (unlikely({{fval}} == -1.0 && PyErr_Occurred())) return NULL; + {{if order == 'CObj' and c_op in '%/'}}{{zerodiv_check(fval)}}{{endif}} + {{endif}} + } + } else { + {{if op in ('Eq', 'Ne')}} + return {{'' if ret_type.is_pyobject else '__Pyx_PyObject_IsTrueAndDecref'}}( + PyObject_RichCompare(op1, op2, Py_{{op.upper()}})); + {{elif op == 'Divide'}} + return (inplace ? __Pyx_PyNumber_InPlaceDivide(op1, op2) : __Pyx_PyNumber_Divide(op1, op2)); + {{else}} + return (inplace ? 
PyNumber_InPlace{{op}} : PyNumber_{{op}})(op1, op2);
+    {{endif}}
+    }
+
+    {{if op in ('Eq', 'Ne')}}
+    if (a {{c_op}} b) {
+        {{return_true}};
+    } else {
+        {{return_false}};
+    }
+    {{else}}
+    // copied from floatobject.c in Py3.5:
+    {{if order == 'CObj' and c_op in '%/'}}{{zerodiv_check('b')}}{{endif}}
+    PyFPE_START_PROTECT("{{op.lower() if not op.endswith('Divide') else 'divide'}}", return NULL)
+    {{if c_op == '%'}}
+    result = fmod(a, b);
+    if (result)
+        result += ((result < 0) ^ (b < 0)) * b;
+    else
+        result = copysign(0.0, b);
+    {{else}}
+    result = a {{c_op}} b;
+    {{endif}}
+    PyFPE_END_PROTECT(result)
+    return PyFloat_FromDouble(result);
+    {{endif}}
+}
+#endif
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Overflow.c b/venv/lib/python3.8/site-packages/Cython/Utility/Overflow.c
new file mode 100644
index 0000000..6dff81c
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/Overflow.c
@@ -0,0 +1,311 @@
+/*
+These functions provide integer arithmetic with overflow checking. They do not
+actually raise an exception when an overflow is detected, but rather set a bit
+in the overflow parameter. (This parameter may be re-used across several
+arithmetic operations, so it should be or-ed rather than assigned to.)
+
+The implementation is divided into two parts: the signed and unsigned base cases,
+which is where the magic happens, and a generic template matching a specific
+type to an implementation based on its (C compile-time) size and signedness.
+
+When possible, branching is avoided, and preference is given to speed over
+accuracy (a low rate of falsely "detected" overflows is acceptable,
+undetected overflows are not).
+
+
+TODO: Hook up checking.
+TODO: Conditionally support 128-bit with intmax_t?
+*/
+
+/////////////// Common.proto ///////////////
+
+static int __Pyx_check_twos_complement(void) {
+    if ((-1 != ~0)) {
+        PyErr_SetString(PyExc_RuntimeError, "Two's complement required for overflow checks.");
+        return 1;
+    } else if ((sizeof(short) == sizeof(int))) {
+        PyErr_SetString(PyExc_RuntimeError, "sizeof(short) < sizeof(int) required for overflow checks.");
+        return 1;
+    } else {
+        return 0;
+    }
+}
+
+#define __PYX_IS_UNSIGNED(type) ((((type) -1) > 0))
+#define __PYX_SIGN_BIT(type) ((((unsigned type) 1) << (sizeof(type) * 8 - 1)))
+#define __PYX_HALF_MAX(type) ((((type) 1) << (sizeof(type) * 8 - 2)))
+#define __PYX_MIN(type) ((__PYX_IS_UNSIGNED(type) ? (type) 0 : 0 - __PYX_HALF_MAX(type) - __PYX_HALF_MAX(type)))
+#define __PYX_MAX(type) ((~__PYX_MIN(type)))
+
+#define __Pyx_add_no_overflow(a, b, overflow) ((a) + (b))
+#define __Pyx_add_const_no_overflow(a, b, overflow) ((a) + (b))
+#define __Pyx_sub_no_overflow(a, b, overflow) ((a) - (b))
+#define __Pyx_sub_const_no_overflow(a, b, overflow) ((a) - (b))
+#define __Pyx_mul_no_overflow(a, b, overflow) ((a) * (b))
+#define __Pyx_mul_const_no_overflow(a, b, overflow) ((a) * (b))
+#define __Pyx_div_no_overflow(a, b, overflow) ((a) / (b))
+#define __Pyx_div_const_no_overflow(a, b, overflow) ((a) / (b))
+
+/////////////// Common.init ///////////////
+//@substitute: naming
+
+// FIXME: Propagate the error here instead of just printing it.
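+// Editorial sketch of the range macros above, assuming a 32-bit
+// two's-complement int (illustrative values only):
+//   __PYX_SIGN_BIT(int)  == ((unsigned int)1) << 31     == 0x80000000u
+//   __PYX_HALF_MAX(int)  == ((int)1) << 30              == 0x40000000
+//   __PYX_MIN(int)       == 0 - 0x40000000 - 0x40000000 == -2147483648
+//   __PYX_MAX(int)       == ~__PYX_MIN(int)             ==  2147483647
+// Subtracting __PYX_HALF_MAX twice avoids writing the literal 2147483648,
+// which does not fit in a 32-bit int.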
+if (unlikely(__Pyx_check_twos_complement())) { + PyErr_WriteUnraisable($module_cname); +} + +/////////////// BaseCaseUnsigned.proto /////////////// + +static CYTHON_INLINE {{UINT}} __Pyx_add_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow); +static CYTHON_INLINE {{UINT}} __Pyx_sub_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow); +static CYTHON_INLINE {{UINT}} __Pyx_mul_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow); +static CYTHON_INLINE {{UINT}} __Pyx_div_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow); + +// Use these when b is known at compile time. +#define __Pyx_add_const_{{NAME}}_checking_overflow __Pyx_add_{{NAME}}_checking_overflow +#define __Pyx_sub_const_{{NAME}}_checking_overflow __Pyx_sub_{{NAME}}_checking_overflow +static CYTHON_INLINE {{UINT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} constant, int *overflow); +#define __Pyx_div_const_{{NAME}}_checking_overflow __Pyx_div_{{NAME}}_checking_overflow + +/////////////// BaseCaseUnsigned /////////////// + +static CYTHON_INLINE {{UINT}} __Pyx_add_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { + {{UINT}} r = a + b; + *overflow |= r < a; + return r; +} + +static CYTHON_INLINE {{UINT}} __Pyx_sub_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { + {{UINT}} r = a - b; + *overflow |= r > a; + return r; +} + +static CYTHON_INLINE {{UINT}} __Pyx_mul_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { + if ((sizeof({{UINT}}) < sizeof(unsigned long))) { + unsigned long big_r = ((unsigned long) a) * ((unsigned long) b); + {{UINT}} r = ({{UINT}}) big_r; + *overflow |= big_r != r; + return r; +#ifdef HAVE_LONG_LONG + } else if ((sizeof({{UINT}}) < sizeof(unsigned PY_LONG_LONG))) { + unsigned PY_LONG_LONG big_r = ((unsigned PY_LONG_LONG) a) * ((unsigned PY_LONG_LONG) b); + {{UINT}} r = ({{UINT}}) big_r; + *overflow |= big_r != r; + return r; +#endif + } else { + {{UINT}} prod = a * b; + double dprod = ((double) a) * ((double) b); + // Overflow results in an error of at least 2^sizeof(UINT), + // whereas rounding represents an error on the order of 2^(sizeof(UINT)-53). + *overflow |= fabs(dprod - prod) > (__PYX_MAX({{UINT}}) / 2); + return prod; + } +} + +static CYTHON_INLINE {{UINT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { + if (b > 1) { + *overflow |= a > __PYX_MAX({{UINT}}) / b; + } + return a * b; +} + + +static CYTHON_INLINE {{UINT}} __Pyx_div_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { + if (b == 0) { + *overflow |= 1; + return 0; + } + return a / b; +} + + +/////////////// BaseCaseSigned.proto /////////////// + +static CYTHON_INLINE {{INT}} __Pyx_add_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); +static CYTHON_INLINE {{INT}} __Pyx_sub_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); +static CYTHON_INLINE {{INT}} __Pyx_mul_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); +static CYTHON_INLINE {{INT}} __Pyx_div_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); + + +// Use when b is known at compile time. 
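+// (These *_const_* variants rely on b being a compile-time constant, which
+// permits direct range checks against __PYX_MIN/__PYX_MAX instead of the
+// widening arithmetic used by the general versions.)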
+static CYTHON_INLINE {{INT}} __Pyx_add_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); +static CYTHON_INLINE {{INT}} __Pyx_sub_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow); +static CYTHON_INLINE {{INT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} constant, int *overflow); +#define __Pyx_div_const_{{NAME}}_checking_overflow __Pyx_div_{{NAME}}_checking_overflow + +/////////////// BaseCaseSigned /////////////// + +static CYTHON_INLINE {{INT}} __Pyx_add_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { + if ((sizeof({{INT}}) < sizeof(long))) { + long big_r = ((long) a) + ((long) b); + {{INT}} r = ({{INT}}) big_r; + *overflow |= big_r != r; + return r; +#ifdef HAVE_LONG_LONG + } else if ((sizeof({{INT}}) < sizeof(PY_LONG_LONG))) { + PY_LONG_LONG big_r = ((PY_LONG_LONG) a) + ((PY_LONG_LONG) b); + {{INT}} r = ({{INT}}) big_r; + *overflow |= big_r != r; + return r; +#endif + } else { + // Signed overflow undefined, but unsigned overflow is well defined. + {{INT}} r = ({{INT}}) ((unsigned {{INT}}) a + (unsigned {{INT}}) b); + // Overflow happened if the operands have the same sign, but the result + // has opposite sign. + // sign(a) == sign(b) != sign(r) + {{INT}} sign_a = __PYX_SIGN_BIT({{INT}}) & a; + {{INT}} sign_b = __PYX_SIGN_BIT({{INT}}) & b; + {{INT}} sign_r = __PYX_SIGN_BIT({{INT}}) & r; + *overflow |= (sign_a == sign_b) & (sign_a != sign_r); + return r; + } +} + +static CYTHON_INLINE {{INT}} __Pyx_add_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { + if (b > 0) { + *overflow |= a > __PYX_MAX({{INT}}) - b; + } else if (b < 0) { + *overflow |= a < __PYX_MIN({{INT}}) - b; + } + return a + b; +} + +static CYTHON_INLINE {{INT}} __Pyx_sub_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { + *overflow |= b == __PYX_MIN({{INT}}); + return __Pyx_add_{{NAME}}_checking_overflow(a, -b, overflow); +} + +static CYTHON_INLINE {{INT}} __Pyx_sub_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { + *overflow |= b == __PYX_MIN({{INT}}); + return __Pyx_add_const_{{NAME}}_checking_overflow(a, -b, overflow); +} + +static CYTHON_INLINE {{INT}} __Pyx_mul_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { + if ((sizeof({{INT}}) < sizeof(long))) { + long big_r = ((long) a) * ((long) b); + {{INT}} r = ({{INT}}) big_r; + *overflow |= big_r != r; + return ({{INT}}) r; +#ifdef HAVE_LONG_LONG + } else if ((sizeof({{INT}}) < sizeof(PY_LONG_LONG))) { + PY_LONG_LONG big_r = ((PY_LONG_LONG) a) * ((PY_LONG_LONG) b); + {{INT}} r = ({{INT}}) big_r; + *overflow |= big_r != r; + return ({{INT}}) r; +#endif + } else { + {{INT}} prod = a * b; + double dprod = ((double) a) * ((double) b); + // Overflow results in an error of at least 2^sizeof(INT), + // whereas rounding represents an error on the order of 2^(sizeof(INT)-53). 
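+        // That gap is what makes the single comparison below safe: per the
+        // note at the top of this file, a rare false positive near the
+        // boundary is acceptable, while a missed overflow is not.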
+ *overflow |= fabs(dprod - prod) > (__PYX_MAX({{INT}}) / 2); + return prod; + } +} + +static CYTHON_INLINE {{INT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { + if (b > 1) { + *overflow |= a > __PYX_MAX({{INT}}) / b; + *overflow |= a < __PYX_MIN({{INT}}) / b; + } else if (b == -1) { + *overflow |= a == __PYX_MIN({{INT}}); + } else if (b < -1) { + *overflow |= a > __PYX_MIN({{INT}}) / b; + *overflow |= a < __PYX_MAX({{INT}}) / b; + } + return a * b; +} + +static CYTHON_INLINE {{INT}} __Pyx_div_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { + if (b == 0) { + *overflow |= 1; + return 0; + } + *overflow |= (a == __PYX_MIN({{INT}})) & (b == -1); + return a / b; +} + + +/////////////// SizeCheck.init /////////////// +//@substitute: naming + +// FIXME: Propagate the error here instead of just printing it. +if (unlikely(__Pyx_check_sane_{{NAME}}())) { + PyErr_WriteUnraisable($module_cname); +} + +/////////////// SizeCheck.proto /////////////// + +static int __Pyx_check_sane_{{NAME}}(void) { + if (((sizeof({{TYPE}}) <= sizeof(int)) || +#ifdef HAVE_LONG_LONG + (sizeof({{TYPE}}) == sizeof(PY_LONG_LONG)) || +#endif + (sizeof({{TYPE}}) == sizeof(long)))) { + return 0; + } else { + PyErr_Format(PyExc_RuntimeError, \ + "Bad size for int type %.{{max(60, len(TYPE))}}s: %d", "{{TYPE}}", (int) sizeof({{TYPE}})); + return 1; + } +} + + +/////////////// Binop.proto /////////////// + +static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}} a, {{TYPE}} b, int *overflow); + +/////////////// Binop /////////////// + +static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}} a, {{TYPE}} b, int *overflow) { + if ((sizeof({{TYPE}}) < sizeof(int))) { + return __Pyx_{{BINOP}}_no_overflow(a, b, overflow); + } else if (__PYX_IS_UNSIGNED({{TYPE}})) { + if ((sizeof({{TYPE}}) == sizeof(unsigned int))) { + return __Pyx_{{BINOP}}_unsigned_int_checking_overflow(a, b, overflow); + } else if ((sizeof({{TYPE}}) == sizeof(unsigned long))) { + return __Pyx_{{BINOP}}_unsigned_long_checking_overflow(a, b, overflow); +#ifdef HAVE_LONG_LONG + } else if ((sizeof({{TYPE}}) == sizeof(unsigned PY_LONG_LONG))) { + return __Pyx_{{BINOP}}_unsigned_long_long_checking_overflow(a, b, overflow); +#endif + } else { + abort(); return 0; /* handled elsewhere */ + } + } else { + if ((sizeof({{TYPE}}) == sizeof(int))) { + return __Pyx_{{BINOP}}_int_checking_overflow(a, b, overflow); + } else if ((sizeof({{TYPE}}) == sizeof(long))) { + return __Pyx_{{BINOP}}_long_checking_overflow(a, b, overflow); +#ifdef HAVE_LONG_LONG + } else if ((sizeof({{TYPE}}) == sizeof(PY_LONG_LONG))) { + return __Pyx_{{BINOP}}_long_long_checking_overflow(a, b, overflow); +#endif + } else { + abort(); return 0; /* handled elsewhere */ + } + } +} + +/////////////// LeftShift.proto /////////////// + +static CYTHON_INLINE {{TYPE}} __Pyx_lshift_{{NAME}}_checking_overflow({{TYPE}} a, {{TYPE}} b, int *overflow) { + *overflow |= +#if {{SIGNED}} + (b < 0) | +#endif + (b > ({{TYPE}}) (8 * sizeof({{TYPE}}))) | (a > (__PYX_MAX({{TYPE}}) >> b)); + return a << b; +} +#define __Pyx_lshift_const_{{NAME}}_checking_overflow __Pyx_lshift_{{NAME}}_checking_overflow + + +/////////////// UnaryNegOverflows.proto /////////////// + +//FIXME: shouldn't the macro name be prefixed by "__Pyx_" ? Too late now, I guess... 
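+// Negation of a two's complement integer overflows only for the most
+// negative value (e.g. LONG_MIN): it is the single x < 0 whose unsigned
+// negation 0-(unsigned long)x yields the same bit pattern again, which is
+// exactly the condition the macro below tests.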
+// from intobject.c +#define UNARY_NEG_WOULD_OVERFLOW(x) \ + (((x) < 0) & ((unsigned long)(x) == 0-(unsigned long)(x))) diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Printing.c b/venv/lib/python3.8/site-packages/Cython/Utility/Printing.c new file mode 100644 index 0000000..71aa7ea --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Printing.c @@ -0,0 +1,176 @@ +////////////////////// Print.proto ////////////////////// +//@substitute: naming + +static int __Pyx_Print(PyObject*, PyObject *, int); /*proto*/ +#if CYTHON_COMPILING_IN_PYPY || PY_MAJOR_VERSION >= 3 +static PyObject* $print_function = 0; +static PyObject* $print_function_kwargs = 0; +#endif + +////////////////////// Print.cleanup ////////////////////// +//@substitute: naming + +#if CYTHON_COMPILING_IN_PYPY || PY_MAJOR_VERSION >= 3 +Py_CLEAR($print_function); +Py_CLEAR($print_function_kwargs); +#endif + +////////////////////// Print ////////////////////// +//@substitute: naming + +#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION < 3 +static PyObject *__Pyx_GetStdout(void) { + PyObject *f = PySys_GetObject((char *)"stdout"); + if (!f) { + PyErr_SetString(PyExc_RuntimeError, "lost sys.stdout"); + } + return f; +} + +static int __Pyx_Print(PyObject* f, PyObject *arg_tuple, int newline) { + int i; + + if (!f) { + if (!(f = __Pyx_GetStdout())) + return -1; + } + Py_INCREF(f); + for (i=0; i < PyTuple_GET_SIZE(arg_tuple); i++) { + PyObject* v; + if (PyFile_SoftSpace(f, 1)) { + if (PyFile_WriteString(" ", f) < 0) + goto error; + } + v = PyTuple_GET_ITEM(arg_tuple, i); + if (PyFile_WriteObject(v, f, Py_PRINT_RAW) < 0) + goto error; + if (PyString_Check(v)) { + char *s = PyString_AsString(v); + Py_ssize_t len = PyString_Size(v); + if (len > 0) { + // append soft-space if necessary (not using isspace() due to C/C++ problem on MacOS-X) + switch (s[len-1]) { + case ' ': break; + case '\f': case '\r': case '\n': case '\t': case '\v': + PyFile_SoftSpace(f, 0); + break; + default: break; + } + } + } + } + if (newline) { + if (PyFile_WriteString("\n", f) < 0) + goto error; + PyFile_SoftSpace(f, 0); + } + Py_DECREF(f); + return 0; +error: + Py_DECREF(f); + return -1; +} + +#else /* Python 3 has a print function */ + +static int __Pyx_Print(PyObject* stream, PyObject *arg_tuple, int newline) { + PyObject* kwargs = 0; + PyObject* result = 0; + PyObject* end_string; + if (unlikely(!$print_function)) { + $print_function = PyObject_GetAttr($builtins_cname, PYIDENT("print")); + if (!$print_function) + return -1; + } + if (stream) { + kwargs = PyDict_New(); + if (unlikely(!kwargs)) + return -1; + if (unlikely(PyDict_SetItem(kwargs, PYIDENT("file"), stream) < 0)) + goto bad; + if (!newline) { + end_string = PyUnicode_FromStringAndSize(" ", 1); + if (unlikely(!end_string)) + goto bad; + if (PyDict_SetItem(kwargs, PYIDENT("end"), end_string) < 0) { + Py_DECREF(end_string); + goto bad; + } + Py_DECREF(end_string); + } + } else if (!newline) { + if (unlikely(!$print_function_kwargs)) { + $print_function_kwargs = PyDict_New(); + if (unlikely(!$print_function_kwargs)) + return -1; + end_string = PyUnicode_FromStringAndSize(" ", 1); + if (unlikely(!end_string)) + return -1; + if (PyDict_SetItem($print_function_kwargs, PYIDENT("end"), end_string) < 0) { + Py_DECREF(end_string); + return -1; + } + Py_DECREF(end_string); + } + kwargs = $print_function_kwargs; + } + result = PyObject_Call($print_function, arg_tuple, kwargs); + if (unlikely(kwargs) && (kwargs != $print_function_kwargs)) + Py_DECREF(kwargs); + if (!result) + return -1; + 
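+    // print() returned a new reference (normally Py_None); it only needs to be released.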
Py_DECREF(result); + return 0; +bad: + if (kwargs != $print_function_kwargs) + Py_XDECREF(kwargs); + return -1; +} +#endif + +////////////////////// PrintOne.proto ////////////////////// +//@requires: Print + +static int __Pyx_PrintOne(PyObject* stream, PyObject *o); /*proto*/ + +////////////////////// PrintOne ////////////////////// + +#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION < 3 + +static int __Pyx_PrintOne(PyObject* f, PyObject *o) { + if (!f) { + if (!(f = __Pyx_GetStdout())) + return -1; + } + Py_INCREF(f); + if (PyFile_SoftSpace(f, 0)) { + if (PyFile_WriteString(" ", f) < 0) + goto error; + } + if (PyFile_WriteObject(o, f, Py_PRINT_RAW) < 0) + goto error; + if (PyFile_WriteString("\n", f) < 0) + goto error; + Py_DECREF(f); + return 0; +error: + Py_DECREF(f); + return -1; + /* the line below is just to avoid C compiler + * warnings about unused functions */ + return __Pyx_Print(f, NULL, 0); +} + +#else /* Python 3 has a print function */ + +static int __Pyx_PrintOne(PyObject* stream, PyObject *o) { + int res; + PyObject* arg_tuple = PyTuple_Pack(1, o); + if (unlikely(!arg_tuple)) + return -1; + res = __Pyx_Print(stream, arg_tuple, 1); + Py_DECREF(arg_tuple); + return res; +} + +#endif diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/Profile.c b/venv/lib/python3.8/site-packages/Cython/Utility/Profile.c new file mode 100644 index 0000000..5f2de7e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/Profile.c @@ -0,0 +1,335 @@ +/////////////// Profile.proto /////////////// +//@requires: Exceptions.c::PyErrFetchRestore +//@substitute: naming + +// Note that cPython ignores PyTrace_EXCEPTION, +// but maybe some other profilers don't. + +#ifndef CYTHON_PROFILE +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON + #define CYTHON_PROFILE 0 +#else + #define CYTHON_PROFILE 1 +#endif +#endif + +#ifndef CYTHON_TRACE_NOGIL + #define CYTHON_TRACE_NOGIL 0 +#else + #if CYTHON_TRACE_NOGIL && !defined(CYTHON_TRACE) + #define CYTHON_TRACE 1 + #endif +#endif + +#ifndef CYTHON_TRACE + #define CYTHON_TRACE 0 +#endif + +#if CYTHON_TRACE + #undef CYTHON_PROFILE_REUSE_FRAME +#endif + +#ifndef CYTHON_PROFILE_REUSE_FRAME + #define CYTHON_PROFILE_REUSE_FRAME 0 +#endif + +#if CYTHON_PROFILE || CYTHON_TRACE + + #include "compile.h" + #include "frameobject.h" + #include "traceback.h" + + #if CYTHON_PROFILE_REUSE_FRAME + #define CYTHON_FRAME_MODIFIER static + #define CYTHON_FRAME_DEL(frame) + #else + #define CYTHON_FRAME_MODIFIER + #define CYTHON_FRAME_DEL(frame) Py_CLEAR(frame) + #endif + + #define __Pyx_TraceDeclarations \ + static PyCodeObject *$frame_code_cname = NULL; \ + CYTHON_FRAME_MODIFIER PyFrameObject *$frame_cname = NULL; \ + int __Pyx_use_tracing = 0; + + #define __Pyx_TraceFrameInit(codeobj) \ + if (codeobj) $frame_code_cname = (PyCodeObject*) codeobj; + + #ifdef WITH_THREAD + #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) \ + if (nogil) { \ + if (CYTHON_TRACE_NOGIL) { \ + PyThreadState *tstate; \ + PyGILState_STATE state = PyGILState_Ensure(); \ + tstate = __Pyx_PyThreadState_Current; \ + if (unlikely(tstate->use_tracing) && !tstate->tracing && \ + (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc))) { \ + __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \ + } \ + PyGILState_Release(state); \ + if (unlikely(__Pyx_use_tracing < 0)) goto_error; \ + } \ + } else { \ + PyThreadState* tstate = PyThreadState_GET(); \ + if (unlikely(tstate->use_tracing) 
&& !tstate->tracing && \ + (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc))) { \ + __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \ + if (unlikely(__Pyx_use_tracing < 0)) goto_error; \ + } \ + } + #else + #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) \ + { PyThreadState* tstate = PyThreadState_GET(); \ + if (unlikely(tstate->use_tracing) && !tstate->tracing && \ + (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc))) { \ + __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \ + if (unlikely(__Pyx_use_tracing < 0)) goto_error; \ + } \ + } + #endif + + #define __Pyx_TraceException() \ + if (likely(!__Pyx_use_tracing)); else { \ + PyThreadState* tstate = __Pyx_PyThreadState_Current; \ + if (tstate->use_tracing && \ + (tstate->c_profilefunc || (CYTHON_TRACE && tstate->c_tracefunc))) { \ + tstate->tracing++; \ + tstate->use_tracing = 0; \ + PyObject *exc_info = __Pyx_GetExceptionTuple(tstate); \ + if (exc_info) { \ + if (CYTHON_TRACE && tstate->c_tracefunc) \ + tstate->c_tracefunc( \ + tstate->c_traceobj, $frame_cname, PyTrace_EXCEPTION, exc_info); \ + tstate->c_profilefunc( \ + tstate->c_profileobj, $frame_cname, PyTrace_EXCEPTION, exc_info); \ + Py_DECREF(exc_info); \ + } \ + tstate->use_tracing = 1; \ + tstate->tracing--; \ + } \ + } + + static void __Pyx_call_return_trace_func(PyThreadState *tstate, PyFrameObject *frame, PyObject *result) { + PyObject *type, *value, *traceback; + __Pyx_ErrFetchInState(tstate, &type, &value, &traceback); + tstate->tracing++; + tstate->use_tracing = 0; + if (CYTHON_TRACE && tstate->c_tracefunc) + tstate->c_tracefunc(tstate->c_traceobj, frame, PyTrace_RETURN, result); + if (tstate->c_profilefunc) + tstate->c_profilefunc(tstate->c_profileobj, frame, PyTrace_RETURN, result); + CYTHON_FRAME_DEL(frame); + tstate->use_tracing = 1; + tstate->tracing--; + __Pyx_ErrRestoreInState(tstate, type, value, traceback); + } + + #ifdef WITH_THREAD + #define __Pyx_TraceReturn(result, nogil) \ + if (likely(!__Pyx_use_tracing)); else { \ + if (nogil) { \ + if (CYTHON_TRACE_NOGIL) { \ + PyThreadState *tstate; \ + PyGILState_STATE state = PyGILState_Ensure(); \ + tstate = __Pyx_PyThreadState_Current; \ + if (tstate->use_tracing) { \ + __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ + } \ + PyGILState_Release(state); \ + } \ + } else { \ + PyThreadState* tstate = __Pyx_PyThreadState_Current; \ + if (tstate->use_tracing) { \ + __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ + } \ + } \ + } + #else + #define __Pyx_TraceReturn(result, nogil) \ + if (likely(!__Pyx_use_tracing)); else { \ + PyThreadState* tstate = __Pyx_PyThreadState_Current; \ + if (tstate->use_tracing) { \ + __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ + } \ + } + #endif + + static PyCodeObject *__Pyx_createFrameCodeObject(const char *funcname, const char *srcfile, int firstlineno); /*proto*/ + static int __Pyx_TraceSetupAndCall(PyCodeObject** code, PyFrameObject** frame, PyThreadState* tstate, const char *funcname, const char *srcfile, int firstlineno); /*proto*/ + +#else + + #define __Pyx_TraceDeclarations + #define __Pyx_TraceFrameInit(codeobj) + // mark error label as used to avoid compiler warnings + #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) if ((1)); else goto_error; + #define __Pyx_TraceException() + #define 
__Pyx_TraceReturn(result, nogil) + +#endif /* CYTHON_PROFILE */ + +#if CYTHON_TRACE + // see call_trace_protected() in CPython's ceval.c + static int __Pyx_call_line_trace_func(PyThreadState *tstate, PyFrameObject *frame, int lineno) { + int ret; + PyObject *type, *value, *traceback; + __Pyx_ErrFetchInState(tstate, &type, &value, &traceback); + __Pyx_PyFrame_SetLineNumber(frame, lineno); + tstate->tracing++; + tstate->use_tracing = 0; + ret = tstate->c_tracefunc(tstate->c_traceobj, frame, PyTrace_LINE, NULL); + tstate->use_tracing = 1; + tstate->tracing--; + if (likely(!ret)) { + __Pyx_ErrRestoreInState(tstate, type, value, traceback); + } else { + Py_XDECREF(type); + Py_XDECREF(value); + Py_XDECREF(traceback); + } + return ret; + } + + #ifdef WITH_THREAD + #define __Pyx_TraceLine(lineno, nogil, goto_error) \ + if (likely(!__Pyx_use_tracing)); else { \ + if (nogil) { \ + if (CYTHON_TRACE_NOGIL) { \ + int ret = 0; \ + PyThreadState *tstate; \ + PyGILState_STATE state = PyGILState_Ensure(); \ + tstate = __Pyx_PyThreadState_Current; \ + if (unlikely(tstate->use_tracing && tstate->c_tracefunc && $frame_cname->f_trace)) { \ + ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ + } \ + PyGILState_Release(state); \ + if (unlikely(ret)) goto_error; \ + } \ + } else { \ + PyThreadState* tstate = __Pyx_PyThreadState_Current; \ + if (unlikely(tstate->use_tracing && tstate->c_tracefunc && $frame_cname->f_trace)) { \ + int ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ + if (unlikely(ret)) goto_error; \ + } \ + } \ + } + #else + #define __Pyx_TraceLine(lineno, nogil, goto_error) \ + if (likely(!__Pyx_use_tracing)); else { \ + PyThreadState* tstate = __Pyx_PyThreadState_Current; \ + if (unlikely(tstate->use_tracing && tstate->c_tracefunc && $frame_cname->f_trace)) { \ + int ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ + if (unlikely(ret)) goto_error; \ + } \ + } + #endif +#else + // mark error label as used to avoid compiler warnings + #define __Pyx_TraceLine(lineno, nogil, goto_error) if ((1)); else goto_error; +#endif + +/////////////// Profile /////////////// +//@substitute: naming + +#if CYTHON_PROFILE + +static int __Pyx_TraceSetupAndCall(PyCodeObject** code, + PyFrameObject** frame, + PyThreadState* tstate, + const char *funcname, + const char *srcfile, + int firstlineno) { + PyObject *type, *value, *traceback; + int retval; + if (*frame == NULL || !CYTHON_PROFILE_REUSE_FRAME) { + if (*code == NULL) { + *code = __Pyx_createFrameCodeObject(funcname, srcfile, firstlineno); + if (*code == NULL) return 0; + } + *frame = PyFrame_New( + tstate, /*PyThreadState *tstate*/ + *code, /*PyCodeObject *code*/ + $moddict_cname, /*PyObject *globals*/ + 0 /*PyObject *locals*/ + ); + if (*frame == NULL) return 0; + if (CYTHON_TRACE && (*frame)->f_trace == NULL) { + // this enables "f_lineno" lookup, at least in CPython ... 
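+            // (At least in the CPython versions targeted here, the frame's
+            // f_lineno is reported directly whenever f_trace is set, instead
+            // of being recomputed from the empty code object; Py_None serves
+            // purely as that non-NULL marker.)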
+            Py_INCREF(Py_None);
+            (*frame)->f_trace = Py_None;
+        }
+#if PY_VERSION_HEX < 0x030400B1
+    } else {
+        (*frame)->f_tstate = tstate;
+#endif
+    }
+    __Pyx_PyFrame_SetLineNumber(*frame, firstlineno);
+    retval = 1;
+    tstate->tracing++;
+    tstate->use_tracing = 0;
+    __Pyx_ErrFetchInState(tstate, &type, &value, &traceback);
+    #if CYTHON_TRACE
+    if (tstate->c_tracefunc)
+        retval = tstate->c_tracefunc(tstate->c_traceobj, *frame, PyTrace_CALL, NULL) == 0;
+    if (retval && tstate->c_profilefunc)
+    #endif
+        retval = tstate->c_profilefunc(tstate->c_profileobj, *frame, PyTrace_CALL, NULL) == 0;
+    tstate->use_tracing = (tstate->c_profilefunc ||
+                           (CYTHON_TRACE && tstate->c_tracefunc));
+    tstate->tracing--;
+    if (retval) {
+        __Pyx_ErrRestoreInState(tstate, type, value, traceback);
+        return tstate->use_tracing && retval;
+    } else {
+        Py_XDECREF(type);
+        Py_XDECREF(value);
+        Py_XDECREF(traceback);
+        return -1;
+    }
+}
+
+static PyCodeObject *__Pyx_createFrameCodeObject(const char *funcname, const char *srcfile, int firstlineno) {
+    PyCodeObject *py_code = 0;
+
+#if PY_MAJOR_VERSION >= 3
+    py_code = PyCode_NewEmpty(srcfile, funcname, firstlineno);
+    // make CPython use a fresh dict for "f_locals" at need (see GH #1836)
+    if (likely(py_code)) {
+        py_code->co_flags |= CO_OPTIMIZED | CO_NEWLOCALS;
+    }
+#else
+    PyObject *py_srcfile = 0;
+    PyObject *py_funcname = 0;
+
+    py_funcname = PyString_FromString(funcname);
+    if (unlikely(!py_funcname)) goto bad;
+    py_srcfile = PyString_FromString(srcfile);
+    if (unlikely(!py_srcfile)) goto bad;
+
+    py_code = PyCode_New(
+        0,              /*int argcount,*/
+        0,              /*int nlocals,*/
+        0,              /*int stacksize,*/
+        // make CPython use a fresh dict for "f_locals" at need (see GH #1836)
+        CO_OPTIMIZED | CO_NEWLOCALS,  /*int flags,*/
+        $empty_bytes,   /*PyObject *code,*/
+        $empty_tuple,   /*PyObject *consts,*/
+        $empty_tuple,   /*PyObject *names,*/
+        $empty_tuple,   /*PyObject *varnames,*/
+        $empty_tuple,   /*PyObject *freevars,*/
+        $empty_tuple,   /*PyObject *cellvars,*/
+        py_srcfile,     /*PyObject *filename,*/
+        py_funcname,    /*PyObject *name,*/
+        firstlineno,    /*int firstlineno,*/
+        $empty_bytes    /*PyObject *lnotab*/
+    );
+
+bad:
+    Py_XDECREF(py_srcfile);
+    Py_XDECREF(py_funcname);
+#endif
+
+    return py_code;
+}
+
+#endif /* CYTHON_PROFILE */
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/StringTools.c b/venv/lib/python3.8/site-packages/Cython/Utility/StringTools.c
new file mode 100644
index 0000000..68315d9
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/StringTools.c
@@ -0,0 +1,1164 @@
+
+//////////////////// IncludeStringH.proto ////////////////////
+
+#include <string.h>
+
+//////////////////// IncludeCppStringH.proto ////////////////////
+
+#include <string>
+
+//////////////////// InitStrings.proto ////////////////////
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
+
+//////////////////// InitStrings ////////////////////
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+    while (t->p) {
+        #if PY_MAJOR_VERSION < 3
+        if (t->is_unicode) {
+            *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+        } else if (t->intern) {
+            *t->p = PyString_InternFromString(t->s);
+        } else {
+            *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+        }
+        #else  /* Python 3+ has unicode identifiers */
+        if (t->is_unicode | t->is_str) {
+            if (t->intern) {
+                *t->p = PyUnicode_InternFromString(t->s);
+            } else if (t->encoding) {
+                *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+            } else {
+                *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+            }
+        } else {
+            *t->p =
PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + // initialise cached hash value + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +//////////////////// BytesContains.proto //////////////////// + +static CYTHON_INLINE int __Pyx_BytesContains(PyObject* bytes, char character); /*proto*/ + +//////////////////// BytesContains //////////////////// +//@requires: IncludeStringH + +static CYTHON_INLINE int __Pyx_BytesContains(PyObject* bytes, char character) { + const Py_ssize_t length = PyBytes_GET_SIZE(bytes); + char* char_start = PyBytes_AS_STRING(bytes); + return memchr(char_start, (unsigned char)character, (size_t)length) != NULL; +} + + +//////////////////// PyUCS4InUnicode.proto //////////////////// + +static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 character); /*proto*/ + +//////////////////// PyUCS4InUnicode //////////////////// + +static int __Pyx_PyUnicodeBufferContainsUCS4_SP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) { + /* handle surrogate pairs for Py_UNICODE buffers in 16bit Unicode builds */ + Py_UNICODE high_val, low_val; + Py_UNICODE* pos; + high_val = (Py_UNICODE) (0xD800 | (((character - 0x10000) >> 10) & ((1<<10)-1))); + low_val = (Py_UNICODE) (0xDC00 | ( (character - 0x10000) & ((1<<10)-1))); + for (pos=buffer; pos < buffer+length-1; pos++) { + if (unlikely((high_val == pos[0]) & (low_val == pos[1]))) return 1; + } + return 0; +} + +static int __Pyx_PyUnicodeBufferContainsUCS4_BMP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) { + Py_UNICODE uchar; + Py_UNICODE* pos; + uchar = (Py_UNICODE) character; + for (pos=buffer; pos < buffer+length; pos++) { + if (unlikely(uchar == pos[0])) return 1; + } + return 0; +} + +static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 character) { +#if CYTHON_PEP393_ENABLED + const int kind = PyUnicode_KIND(unicode); + if (likely(kind != PyUnicode_WCHAR_KIND)) { + Py_ssize_t i; + const void* udata = PyUnicode_DATA(unicode); + const Py_ssize_t length = PyUnicode_GET_LENGTH(unicode); + for (i=0; i < length; i++) { + if (unlikely(character == PyUnicode_READ(kind, udata, i))) return 1; + } + return 0; + } +#endif + if (Py_UNICODE_SIZE == 2 && unlikely(character > 65535)) { + return __Pyx_PyUnicodeBufferContainsUCS4_SP( + PyUnicode_AS_UNICODE(unicode), + PyUnicode_GET_SIZE(unicode), + character); + } else { + return __Pyx_PyUnicodeBufferContainsUCS4_BMP( + PyUnicode_AS_UNICODE(unicode), + PyUnicode_GET_SIZE(unicode), + character); + + } +} + + +//////////////////// PyUnicodeContains.proto //////////////////// + +static CYTHON_INLINE int __Pyx_PyUnicode_ContainsTF(PyObject* substring, PyObject* text, int eq) { + int result = PyUnicode_Contains(text, substring); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + + +//////////////////// CStringEquals.proto //////////////////// + +static CYTHON_INLINE int __Pyx_StrEq(const char *, const char *); /*proto*/ + +//////////////////// CStringEquals //////////////////// + +static CYTHON_INLINE int __Pyx_StrEq(const char *s1, const char *s2) { + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + return *s1 == *s2; +} + + +//////////////////// StrEquals.proto //////////////////// +//@requires: BytesEquals +//@requires: UnicodeEquals + +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals +#else +#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals +#endif + + +//////////////////// UnicodeEquals.proto //////////////////// + +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/ + +//////////////////// UnicodeEquals //////////////////// +//@requires: BytesEquals + +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + /* as done by PyObject_RichCompareBool(); also catches the (interned) empty string */ + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + // len(s1) == len(s2) >= 1 (empty string is interned, and "s1 is not s2") + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? 
(result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + + +//////////////////// BytesEquals.proto //////////////////// + +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/ + +//////////////////// BytesEquals //////////////////// +//@requires: IncludeStringH + +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + /* as done by PyObject_RichCompareBool(); also catches the (interned) empty string */ + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + // len(s1) == len(s2) >= 1 (empty string is interned, and "s1 is not s2") + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +//////////////////// GetItemIntByteArray.proto //////////////////// + +#define __Pyx_GetItemInt_ByteArray(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? 
\ + __Pyx_GetItemInt_ByteArray_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) : \ + (PyErr_SetString(PyExc_IndexError, "bytearray index out of range"), -1)) + +static CYTHON_INLINE int __Pyx_GetItemInt_ByteArray_Fast(PyObject* string, Py_ssize_t i, + int wraparound, int boundscheck); + +//////////////////// GetItemIntByteArray //////////////////// + +static CYTHON_INLINE int __Pyx_GetItemInt_ByteArray_Fast(PyObject* string, Py_ssize_t i, + int wraparound, int boundscheck) { + Py_ssize_t length; + if (wraparound | boundscheck) { + length = PyByteArray_GET_SIZE(string); + if (wraparound & unlikely(i < 0)) i += length; + if ((!boundscheck) || likely(__Pyx_is_valid_index(i, length))) { + return (unsigned char) (PyByteArray_AS_STRING(string)[i]); + } else { + PyErr_SetString(PyExc_IndexError, "bytearray index out of range"); + return -1; + } + } else { + return (unsigned char) (PyByteArray_AS_STRING(string)[i]); + } +} + + +//////////////////// SetItemIntByteArray.proto //////////////////// + +#define __Pyx_SetItemInt_ByteArray(o, i, v, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \ + __Pyx_SetItemInt_ByteArray_Fast(o, (Py_ssize_t)i, v, wraparound, boundscheck) : \ + (PyErr_SetString(PyExc_IndexError, "bytearray index out of range"), -1)) + +static CYTHON_INLINE int __Pyx_SetItemInt_ByteArray_Fast(PyObject* string, Py_ssize_t i, unsigned char v, + int wraparound, int boundscheck); + +//////////////////// SetItemIntByteArray //////////////////// + +static CYTHON_INLINE int __Pyx_SetItemInt_ByteArray_Fast(PyObject* string, Py_ssize_t i, unsigned char v, + int wraparound, int boundscheck) { + Py_ssize_t length; + if (wraparound | boundscheck) { + length = PyByteArray_GET_SIZE(string); + if (wraparound & unlikely(i < 0)) i += length; + if ((!boundscheck) || likely(__Pyx_is_valid_index(i, length))) { + PyByteArray_AS_STRING(string)[i] = (char) v; + return 0; + } else { + PyErr_SetString(PyExc_IndexError, "bytearray index out of range"); + return -1; + } + } else { + PyByteArray_AS_STRING(string)[i] = (char) v; + return 0; + } +} + + +//////////////////// GetItemIntUnicode.proto //////////////////// + +#define __Pyx_GetItemInt_Unicode(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? 
\ + __Pyx_GetItemInt_Unicode_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) : \ + (PyErr_SetString(PyExc_IndexError, "string index out of range"), (Py_UCS4)-1)) + +static CYTHON_INLINE Py_UCS4 __Pyx_GetItemInt_Unicode_Fast(PyObject* ustring, Py_ssize_t i, + int wraparound, int boundscheck); + +//////////////////// GetItemIntUnicode //////////////////// + +static CYTHON_INLINE Py_UCS4 __Pyx_GetItemInt_Unicode_Fast(PyObject* ustring, Py_ssize_t i, + int wraparound, int boundscheck) { + Py_ssize_t length; + if (unlikely(__Pyx_PyUnicode_READY(ustring) < 0)) return (Py_UCS4)-1; + if (wraparound | boundscheck) { + length = __Pyx_PyUnicode_GET_LENGTH(ustring); + if (wraparound & unlikely(i < 0)) i += length; + if ((!boundscheck) || likely(__Pyx_is_valid_index(i, length))) { + return __Pyx_PyUnicode_READ_CHAR(ustring, i); + } else { + PyErr_SetString(PyExc_IndexError, "string index out of range"); + return (Py_UCS4)-1; + } + } else { + return __Pyx_PyUnicode_READ_CHAR(ustring, i); + } +} + + +/////////////// decode_c_string_utf16.proto /////////////// + +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = 0; + return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); +} +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = -1; + return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); +} +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = 1; + return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); +} + +/////////////// decode_cpp_string.proto /////////////// +//@requires: IncludeCppStringH +//@requires: decode_c_bytes + +static CYTHON_INLINE PyObject* __Pyx_decode_cpp_string( + std::string cppstring, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { + return __Pyx_decode_c_bytes( + cppstring.data(), cppstring.size(), start, stop, encoding, errors, decode_func); +} + +/////////////// decode_c_string.proto /////////////// + +static CYTHON_INLINE PyObject* __Pyx_decode_c_string( + const char* cstring, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); + +/////////////// decode_c_string /////////////// +//@requires: IncludeStringH +//@requires: decode_c_string_utf16 + +/* duplicate code to avoid calling strlen() if start >= 0 and stop >= 0 */ +static CYTHON_INLINE PyObject* __Pyx_decode_c_string( + const char* cstring, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { + Py_ssize_t length; + if (unlikely((start < 0) | (stop < 0))) { + size_t slen = strlen(cstring); + if (unlikely(slen > (size_t) PY_SSIZE_T_MAX)) { + PyErr_SetString(PyExc_OverflowError, + "c-string too long to convert to Python"); + return NULL; + } + length = (Py_ssize_t) slen; + if (start < 0) { + start += length; + if (start < 0) + start = 0; + } + if (stop < 0) + stop += length; + } + if (unlikely(stop <= start)) + return PyUnicode_FromUnicode(NULL, 0); + length = stop - start; + cstring += start; + if (decode_func) { + return decode_func(cstring, length, errors); + } else { + return PyUnicode_Decode(cstring, length, encoding, errors); + } +} + +/////////////// 
decode_c_bytes.proto ///////////////
+
+static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes(
+    const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop,
+    const char* encoding, const char* errors,
+    PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors));
+
+/////////////// decode_c_bytes ///////////////
+//@requires: decode_c_string_utf16
+
+static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes(
+    const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop,
+    const char* encoding, const char* errors,
+    PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) {
+    if (unlikely((start < 0) | (stop < 0))) {
+        if (start < 0) {
+            start += length;
+            if (start < 0)
+                start = 0;
+        }
+        if (stop < 0)
+            stop += length;
+    }
+    if (stop > length)
+        stop = length;
+    if (unlikely(stop <= start))
+        return PyUnicode_FromUnicode(NULL, 0);
+    length = stop - start;
+    cstring += start;
+    if (decode_func) {
+        return decode_func(cstring, length, errors);
+    } else {
+        return PyUnicode_Decode(cstring, length, encoding, errors);
+    }
+}
+
+/////////////// decode_bytes.proto ///////////////
+//@requires: decode_c_bytes
+
+static CYTHON_INLINE PyObject* __Pyx_decode_bytes(
+    PyObject* string, Py_ssize_t start, Py_ssize_t stop,
+    const char* encoding, const char* errors,
+    PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) {
+    return __Pyx_decode_c_bytes(
+        PyBytes_AS_STRING(string), PyBytes_GET_SIZE(string),
+        start, stop, encoding, errors, decode_func);
+}
+
+/////////////// decode_bytearray.proto ///////////////
+//@requires: decode_c_bytes
+
+static CYTHON_INLINE PyObject* __Pyx_decode_bytearray(
+    PyObject* string, Py_ssize_t start, Py_ssize_t stop,
+    const char* encoding, const char* errors,
+    PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) {
+    return __Pyx_decode_c_bytes(
+        PyByteArray_AS_STRING(string), PyByteArray_GET_SIZE(string),
+        start, stop, encoding, errors, decode_func);
+}
+
+/////////////// PyUnicode_Substring.proto ///////////////
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring(
+    PyObject* text, Py_ssize_t start, Py_ssize_t stop);
+
+/////////////// PyUnicode_Substring ///////////////
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring(
+    PyObject* text, Py_ssize_t start, Py_ssize_t stop) {
+    Py_ssize_t length;
+    if (unlikely(__Pyx_PyUnicode_READY(text) == -1)) return NULL;
+    length = __Pyx_PyUnicode_GET_LENGTH(text);
+    if (start < 0) {
+        start += length;
+        if (start < 0)
+            start = 0;
+    }
+    if (stop < 0)
+        stop += length;
+    else if (stop > length)
+        stop = length;
+    if (stop <= start)
+        return PyUnicode_FromUnicode(NULL, 0);
+#if CYTHON_PEP393_ENABLED
+    return PyUnicode_FromKindAndData(PyUnicode_KIND(text),
+        PyUnicode_1BYTE_DATA(text) + start*PyUnicode_KIND(text), stop-start);
+#else
+    return PyUnicode_FromUnicode(PyUnicode_AS_UNICODE(text)+start, stop-start);
+#endif
+}
+
+
+/////////////// py_unicode_istitle.proto ///////////////
+
+// Py_UNICODE_ISTITLE() doesn't match unicode.istitle() as the latter
+// additionally allows characters that comply with Py_UNICODE_ISUPPER()
+
+#if PY_VERSION_HEX < 0x030200A2
+static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UNICODE uchar)
+#else
+static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UCS4 uchar)
+#endif
+{
+    return Py_UNICODE_ISTITLE(uchar) || Py_UNICODE_ISUPPER(uchar);
+}
+
+
+/////////////// unicode_tailmatch.proto ///////////////
+
+static int __Pyx_PyUnicode_Tailmatch(
+    PyObject* s, PyObject*
substr, Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/ + +/////////////// unicode_tailmatch /////////////// + +// Python's unicode.startswith() and unicode.endswith() support a +// tuple of prefixes/suffixes, whereas it's much more common to +// test for a single unicode string. + +static int __Pyx_PyUnicode_TailmatchTuple(PyObject* s, PyObject* substrings, + Py_ssize_t start, Py_ssize_t end, int direction) { + Py_ssize_t i, count = PyTuple_GET_SIZE(substrings); + for (i = 0; i < count; i++) { + Py_ssize_t result; +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + result = PyUnicode_Tailmatch(s, PyTuple_GET_ITEM(substrings, i), + start, end, direction); +#else + PyObject* sub = PySequence_ITEM(substrings, i); + if (unlikely(!sub)) return -1; + result = PyUnicode_Tailmatch(s, sub, start, end, direction); + Py_DECREF(sub); +#endif + if (result) { + return (int) result; + } + } + return 0; +} + +static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr, + Py_ssize_t start, Py_ssize_t end, int direction) { + if (unlikely(PyTuple_Check(substr))) { + return __Pyx_PyUnicode_TailmatchTuple(s, substr, start, end, direction); + } + return (int) PyUnicode_Tailmatch(s, substr, start, end, direction); +} + + +/////////////// bytes_tailmatch.proto /////////////// + +static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, + Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/ +static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr, + Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/ + +/////////////// bytes_tailmatch /////////////// + +static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, + Py_ssize_t start, Py_ssize_t end, int direction) { + const char* self_ptr = PyBytes_AS_STRING(self); + Py_ssize_t self_len = PyBytes_GET_SIZE(self); + const char* sub_ptr; + Py_ssize_t sub_len; + int retval; + + Py_buffer view; + view.obj = NULL; + + if ( PyBytes_Check(arg) ) { + sub_ptr = PyBytes_AS_STRING(arg); + sub_len = PyBytes_GET_SIZE(arg); + } +#if PY_MAJOR_VERSION < 3 + // Python 2.x allows mixing unicode and str + else if ( PyUnicode_Check(arg) ) { + return (int) PyUnicode_Tailmatch(self, arg, start, end, direction); + } +#endif + else { + if (unlikely(PyObject_GetBuffer(self, &view, PyBUF_SIMPLE) == -1)) + return -1; + sub_ptr = (const char*) view.buf; + sub_len = view.len; + } + + if (end > self_len) + end = self_len; + else if (end < 0) + end += self_len; + if (end < 0) + end = 0; + if (start < 0) + start += self_len; + if (start < 0) + start = 0; + + if (direction > 0) { + /* endswith */ + if (end-sub_len > start) + start = end - sub_len; + } + + if (start + sub_len <= end) + retval = !memcmp(self_ptr+start, sub_ptr, (size_t)sub_len); + else + retval = 0; + + if (view.obj) + PyBuffer_Release(&view); + + return retval; +} + +static int __Pyx_PyBytes_TailmatchTuple(PyObject* self, PyObject* substrings, + Py_ssize_t start, Py_ssize_t end, int direction) { + Py_ssize_t i, count = PyTuple_GET_SIZE(substrings); + for (i = 0; i < count; i++) { + int result; +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + result = __Pyx_PyBytes_SingleTailmatch(self, PyTuple_GET_ITEM(substrings, i), + start, end, direction); +#else + PyObject* sub = PySequence_ITEM(substrings, i); + if (unlikely(!sub)) return -1; + result = __Pyx_PyBytes_SingleTailmatch(self, sub, start, end, direction); + Py_DECREF(sub); +#endif + if (result) { + return result; + } + } + return 0; +} + +static int __Pyx_PyBytes_Tailmatch(PyObject* 
self, PyObject* substr, + Py_ssize_t start, Py_ssize_t end, int direction) { + if (unlikely(PyTuple_Check(substr))) { + return __Pyx_PyBytes_TailmatchTuple(self, substr, start, end, direction); + } + + return __Pyx_PyBytes_SingleTailmatch(self, substr, start, end, direction); +} + + +/////////////// str_tailmatch.proto /////////////// + +static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start, + Py_ssize_t end, int direction); /*proto*/ + +/////////////// str_tailmatch /////////////// +//@requires: bytes_tailmatch +//@requires: unicode_tailmatch + +static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start, + Py_ssize_t end, int direction) +{ + // We do not use a C compiler macro here to avoid "unused function" + // warnings for the *_Tailmatch() function that is not being used in + // the specific CPython version. The C compiler will generate the same + // code anyway, and will usually just remove the unused function. + if (PY_MAJOR_VERSION < 3) + return __Pyx_PyBytes_Tailmatch(self, arg, start, end, direction); + else + return __Pyx_PyUnicode_Tailmatch(self, arg, start, end, direction); +} + + +/////////////// bytes_index.proto /////////////// + +static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t index, int check_bounds); /*proto*/ + +/////////////// bytes_index /////////////// + +static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t index, int check_bounds) { + if (index < 0) + index += PyBytes_GET_SIZE(bytes); + if (check_bounds) { + Py_ssize_t size = PyBytes_GET_SIZE(bytes); + if (unlikely(!__Pyx_is_valid_index(index, size))) { + PyErr_SetString(PyExc_IndexError, "string index out of range"); + return (char) -1; + } + } + return PyBytes_AS_STRING(bytes)[index]; +} + + +//////////////////// StringJoin.proto //////////////////// + +#if PY_MAJOR_VERSION < 3 +#define __Pyx_PyString_Join __Pyx_PyBytes_Join +#define __Pyx_PyBaseString_Join(s, v) (PyUnicode_CheckExact(s) ? PyUnicode_Join(s, v) : __Pyx_PyBytes_Join(s, v)) +#else +#define __Pyx_PyString_Join PyUnicode_Join +#define __Pyx_PyBaseString_Join PyUnicode_Join +#endif + +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION < 3 + #define __Pyx_PyBytes_Join _PyString_Join + #else + #define __Pyx_PyBytes_Join _PyBytes_Join + #endif +#else +static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values); /*proto*/ +#endif + + +//////////////////// StringJoin //////////////////// + +#if !CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values) { + return PyObject_CallMethodObjArgs(sep, PYIDENT("join"), values, NULL); +} +#endif + + +/////////////// JoinPyUnicode.proto /////////////// + +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char); + +/////////////// JoinPyUnicode /////////////// +//@requires: IncludeStringH +//@substitute: naming + +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + CYTHON_UNUSED Py_UCS4 max_char) { +#if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyObject *result_uval; + int result_ukind; + Py_ssize_t i, char_pos; + void *result_udata; +#if CYTHON_PEP393_ENABLED + // Py 3.3+ (post PEP-393) + result_uval = PyUnicode_New(result_ulength, max_char); + if (unlikely(!result_uval)) return NULL; + result_ukind = (max_char <= 255) ? 
PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; + result_udata = PyUnicode_DATA(result_uval); +#else + // Py 2.x/3.2 (pre PEP-393) + result_uval = PyUnicode_FromUnicode(NULL, result_ulength); + if (unlikely(!result_uval)) return NULL; + result_ukind = sizeof(Py_UNICODE); + result_udata = PyUnicode_AS_UNICODE(result_uval); +#endif + + char_pos = 0; + for (i=0; i < value_count; i++) { + int ukind; + Py_ssize_t ulength; + void *udata; + PyObject *uval = PyTuple_GET_ITEM(value_tuple, i); + if (unlikely(__Pyx_PyUnicode_READY(uval))) + goto bad; + ulength = __Pyx_PyUnicode_GET_LENGTH(uval); + if (unlikely(!ulength)) + continue; + if (unlikely(char_pos + ulength < 0)) + goto overflow; + ukind = __Pyx_PyUnicode_KIND(uval); + udata = __Pyx_PyUnicode_DATA(uval); + if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) { + memcpy((char *)result_udata + char_pos * result_ukind, udata, (size_t) (ulength * result_ukind)); + } else { + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters) + _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); + #else + Py_ssize_t j; + for (j=0; j < ulength; j++) { + Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); + __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); + } + #endif + } + char_pos += ulength; + } + return result_uval; +overflow: + PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); +bad: + Py_DECREF(result_uval); + return NULL; +#else + // non-CPython fallback + result_ulength++; + value_count++; + return PyUnicode_Join($empty_unicode, value_tuple); +#endif +} + + +/////////////// BuildPyUnicode.proto /////////////// + +static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, int clength, + int prepend_sign, char padding_char); + +/////////////// BuildPyUnicode /////////////// + +// Create a PyUnicode object from an ASCII char*, e.g. a formatted number. 
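+// ulength is the total result length including sign and padding; chars and
+// clength describe the ASCII payload. A leading '-' is written first when
+// prepend_sign is set, padding_char fills the rest of the (ulength - clength)
+// gap, and the payload is copied after it.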
+ +static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars, int clength, + int prepend_sign, char padding_char) { + PyObject *uval; + Py_ssize_t uoffset = ulength - clength; +#if CYTHON_USE_UNICODE_INTERNALS + Py_ssize_t i; +#if CYTHON_PEP393_ENABLED + // Py 3.3+ (post PEP-393) + void *udata; + uval = PyUnicode_New(ulength, 127); + if (unlikely(!uval)) return NULL; + udata = PyUnicode_DATA(uval); +#else + // Py 2.x/3.2 (pre PEP-393) + Py_UNICODE *udata; + uval = PyUnicode_FromUnicode(NULL, ulength); + if (unlikely(!uval)) return NULL; + udata = PyUnicode_AS_UNICODE(uval); +#endif + if (uoffset > 0) { + i = 0; + if (prepend_sign) { + __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, 0, '-'); + i++; + } + for (; i < uoffset; i++) { + __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, i, padding_char); + } + } + for (i=0; i < clength; i++) { + __Pyx_PyUnicode_WRITE(PyUnicode_1BYTE_KIND, udata, uoffset+i, chars[i]); + } + +#else + // non-CPython + { + PyObject *sign = NULL, *padding = NULL; + uval = NULL; + if (uoffset > 0) { + prepend_sign = !!prepend_sign; + if (uoffset > prepend_sign) { + padding = PyUnicode_FromOrdinal(padding_char); + if (likely(padding) && uoffset > prepend_sign + 1) { + PyObject *tmp; + PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign); + if (unlikely(!repeat)) goto done_or_error; + tmp = PyNumber_Multiply(padding, repeat); + Py_DECREF(repeat); + Py_DECREF(padding); + padding = tmp; + } + if (unlikely(!padding)) goto done_or_error; + } + if (prepend_sign) { + sign = PyUnicode_FromOrdinal('-'); + if (unlikely(!sign)) goto done_or_error; + } + } + + uval = PyUnicode_DecodeASCII(chars, clength, NULL); + if (likely(uval) && padding) { + PyObject *tmp = PyNumber_Add(padding, uval); + Py_DECREF(uval); + uval = tmp; + } + if (likely(uval) && sign) { + PyObject *tmp = PyNumber_Add(sign, uval); + Py_DECREF(uval); + uval = tmp; + } +done_or_error: + Py_XDECREF(padding); + Py_XDECREF(sign); + } +#endif + + return uval; +} + + +//////////////////// ByteArrayAppendObject.proto //////////////////// + +static CYTHON_INLINE int __Pyx_PyByteArray_AppendObject(PyObject* bytearray, PyObject* value); + +//////////////////// ByteArrayAppendObject //////////////////// +//@requires: ByteArrayAppend + +static CYTHON_INLINE int __Pyx_PyByteArray_AppendObject(PyObject* bytearray, PyObject* value) { + Py_ssize_t ival; +#if PY_MAJOR_VERSION < 3 + if (unlikely(PyString_Check(value))) { + if (unlikely(PyString_GET_SIZE(value) != 1)) { + PyErr_SetString(PyExc_ValueError, "string must be of size 1"); + return -1; + } + ival = (unsigned char) (PyString_AS_STRING(value)[0]); + } else +#endif +#if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(value)) && likely(Py_SIZE(value) == 1 || Py_SIZE(value) == 0)) { + if (Py_SIZE(value) == 0) { + ival = 0; + } else { + ival = ((PyLongObject*)value)->ob_digit[0]; + if (unlikely(ival > 255)) goto bad_range; + } + } else +#endif + { + // CPython calls PyNumber_Index() internally + ival = __Pyx_PyIndex_AsSsize_t(value); + if (unlikely(!__Pyx_is_valid_index(ival, 256))) { + if (ival == -1 && PyErr_Occurred()) + return -1; + goto bad_range; + } + } + return __Pyx_PyByteArray_Append(bytearray, ival); +bad_range: + PyErr_SetString(PyExc_ValueError, "byte must be in range(0, 256)"); + return -1; +} + +//////////////////// ByteArrayAppend.proto //////////////////// + +static CYTHON_INLINE int __Pyx_PyByteArray_Append(PyObject* bytearray, int value); + +//////////////////// ByteArrayAppend //////////////////// +//@requires: 
ObjectHandling.c::PyObjectCallMethod1 + +static CYTHON_INLINE int __Pyx_PyByteArray_Append(PyObject* bytearray, int value) { + PyObject *pyval, *retval; +#if CYTHON_COMPILING_IN_CPYTHON + if (likely(__Pyx_is_valid_index(value, 256))) { + Py_ssize_t n = Py_SIZE(bytearray); + if (likely(n != PY_SSIZE_T_MAX)) { + if (unlikely(PyByteArray_Resize(bytearray, n + 1) < 0)) + return -1; + PyByteArray_AS_STRING(bytearray)[n] = value; + return 0; + } + } else { + PyErr_SetString(PyExc_ValueError, "byte must be in range(0, 256)"); + return -1; + } +#endif + pyval = PyInt_FromLong(value); + if (unlikely(!pyval)) + return -1; + retval = __Pyx_PyObject_CallMethod1(bytearray, PYIDENT("append"), pyval); + Py_DECREF(pyval); + if (unlikely(!retval)) + return -1; + Py_DECREF(retval); + return 0; +} + + +//////////////////// PyObjectFormat.proto //////////////////// + +#if CYTHON_USE_UNICODE_WRITER +static PyObject* __Pyx_PyObject_Format(PyObject* s, PyObject* f); +#else +#define __Pyx_PyObject_Format(s, f) PyObject_Format(s, f) +#endif + +//////////////////// PyObjectFormat //////////////////// + +#if CYTHON_USE_UNICODE_WRITER +static PyObject* __Pyx_PyObject_Format(PyObject* obj, PyObject* format_spec) { + int ret; + _PyUnicodeWriter writer; + + if (likely(PyFloat_CheckExact(obj))) { + // copied from CPython 3.5 "float__format__()" in floatobject.c +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x03040000 + _PyUnicodeWriter_Init(&writer, 0); +#else + _PyUnicodeWriter_Init(&writer); +#endif + ret = _PyFloat_FormatAdvancedWriter( + &writer, + obj, + format_spec, 0, PyUnicode_GET_LENGTH(format_spec)); + } else if (likely(PyLong_CheckExact(obj))) { + // copied from CPython 3.5 "long__format__()" in longobject.c +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x03040000 + _PyUnicodeWriter_Init(&writer, 0); +#else + _PyUnicodeWriter_Init(&writer); +#endif + ret = _PyLong_FormatAdvancedWriter( + &writer, + obj, + format_spec, 0, PyUnicode_GET_LENGTH(format_spec)); + } else { + return PyObject_Format(obj, format_spec); + } + + if (unlikely(ret == -1)) { + _PyUnicodeWriter_Dealloc(&writer); + return NULL; + } + return _PyUnicodeWriter_Finish(&writer); +} +#endif + + +//////////////////// PyObjectFormatSimple.proto //////////////////// + +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_FormatSimple(s, f) ( \ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) : \ + PyObject_Format(s, f)) +#elif PY_MAJOR_VERSION < 3 + // str is common in Py2, but formatting must return a Unicode string + #define __Pyx_PyObject_FormatSimple(s, f) ( \ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) : \ + likely(PyString_CheckExact(s)) ? PyUnicode_FromEncodedObject(s, NULL, "strict") : \ + PyObject_Format(s, f)) +#elif CYTHON_USE_TYPE_SLOTS + // Py3 nicely returns unicode strings from str() which makes this quite efficient for builtin types + #define __Pyx_PyObject_FormatSimple(s, f) ( \ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) : \ + likely(PyLong_CheckExact(s)) ? PyLong_Type.tp_str(s) : \ + likely(PyFloat_CheckExact(s)) ? PyFloat_Type.tp_str(s) : \ + PyObject_Format(s, f)) +#else + #define __Pyx_PyObject_FormatSimple(s, f) ( \ + likely(PyUnicode_CheckExact(s)) ? 
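
The __Pyx_PyObject_FormatSimple macro family above is a fast-path dispatch around format(). Restated in Python (a sketch, assuming the empty format spec that Cython emits for plain f-string interpolation):

```python
def format_simple(obj, spec=""):
    if type(obj) is str:        # PyUnicode_CheckExact: pass through untouched
        return obj
    return format(obj, spec)    # PyObject_Format fallback

assert format_simple("x") == "x"
assert format_simple(42) == "42"
assert format_simple(1.5, ".2f") == "1.50"
```
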
(Py_INCREF(s), s) : \ + PyObject_Format(s, f)) +#endif + + +//////////////////// PyObjectFormatAndDecref.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatSimpleAndDecref(PyObject* s, PyObject* f); +static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatAndDecref(PyObject* s, PyObject* f); + +//////////////////// PyObjectFormatAndDecref //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatSimpleAndDecref(PyObject* s, PyObject* f) { + if (unlikely(!s)) return NULL; + if (likely(PyUnicode_CheckExact(s))) return s; + #if PY_MAJOR_VERSION < 3 + // str is common in Py2, but formatting must return a Unicode string + if (likely(PyString_CheckExact(s))) { + PyObject *result = PyUnicode_FromEncodedObject(s, NULL, "strict"); + Py_DECREF(s); + return result; + } + #endif + return __Pyx_PyObject_FormatAndDecref(s, f); +} + +static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatAndDecref(PyObject* s, PyObject* f) { + PyObject *result = PyObject_Format(s, f); + Py_DECREF(s); + return result; +} + + +//////////////////// PyUnicode_Unicode.proto //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj);/*proto*/ + +//////////////////// PyUnicode_Unicode //////////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj) { + if (unlikely(obj == Py_None)) + obj = PYUNICODE("None"); + return __Pyx_NewRef(obj); +} + + +//////////////////// PyObject_Unicode.proto //////////////////// + +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyObject_Unicode(obj) \ + (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Str(obj)) +#else +#define __Pyx_PyObject_Unicode(obj) \ + (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Unicode(obj)) +#endif diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/TestCyUtilityLoader.pyx b/venv/lib/python3.8/site-packages/Cython/Utility/TestCyUtilityLoader.pyx new file mode 100644 index 0000000..00e7a76 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/TestCyUtilityLoader.pyx @@ -0,0 +1,8 @@ +########## TestCyUtilityLoader ########## +#@requires: OtherUtility + +test {{cy_loader}} impl + + +########## OtherUtility ########## +req {{cy_loader}} impl diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/TestCythonScope.pyx b/venv/lib/python3.8/site-packages/Cython/Utility/TestCythonScope.pyx new file mode 100644 index 0000000..f585be2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/TestCythonScope.pyx @@ -0,0 +1,64 @@ +########## TestClass ########## +# These utilities are for testing purposes + +cdef extern from *: + cdef object __pyx_test_dep(object) + +@cname('__pyx_TestClass') +cdef class TestClass(object): + cdef public int value + + def __init__(self, int value): + self.value = value + + def __str__(self): + return 'TestClass(%d)' % self.value + + cdef cdef_method(self, int value): + print 'Hello from cdef_method', value + + cpdef cpdef_method(self, int value): + print 'Hello from cpdef_method', value + + def def_method(self, int value): + print 'Hello from def_method', value + + @cname('cdef_cname') + cdef cdef_cname_method(self, int value): + print "Hello from cdef_cname_method", value + + @cname('cpdef_cname') + cpdef cpdef_cname_method(self, int value): + print "Hello from cpdef_cname_method", value + + @cname('def_cname') + def def_cname_method(self, int value): + print "Hello from def_cname_method", value + +@cname('__pyx_test_call_other_cy_util') +cdef test_call(obj): + print 'test_call' + 
__pyx_test_dep(obj)
+
+@cname('__pyx_TestClass_New')
+cdef _testclass_new(int value):
+    return TestClass(value)
+
+########## TestDep ##########
+
+@cname('__pyx_test_dep')
+cdef test_dep(obj):
+    print 'test_dep', obj
+
+########## TestScope ##########
+
+@cname('__pyx_testscope')
+cdef object _testscope(int value):
+    return "hello from cython scope, value=%d" % value
+
+########## View.TestScope ##########
+
+@cname('__pyx_view_testscope')
+cdef object _testscope(int value):
+    return "hello from cython.view scope, value=%d" % value
+
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/TestUtilityLoader.c b/venv/lib/python3.8/site-packages/Cython/Utility/TestUtilityLoader.c
new file mode 100644
index 0000000..595305f
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/TestUtilityLoader.c
@@ -0,0 +1,12 @@
+////////// TestUtilityLoader.proto //////////
+test {{loader}} prototype
+
+////////// TestUtilityLoader //////////
+//@requires: OtherUtility
+test {{loader}} impl
+
+////////// OtherUtility.proto //////////
+req {{loader}} proto
+
+////////// OtherUtility //////////
+req {{loader}} impl
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/TypeConversion.c b/venv/lib/python3.8/site-packages/Cython/Utility/TypeConversion.c
new file mode 100644
index 0000000..1983457
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/TypeConversion.c
@@ -0,0 +1,986 @@
+/////////////// TypeConversions.proto ///////////////
+
+/* Type Conversion Predeclarations */
+
+#define __Pyx_uchar_cast(c) ((unsigned char)c)
+#define __Pyx_long_cast(x) ((long)x)
+
+#define __Pyx_fits_Py_ssize_t(v, type, is_signed) ( \
+    (sizeof(type) < sizeof(Py_ssize_t)) || \
+    (sizeof(type) > sizeof(Py_ssize_t) && \
+        likely(v < (type)PY_SSIZE_T_MAX || \
+               v == (type)PY_SSIZE_T_MAX) && \
+        (!is_signed || likely(v > (type)PY_SSIZE_T_MIN || \
+                              v == (type)PY_SSIZE_T_MIN))) || \
+    (sizeof(type) == sizeof(Py_ssize_t) && \
+        (is_signed || likely(v < (type)PY_SSIZE_T_MAX || \
+                             v == (type)PY_SSIZE_T_MAX))) )
+
+static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
+    // Optimisation from Section 14.2 "Bounds Checking" in
+    // https://www.agner.org/optimize/optimizing_cpp.pdf
+    // See https://bugs.python.org/issue28397
+    // The cast to unsigned effectively tests for "0 <= i < limit".
+    return (size_t) i < (size_t) limit;
+}
+
+// fast and unsafe abs(Py_ssize_t) that ignores the overflow for (-PY_SSIZE_T_MAX-1)
+#if defined (__cplusplus) && __cplusplus >= 201103L
+    #include <cstdlib>
+    #define __Pyx_sst_abs(value) std::abs(value)
+#elif SIZEOF_INT >= SIZEOF_SIZE_T
+    #define __Pyx_sst_abs(value) abs(value)
+#elif SIZEOF_LONG >= SIZEOF_SIZE_T
+    #define __Pyx_sst_abs(value) labs(value)
+#elif defined (_MSC_VER)
+    // abs() is defined for long, but 64-bits type on MSVC is long long.
+    // Use MS-specific _abs64 instead.
+    #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
+#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+    #define __Pyx_sst_abs(value) llabs(value)
+#elif defined (__GNUC__)
+    // gcc or clang on 64 bit windows.
+    #define __Pyx_sst_abs(value) __builtin_llabs(value)
+#else
+    #define __Pyx_sst_abs(value) ((value<0) ?
-value : value) +#endif + +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); + +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); + +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif + +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) + +// There used to be a Py_UNICODE_strlen() in CPython 3.x, but it is deprecated since Py3.3. +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} + +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode + +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); + +#define __Pyx_PySequence_Tuple(obj) \ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) + +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); + +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
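
The __Pyx_is_valid_index helper above folds "0 <= i < limit" into a single unsigned comparison: a negative index cast to size_t wraps to a huge value that necessarily fails "< limit". Emulated here with an explicit wrap (assuming a 64-bit size_t):

```python
MASK = 2**64 - 1                      # size_t wrap-around, assuming 64 bits

def is_valid_index(i, limit):
    return (i & MASK) < limit         # one comparison instead of two

assert is_valid_index(3, 10)
assert not is_valid_index(10, 10)
assert not is_valid_index(-1, 10)     # -1 wraps to 2**64 - 1
```
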
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) + +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) + +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif + +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) + +// __PYX_DEFAULT_STRING_ENCODING is either a user provided string constant +// or we need to look it up here +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; + +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + +/////////////// TypeConversions /////////////// + +/* Type Conversion Functions */ + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return 
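
__Pyx_init_sys_getdefaultencoding_params above verifies that the configured default encoding is a superset of ASCII by round-tripping the first 128 code points. The same check, restated in Python:

```python
def is_ascii_superset(encoding):
    ascii_chars = "".join(map(chr, range(128)))
    try:
        return ascii_chars.encode(encoding) == bytes(range(128))
    except (UnicodeEncodeError, LookupError):
        return False

assert is_ascii_superset("utf-8")
assert is_ascii_superset("iso8859-1")
assert not is_ascii_superset("utf-16")   # BOM + 2 bytes per character
```
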
__Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} + +// Py3.7 returns a "const char*" for unicode strings +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} + +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + // borrowed reference, cached internally in 'o' by CPython + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + // raise the error + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif /*__PYX_DEFAULT_STRING_ENCODING_IS_ASCII*/ + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} + +#else /* CYTHON_PEP393_ENABLED: */ + +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + // cached for the lifetime of the object + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + // raise the error + PyUnicode_AsASCIIString(o); + return NULL; + } +#else /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */ + return PyUnicode_AsUTF8AndSize(o, length); +#endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */ +} +#endif /* CYTHON_PEP393_ENABLED */ +#endif + +// Py3.7 returns a "const char*" for unicode strings +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT */ + +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} + +/* Note: __Pyx_PyObject_IsTrue is written to minimize branching. */ +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} + +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} + +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + // CPython issue #17576: warn if 'result' not of exact type int. + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). 
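
__Pyx_PyObject_AsStringAndSize above dispatches on the object's type: unicode gets encoded, while bytearray and bytes hand out their internal buffer directly. A loose Python rendering (UTF-8 stands in for the configured default encoding; the function name is ours):

```python
def as_string_and_size(o):
    if isinstance(o, str):
        data = o.encode("utf-8")          # the PyUnicode_AsUTF8AndSize branch
    elif isinstance(o, (bytes, bytearray)):
        data = bytes(o)                   # the buffer is exposed directly in C
    else:
        raise TypeError("expected str, bytes or bytearray")
    return data, len(data)

assert as_string_and_size("h\xe4h") == (b"h\xc3\xa4h", 4)
assert as_string_and_size(bytearray(b"ab")) == (b"ab", 2)
```
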
" + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} + +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} + +{{py: from Cython.Utility import pylong_join }} + +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + // handle most common case first to avoid indirect branch and optimise branch prediction + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + {{for _size in (2, 3, 4)}} + {{for _case in (_size, -_size)}} + case {{_case}}: + if (8 * sizeof(Py_ssize_t) > {{_size}} * PyLong_SHIFT) { + return {{'-' if _case < 0 else ''}}(Py_ssize_t) {{pylong_join(_size, 'digits', 'size_t')}}; + } + break; + {{endfor}} + {{endfor}} + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} + + +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} + + +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +/////////////// ToPyCTupleUtility.proto /////////////// +static PyObject* {{funcname}}({{struct_type_decl}}); + +/////////////// ToPyCTupleUtility /////////////// +static PyObject* {{funcname}}({{struct_type_decl}} value) { + PyObject* item = NULL; + PyObject* result = PyTuple_New({{size}}); + if (!result) goto bad; + + {{for ix, component in enumerate(components):}} + {{py:attr = "value.f%s" % ix}} + item = {{component.to_py_function}}({{attr}}); + if (!item) goto bad; + PyTuple_SET_ITEM(result, {{ix}}, item); + {{endfor}} + + return result; +bad: + Py_XDECREF(item); + Py_XDECREF(result); + return NULL; +} + + +/////////////// FromPyCTupleUtility.proto /////////////// +static {{struct_type_decl}} {{funcname}}(PyObject *); + +/////////////// FromPyCTupleUtility /////////////// +static {{struct_type_decl}} {{funcname}}(PyObject * o) { + {{struct_type_decl}} result; + + if (!PyTuple_Check(o) || PyTuple_GET_SIZE(o) != {{size}}) { + PyErr_Format(PyExc_TypeError, "Expected %.16s of size %d, got %.200s", "a tuple", {{size}}, Py_TYPE(o)->tp_name); + goto bad; + } + +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + {{for ix, component in enumerate(components):}} + {{py:attr = "result.f%s" % ix}} + {{attr}} = {{component.from_py_function}}(PyTuple_GET_ITEM(o, {{ix}})); + if ({{component.error_condition(attr)}}) goto bad; + {{endfor}} +#else + { + PyObject *item; + {{for ix, component in enumerate(components):}} + {{py:attr = "result.f%s" % ix}} + item = PySequence_ITEM(o, {{ix}}); if (unlikely(!item)) goto bad; + {{attr}} = {{component.from_py_function}}(item); + Py_DECREF(item); + if ({{component.error_condition(attr)}}) goto bad; + {{endfor}} + } +#endif + + return result; +bad: + return result; +} + + +/////////////// UnicodeAsUCS4.proto /////////////// + +static CYTHON_INLINE Py_UCS4 __Pyx_PyUnicode_AsPy_UCS4(PyObject*); + +/////////////// UnicodeAsUCS4 /////////////// + +static CYTHON_INLINE Py_UCS4 __Pyx_PyUnicode_AsPy_UCS4(PyObject* x) { + Py_ssize_t length; + #if CYTHON_PEP393_ENABLED + length = PyUnicode_GET_LENGTH(x); + if (likely(length == 1)) { + return PyUnicode_READ_CHAR(x, 0); + } + #else + length = PyUnicode_GET_SIZE(x); + if (likely(length == 1)) { + return PyUnicode_AS_UNICODE(x)[0]; + } + #if Py_UNICODE_SIZE == 2 + else if (PyUnicode_GET_SIZE(x) == 2) { + Py_UCS4 high_val = PyUnicode_AS_UNICODE(x)[0]; + if (high_val >= 0xD800 && high_val <= 0xDBFF) { + Py_UCS4 low_val = PyUnicode_AS_UNICODE(x)[1]; + if (low_val >= 0xDC00 && low_val <= 0xDFFF) { + return 0x10000 + (((high_val & ((1<<10)-1)) << 10) | (low_val & ((1<<10)-1))); + } + } + } + #endif + #endif + PyErr_Format(PyExc_ValueError, + "only single character unicode strings can be converted to Py_UCS4, " + "got length %" CYTHON_FORMAT_SSIZE_T "d", length); + return (Py_UCS4)-1; +} + + +/////////////// ObjectAsUCS4.proto /////////////// +//@requires: UnicodeAsUCS4 + +#define __Pyx_PyObject_AsPy_UCS4(x) \ + (likely(PyUnicode_Check(x)) ? 
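
The narrow-build branch of __Pyx_PyUnicode_AsPy_UCS4 above reassembles a UTF-16 surrogate pair into one code point. The same bit arithmetic, verified in Python:

```python
def combine_surrogates(high, low):
    assert 0xD800 <= high <= 0xDBFF and 0xDC00 <= low <= 0xDFFF
    return 0x10000 + (((high & 0x3FF) << 10) | (low & 0x3FF))

# U+1F600 is the surrogate pair D83D/DE00 in UTF-16:
assert combine_surrogates(0xD83D, 0xDE00) == 0x1F600
assert chr(combine_surrogates(0xD83D, 0xDE00)) == "\U0001F600"
```
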
__Pyx_PyUnicode_AsPy_UCS4(x) : __Pyx__PyObject_AsPy_UCS4(x)) +static Py_UCS4 __Pyx__PyObject_AsPy_UCS4(PyObject*); + +/////////////// ObjectAsUCS4 /////////////// + +static Py_UCS4 __Pyx__PyObject_AsPy_UCS4_raise_error(long ival) { + if (ival < 0) { + if (!PyErr_Occurred()) + PyErr_SetString(PyExc_OverflowError, + "cannot convert negative value to Py_UCS4"); + } else { + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to Py_UCS4"); + } + return (Py_UCS4)-1; +} + +static Py_UCS4 __Pyx__PyObject_AsPy_UCS4(PyObject* x) { + long ival; + ival = __Pyx_PyInt_As_long(x); + if (unlikely(!__Pyx_is_valid_index(ival, 1114111 + 1))) { + return __Pyx__PyObject_AsPy_UCS4_raise_error(ival); + } + return (Py_UCS4)ival; +} + + +/////////////// ObjectAsPyUnicode.proto /////////////// + +static CYTHON_INLINE Py_UNICODE __Pyx_PyObject_AsPy_UNICODE(PyObject*); + +/////////////// ObjectAsPyUnicode /////////////// + +static CYTHON_INLINE Py_UNICODE __Pyx_PyObject_AsPy_UNICODE(PyObject* x) { + long ival; + #if CYTHON_PEP393_ENABLED + #if Py_UNICODE_SIZE > 2 + const long maxval = 1114111; + #else + const long maxval = 65535; + #endif + #else + static long maxval = 0; + #endif + if (PyUnicode_Check(x)) { + if (unlikely(__Pyx_PyUnicode_GET_LENGTH(x) != 1)) { + PyErr_Format(PyExc_ValueError, + "only single character unicode strings can be converted to Py_UNICODE, " + "got length %" CYTHON_FORMAT_SSIZE_T "d", __Pyx_PyUnicode_GET_LENGTH(x)); + return (Py_UNICODE)-1; + } + #if CYTHON_PEP393_ENABLED + ival = PyUnicode_READ_CHAR(x, 0); + #else + return PyUnicode_AS_UNICODE(x)[0]; + #endif + } else { + #if !CYTHON_PEP393_ENABLED + if (unlikely(!maxval)) + maxval = (long)PyUnicode_GetMax(); + #endif + ival = __Pyx_PyInt_As_long(x); + } + if (unlikely(!__Pyx_is_valid_index(ival, maxval + 1))) { + if (ival < 0) { + if (!PyErr_Occurred()) + PyErr_SetString(PyExc_OverflowError, + "cannot convert negative value to Py_UNICODE"); + return (Py_UNICODE)-1; + } else { + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to Py_UNICODE"); + } + return (Py_UNICODE)-1; + } + return (Py_UNICODE)ival; +} + + +/////////////// CIntToPy.proto /////////////// + +static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value); + +/////////////// CIntToPy /////////////// + +static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value) { + const {{TYPE}} neg_one = ({{TYPE}}) (({{TYPE}}) 0 - ({{TYPE}}) 1), const_zero = ({{TYPE}}) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof({{TYPE}}) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof({{TYPE}}) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof({{TYPE}}) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof({{TYPE}}) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof({{TYPE}}) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof({{TYPE}}), + little, !is_unsigned); + } +} + + +/////////////// CIntToDigits /////////////// + +static const char DIGIT_PAIRS_10[2*10*10+1] = { + "00010203040506070809" + "10111213141516171819" + "20212223242526272829" + "30313233343536373839" + 
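
CIntToPy's last resort above hands the raw bytes of the C value to _PyLong_FromByteArray, probing byte order with `int one = 1`. int.to_bytes/from_bytes express the same round trip in Python (the width of 8 assumes a 64-bit C type):

```python
import sys

value = -123456789
raw = value.to_bytes(8, sys.byteorder, signed=True)
assert int.from_bytes(raw, sys.byteorder, signed=True) == value

# The C probe `little = (int)*(unsigned char *)&one` in Python terms:
little = (1).to_bytes(2, sys.byteorder)[0] == 1
assert little == (sys.byteorder == "little")
```
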
"40414243444546474849" + "50515253545556575859" + "60616263646566676869" + "70717273747576777879" + "80818283848586878889" + "90919293949596979899" +}; + +static const char DIGIT_PAIRS_8[2*8*8+1] = { + "0001020304050607" + "1011121314151617" + "2021222324252627" + "3031323334353637" + "4041424344454647" + "5051525354555657" + "6061626364656667" + "7071727374757677" +}; + +static const char DIGITS_HEX[2*16+1] = { + "0123456789abcdef" + "0123456789ABCDEF" +}; + + +/////////////// CIntToPyUnicode.proto /////////////// + +static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value, Py_ssize_t width, char padding_char, char format_char); + +/////////////// CIntToPyUnicode /////////////// +//@requires: StringTools.c::BuildPyUnicode +//@requires: CIntToDigits + +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned short uint16_t; + #else + typedef unsigned __int16 uint16_t; + #endif + #endif +#else + #include +#endif + +// NOTE: inlining because most arguments are constant, which collapses lots of code below + +// GCC diagnostic pragmas were introduced in GCC 4.6 +#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define GCC_DIAGNOSTIC +#endif +static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value, Py_ssize_t width, char padding_char, char format_char) { + // simple and conservative C string allocation on the stack: each byte gives at most 3 digits, plus sign + char digits[sizeof({{TYPE}})*3+2]; + // 'dpos' points to end of digits array + 1 initially to allow for pre-decrement looping + char *dpos, *end = digits + sizeof({{TYPE}})*3+2; + const char *hex_digits = DIGITS_HEX; + Py_ssize_t length, ulength; + int prepend_sign, last_one_off; + {{TYPE}} remaining; +#ifdef GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const {{TYPE}} neg_one = ({{TYPE}}) -1, const_zero = ({{TYPE}}) 0; +#ifdef GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + + if (format_char == 'X') { + hex_digits += 16; + format_char = 'x'; + } + + // surprise: even trivial sprintf() calls don't get optimised in gcc (4.8) + remaining = value; /* not using abs(value) to avoid overflow problems */ + last_one_off = 0; + dpos = end; + do { + int digit_pos; + switch (format_char) { + case 'o': + digit_pos = abs((int)(remaining % (8*8))); + remaining = ({{TYPE}}) (remaining / (8*8)); + dpos -= 2; + *(uint16_t*)dpos = ((const uint16_t*)DIGIT_PAIRS_8)[digit_pos]; /* copy 2 digits at a time */ + last_one_off = (digit_pos < 8); + break; + case 'd': + digit_pos = abs((int)(remaining % (10*10))); + remaining = ({{TYPE}}) (remaining / (10*10)); + dpos -= 2; + *(uint16_t*)dpos = ((const uint16_t*)DIGIT_PAIRS_10)[digit_pos]; /* copy 2 digits at a time */ + last_one_off = (digit_pos < 10); + break; + case 'x': + *(--dpos) = hex_digits[abs((int)(remaining % 16))]; + remaining = ({{TYPE}}) (remaining / 16); + break; + default: + assert(0); + break; + } + } while (unlikely(remaining != 0)); + + if (last_one_off) { + assert(*dpos == '0'); + dpos++; + } + length = end - dpos; + ulength = length; + prepend_sign = 0; + if (!is_unsigned && value <= neg_one) { + if (padding_char == ' ' || width <= length + 1) { + *(--dpos) = '-'; + ++length; + } else { + prepend_sign = 1; + } + ++ulength; + } + if (width > ulength) { + ulength = width; + } + // single character unicode strings are cached in CPython => use PyUnicode_FromOrdinal() for them + if (ulength == 1) { + return 
PyUnicode_FromOrdinal(*dpos); + } + return __Pyx_PyUnicode_BuildFromAscii(ulength, dpos, (int) length, prepend_sign, padding_char); +} + + +/////////////// CBIntToPyUnicode.proto /////////////// + +#define {{TO_PY_FUNCTION}}(value) \ + ((value) ? __Pyx_NewRef({{TRUE_CONST}}) : __Pyx_NewRef({{FALSE_CONST}})) + + +/////////////// PyIntFromDouble.proto /////////////// + +#if PY_MAJOR_VERSION < 3 +static CYTHON_INLINE PyObject* __Pyx_PyInt_FromDouble(double value); +#else +#define __Pyx_PyInt_FromDouble(value) PyLong_FromDouble(value) +#endif + +/////////////// PyIntFromDouble /////////////// + +#if PY_MAJOR_VERSION < 3 +static CYTHON_INLINE PyObject* __Pyx_PyInt_FromDouble(double value) { + if (value >= (double)LONG_MIN && value <= (double)LONG_MAX) { + return PyInt_FromLong((long)value); + } + return PyLong_FromDouble(value); +} +#endif + + +/////////////// CIntFromPyVerify /////////////// + +// see CIntFromPy +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value) \ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) + +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value) \ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) + +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc) \ + { \ + func_type value = func_value; \ + if (sizeof(target_type) < sizeof(func_type)) { \ + if (unlikely(value != (func_type) (target_type) value)) { \ + func_type zero = 0; \ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred())) \ + return (target_type) -1; \ + if (is_unsigned && unlikely(value < zero)) \ + goto raise_neg_overflow; \ + else \ + goto raise_overflow; \ + } \ + } \ + return (target_type) value; \ + } + + +/////////////// CIntFromPy.proto /////////////// + +static CYTHON_INLINE {{TYPE}} {{FROM_PY_FUNCTION}}(PyObject *); + +/////////////// CIntFromPy /////////////// +//@requires: CIntFromPyVerify + +{{py: from Cython.Utility import pylong_join }} + +static CYTHON_INLINE {{TYPE}} {{FROM_PY_FUNCTION}}(PyObject *x) { + const {{TYPE}} neg_one = ({{TYPE}}) (({{TYPE}}) 0 - ({{TYPE}}) 1), const_zero = ({{TYPE}}) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof({{TYPE}}) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT({{TYPE}}, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return ({{TYPE}}) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return ({{TYPE}}) 0; + case 1: __PYX_VERIFY_RETURN_INT({{TYPE}}, digit, digits[0]) + {{for _size in (2, 3, 4)}} + case {{_size}}: + if (8 * sizeof({{TYPE}}) > {{_size-1}} * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > {{_size}} * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT({{TYPE}}, unsigned long, {{pylong_join(_size, 'digits')}}) + } else if (8 * sizeof({{TYPE}}) >= {{_size}} * PyLong_SHIFT) { + return ({{TYPE}}) {{pylong_join(_size, 'digits', TYPE)}}; + } + } + break; + {{endfor}} + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + // misuse Py_False as a quick way to compare to a '0' int object in PyPy + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return ({{TYPE}}) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if 
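
__PYX__VERIFY_RETURN_INT above guards narrowing casts by round-tripping: cast to the target type, cast back, compare. Emulated with two's-complement masking for a hypothetical 16-bit signed target:

```python
def fits(value, bits=16, signed=True):
    mask = (1 << bits) - 1
    narrowed = value & mask                # cast to the target type
    if signed and narrowed >= 1 << (bits - 1):
        narrowed -= 1 << bits              # reinterpret the sign bit
    return narrowed == value               # cast back and compare

assert fits(32767)        # in range for int16_t
assert not fits(32768)    # -> raise_overflow
assert fits(-32768)
assert not fits(-32769)   # -> raise_neg_overflow
```
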
(sizeof({{TYPE}}) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC({{TYPE}}, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof({{TYPE}}) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC({{TYPE}}, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { + // signed +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return ({{TYPE}}) 0; + case -1: __PYX_VERIFY_RETURN_INT({{TYPE}}, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT({{TYPE}}, digit, +digits[0]) + {{for _size in (2, 3, 4)}} + {{for _case in (-_size, _size)}} + case {{_case}}: + if (8 * sizeof({{TYPE}}){{' - 1' if _case < 0 else ''}} > {{_size-1}} * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > {{_size}} * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT({{TYPE}}, {{'long' if _case < 0 else 'unsigned long'}}, {{'-(long) ' if _case < 0 else ''}}{{pylong_join(_size, 'digits')}}) + } else if (8 * sizeof({{TYPE}}) - 1 > {{_size}} * PyLong_SHIFT) { + return ({{TYPE}}) ({{'((%s)-1)*' % TYPE if _case < 0 else ''}}{{pylong_join(_size, 'digits', TYPE)}}); + } + } + break; + {{endfor}} + {{endfor}} + } +#endif + if (sizeof({{TYPE}}) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC({{TYPE}}, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof({{TYPE}}) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC({{TYPE}}, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + {{TYPE}} val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return ({{TYPE}}) -1; + } + } else { + {{TYPE}} val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return ({{TYPE}}) -1; + val = {{FROM_PY_FUNCTION}}(tmp); + Py_DECREF(tmp); + return val; + } + +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to {{TYPE}}"); + return ({{TYPE}}) -1; + +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to {{TYPE}}"); + return ({{TYPE}}) -1; +} diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/__init__.py b/venv/lib/python3.8/site-packages/Cython/Utility/__init__.py new file mode 100644 index 0000000..73ccc1e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utility/__init__.py @@ -0,0 +1,29 @@ + +def pylong_join(count, digits_ptr='digits', join_type='unsigned long'): + """ + Generate an unrolled shift-then-or loop over the first 'count' digits. + Assumes that they fit into 'join_type'. 
+
+    (((d[2] << n) | d[1]) << n) | d[0]
+    """
+    return ('(' * (count * 2) + ' | '.join(
+        "(%s)%s[%d])%s)" % (join_type, digits_ptr, _i, " << PyLong_SHIFT" if _i else '')
+        for _i in range(count-1, -1, -1)))
+
+
+# although it could potentially make use of data independence,
+# this implementation is a bit slower than the simpler one above
+def _pylong_join(count, digits_ptr='digits', join_type='unsigned long'):
+    """
+    Generate an or-ed series of shifts for the first 'count' digits.
+    Assumes that they fit into 'join_type'.
+
+    (d[2] << 2*n) | (d[1] << 1*n) | d[0]
+    """
+    def shift(n):
+        # avoid compiler warnings for overly large shifts that will be discarded anyway
+        return " << (%d * PyLong_SHIFT < 8 * sizeof(%s) ? %d * PyLong_SHIFT : 0)" % (n, join_type, n) if n else ''
+
+    return '(%s)' % ' | '.join(
+        "(((%s)%s[%d])%s)" % (join_type, digits_ptr, i, shift(i))
+        for i in range(count-1, -1, -1))
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/Utility/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..d1cc6d2
Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/Utility/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/Cython/Utility/arrayarray.h b/venv/lib/python3.8/site-packages/Cython/Utility/arrayarray.h
new file mode 100644
index 0000000..61a883f
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Cython/Utility/arrayarray.h
@@ -0,0 +1,149 @@
+/////////////// ArrayAPI.proto ///////////////
+
+// arrayarray.h
+//
+// Artificial C-API for Python's <array.array> type,
+// used by array.pxd
+//
+// last changes: 2009-05-15 rk
+//               2012-05-02 andreasvc
+// (see revision control)
+//
+
+#ifndef _ARRAYARRAY_H
+#define _ARRAYARRAY_H
+
+// These two forward declarations are explicitly handled in the type
+// declaration code, as including them here is too late for cython-defined
+// types to use them.
+// struct arrayobject;
+// typedef struct arrayobject arrayobject;
+
+// All possible arraydescr values are defined in the vector "descriptors"
+// below. That's defined later because the appropriate get and set
+// functions aren't visible yet.
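
Since pylong_join runs at code-generation time, it can simply be called to inspect the C expression it emits (assuming the vendored checkout above is importable):

```python
from Cython.Utility import pylong_join

print(pylong_join(3))
# Expected output (one line):
# ((((((unsigned long)digits[2]) << PyLong_SHIFT) |
#     (unsigned long)digits[1]) << PyLong_SHIFT) |
#     (unsigned long)digits[0]))
```

That is exactly the (((d[2] << n) | d[1]) << n) | d[0] shape promised by the docstring.
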
+typedef struct arraydescr { + int typecode; + int itemsize; + PyObject * (*getitem)(struct arrayobject *, Py_ssize_t); + int (*setitem)(struct arrayobject *, Py_ssize_t, PyObject *); +#if PY_MAJOR_VERSION >= 3 + char *formats; +#endif +} arraydescr; + + +struct arrayobject { + PyObject_HEAD + Py_ssize_t ob_size; + union { + char *ob_item; + float *as_floats; + double *as_doubles; + int *as_ints; + unsigned int *as_uints; + unsigned char *as_uchars; + signed char *as_schars; + char *as_chars; + unsigned long *as_ulongs; + long *as_longs; +#if PY_MAJOR_VERSION >= 3 + unsigned long long *as_ulonglongs; + long long *as_longlongs; +#endif + short *as_shorts; + unsigned short *as_ushorts; + Py_UNICODE *as_pyunicodes; + void *as_voidptr; + } data; + Py_ssize_t allocated; + struct arraydescr *ob_descr; + PyObject *weakreflist; /* List of weak references */ +#if PY_MAJOR_VERSION >= 3 + int ob_exports; /* Number of exported buffers */ +#endif +}; + +#ifndef NO_NEWARRAY_INLINE +// fast creation of a new array +static CYTHON_INLINE PyObject * newarrayobject(PyTypeObject *type, Py_ssize_t size, + struct arraydescr *descr) { + arrayobject *op; + size_t nbytes; + + if (size < 0) { + PyErr_BadInternalCall(); + return NULL; + } + + nbytes = size * descr->itemsize; + // Check for overflow + if (nbytes / descr->itemsize != (size_t)size) { + return PyErr_NoMemory(); + } + op = (arrayobject *) type->tp_alloc(type, 0); + if (op == NULL) { + return NULL; + } + op->ob_descr = descr; + op->allocated = size; + op->weakreflist = NULL; + op->ob_size = size; + if (size <= 0) { + op->data.ob_item = NULL; + } + else { + op->data.ob_item = PyMem_NEW(char, nbytes); + if (op->data.ob_item == NULL) { + Py_DECREF(op); + return PyErr_NoMemory(); + } + } + return (PyObject *) op; +} +#else +PyObject* newarrayobject(PyTypeObject *type, Py_ssize_t size, + struct arraydescr *descr); +#endif /* ifndef NO_NEWARRAY_INLINE */ + +// fast resize (reallocation to the point) +// not designed for filing small increments (but for fast opaque array apps) +static CYTHON_INLINE int resize(arrayobject *self, Py_ssize_t n) { + void *items = (void*) self->data.ob_item; + PyMem_Resize(items, char, (size_t)(n * self->ob_descr->itemsize)); + if (items == NULL) { + PyErr_NoMemory(); + return -1; + } + self->data.ob_item = (char*) items; + self->ob_size = n; + self->allocated = n; + return 0; +} + +// suitable for small increments; over allocation 50% ; +static CYTHON_INLINE int resize_smart(arrayobject *self, Py_ssize_t n) { + void *items = (void*) self->data.ob_item; + Py_ssize_t newsize; + if (n < self->allocated && n*4 > self->allocated) { + self->ob_size = n; + return 0; + } + newsize = n + (n / 2) + 1; + if (newsize <= n) { /* overflow */ + PyErr_NoMemory(); + return -1; + } + PyMem_Resize(items, char, (size_t)(newsize * self->ob_descr->itemsize)); + if (items == NULL) { + PyErr_NoMemory(); + return -1; + } + self->data.ob_item = (char*) items; + self->ob_size = n; + self->allocated = newsize; + return 0; +} + +#endif +/* _ARRAYARRAY_H */ diff --git a/venv/lib/python3.8/site-packages/Cython/Utils.py b/venv/lib/python3.8/site-packages/Cython/Utils.py new file mode 100644 index 0000000..83b2988 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/Utils.py @@ -0,0 +1,464 @@ +# +# Cython -- Things that don't belong +# anywhere else in particular +# + +from __future__ import absolute_import + +try: + from __builtin__ import basestring +except ImportError: + basestring = str + +try: + FileNotFoundError +except NameError: + FileNotFoundError = 
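
resize_smart above keeps shrink requests that still use more than a quarter of the allocation, and otherwise reallocates to n + n/2 + 1. The resulting growth pattern, simulated in Python:

```python
def grow(allocated, n):
    if n < allocated and n * 4 > allocated:
        return allocated                # keep the buffer
    return n + (n // 2) + 1             # 50% overallocation, as in the C code

alloc, history = 0, []
for n in range(1, 8):                   # append one element at a time
    alloc = grow(alloc, n)
    history.append(alloc)
assert history == [2, 4, 4, 7, 7, 7, 11]
```
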
OSError + +import os +import sys +import re +import io +import codecs +import shutil +from contextlib import contextmanager + +modification_time = os.path.getmtime + +_function_caches = [] +def clear_function_caches(): + for cache in _function_caches: + cache.clear() + +def cached_function(f): + cache = {} + _function_caches.append(cache) + uncomputed = object() + def wrapper(*args): + res = cache.get(args, uncomputed) + if res is uncomputed: + res = cache[args] = f(*args) + return res + wrapper.uncached = f + return wrapper + +def cached_method(f): + cache_name = '__%s_cache' % f.__name__ + def wrapper(self, *args): + cache = getattr(self, cache_name, None) + if cache is None: + cache = {} + setattr(self, cache_name, cache) + if args in cache: + return cache[args] + res = cache[args] = f(self, *args) + return res + return wrapper + +def replace_suffix(path, newsuf): + base, _ = os.path.splitext(path) + return base + newsuf + + +def open_new_file(path): + if os.path.exists(path): + # Make sure to create a new file here so we can + # safely hard link the output files. + os.unlink(path) + + # we use the ISO-8859-1 encoding here because we only write pure + # ASCII strings or (e.g. for file names) byte encoded strings as + # Unicode, so we need a direct mapping from the first 256 Unicode + # characters to a byte sequence, which ISO-8859-1 provides + + # note: can't use io.open() in Py2 as we may be writing str objects + return codecs.open(path, "w", encoding="ISO-8859-1") + + +def castrate_file(path, st): + # Remove junk contents from an output file after a + # failed compilation. + # Also sets access and modification times back to + # those specified by st (a stat struct). + try: + f = open_new_file(path) + except EnvironmentError: + pass + else: + f.write( + "#error Do not use this file, it is the result of a failed Cython compilation.\n") + f.close() + if st: + os.utime(path, (st.st_atime, st.st_mtime-1)) + +def file_newer_than(path, time): + ftime = modification_time(path) + return ftime > time + + +def safe_makedirs(path): + try: + os.makedirs(path) + except OSError: + if not os.path.isdir(path): + raise + + +def copy_file_to_dir_if_newer(sourcefile, destdir): + """ + Copy file sourcefile to directory destdir (creating it if needed), + preserving metadata. If the destination file exists and is not + older than the source file, the copying is skipped. 
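
Usage sketch for the cached_function decorator above (assuming this vendored Cython.Utils is importable): results are keyed on the positional-argument tuple, and the undecorated function stays reachable through the `uncached` attribute.

```python
from Cython.Utils import cached_function

calls = []

@cached_function
def parse(path):
    calls.append(path)
    return path.upper()

assert parse("a.pyx") == parse("a.pyx") == "A.PYX"
assert calls == ["a.pyx"]                 # second call came from the cache
parse.uncached("a.pyx")                   # bypasses the cache, runs again
assert calls == ["a.pyx", "a.pyx"]
```
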
+ """ + destfile = os.path.join(destdir, os.path.basename(sourcefile)) + try: + desttime = modification_time(destfile) + except OSError: + # New file does not exist, destdir may or may not exist + safe_makedirs(destdir) + else: + # New file already exists + if not file_newer_than(sourcefile, desttime): + return + shutil.copy2(sourcefile, destfile) + + +@cached_function +def find_root_package_dir(file_path): + dir = os.path.dirname(file_path) + if file_path == dir: + return dir + elif is_package_dir(dir): + return find_root_package_dir(dir) + else: + return dir + +@cached_function +def check_package_dir(dir, package_names): + for dirname in package_names: + dir = os.path.join(dir, dirname) + if not is_package_dir(dir): + return None + return dir + +@cached_function +def is_package_dir(dir_path): + for filename in ("__init__.py", + "__init__.pyc", + "__init__.pyx", + "__init__.pxd"): + path = os.path.join(dir_path, filename) + if path_exists(path): + return 1 + +@cached_function +def path_exists(path): + # try on the filesystem first + if os.path.exists(path): + return True + # figure out if a PEP 302 loader is around + try: + loader = __loader__ + # XXX the code below assumes a 'zipimport.zipimporter' instance + # XXX should be easy to generalize, but too lazy right now to write it + archive_path = getattr(loader, 'archive', None) + if archive_path: + normpath = os.path.normpath(path) + if normpath.startswith(archive_path): + arcname = normpath[len(archive_path)+1:] + try: + loader.get_data(arcname) + return True + except IOError: + return False + except NameError: + pass + return False + +# file name encodings + +def decode_filename(filename): + if isinstance(filename, bytes): + try: + filename_encoding = sys.getfilesystemencoding() + if filename_encoding is None: + filename_encoding = sys.getdefaultencoding() + filename = filename.decode(filename_encoding) + except UnicodeDecodeError: + pass + return filename + +# support for source file encoding detection + +_match_file_encoding = re.compile(br"(\w*coding)[:=]\s*([-\w.]+)").search + + +def detect_opened_file_encoding(f): + # PEPs 263 and 3120 + # Most of the time the first two lines fall in the first couple of hundred chars, + # and this bulk read/split is much faster. + lines = () + start = b'' + while len(lines) < 3: + data = f.read(500) + start += data + lines = start.split(b"\n") + if not data: + break + m = _match_file_encoding(lines[0]) + if m and m.group(1) != b'c_string_encoding': + return m.group(2).decode('iso8859-1') + elif len(lines) > 1: + m = _match_file_encoding(lines[1]) + if m: + return m.group(2).decode('iso8859-1') + return "UTF-8" + + +def skip_bom(f): + """ + Read past a BOM at the beginning of a source file. + This could be added to the scanner, but it's *substantially* easier + to keep it at this level. + """ + if f.read(1) != u'\uFEFF': + f.seek(0) + + +def open_source_file(source_filename, encoding=None, error_handling=None): + stream = None + try: + if encoding is None: + # Most of the time the encoding is not specified, so try hard to open the file only once. + f = io.open(source_filename, 'rb') + encoding = detect_opened_file_encoding(f) + f.seek(0) + stream = io.TextIOWrapper(f, encoding=encoding, errors=error_handling) + else: + stream = io.open(source_filename, encoding=encoding, errors=error_handling) + + except OSError: + if os.path.exists(source_filename): + raise # File is there, but something went wrong reading from it. + # Allow source files to be in zip files etc. 
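
detect_opened_file_encoding above implements PEP 263: a coding declaration counts only on the first two lines, with UTF-8 as the fallback. A quick check (again assuming the vendored module is importable):

```python
import io
from Cython.Utils import detect_opened_file_encoding

src = b"#!/usr/bin/env python\n# -*- coding: iso8859-1 -*-\nx = 1\n"
assert detect_opened_file_encoding(io.BytesIO(src)) == "iso8859-1"
# No declaration on the first two lines falls back to UTF-8:
assert detect_opened_file_encoding(io.BytesIO(b"x = 1\n")) == "UTF-8"
```
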
+ try: + loader = __loader__ + if source_filename.startswith(loader.archive): + stream = open_source_from_loader( + loader, source_filename, + encoding, error_handling) + except (NameError, AttributeError): + pass + + if stream is None: + raise FileNotFoundError(source_filename) + skip_bom(stream) + return stream + + +def open_source_from_loader(loader, + source_filename, + encoding=None, error_handling=None): + nrmpath = os.path.normpath(source_filename) + arcname = nrmpath[len(loader.archive)+1:] + data = loader.get_data(arcname) + return io.TextIOWrapper(io.BytesIO(data), + encoding=encoding, + errors=error_handling) + + +def str_to_number(value): + # note: this expects a string as input that was accepted by the + # parser already, with an optional "-" sign in front + is_neg = False + if value[:1] == '-': + is_neg = True + value = value[1:] + if len(value) < 2: + value = int(value, 0) + elif value[0] == '0': + literal_type = value[1] # 0'o' - 0'b' - 0'x' + if literal_type in 'xX': + # hex notation ('0x1AF') + value = int(value[2:], 16) + elif literal_type in 'oO': + # Py3 octal notation ('0o136') + value = int(value[2:], 8) + elif literal_type in 'bB': + # Py3 binary notation ('0b101') + value = int(value[2:], 2) + else: + # Py2 octal notation ('0136') + value = int(value, 8) + else: + value = int(value, 0) + return -value if is_neg else value + + +def long_literal(value): + if isinstance(value, basestring): + value = str_to_number(value) + return not -2**31 <= value < 2**31 + + +@cached_function +def get_cython_cache_dir(): + r""" + Return the base directory containing Cython's caches. + + Priority: + + 1. CYTHON_CACHE_DIR + 2. (OS X): ~/Library/Caches/Cython + (posix not OS X): XDG_CACHE_HOME/cython if XDG_CACHE_HOME defined + 3. ~/.cython + + """ + if 'CYTHON_CACHE_DIR' in os.environ: + return os.environ['CYTHON_CACHE_DIR'] + + parent = None + if os.name == 'posix': + if sys.platform == 'darwin': + parent = os.path.expanduser('~/Library/Caches') + else: + # this could fallback on ~/.cache + parent = os.environ.get('XDG_CACHE_HOME') + + if parent and os.path.isdir(parent): + return os.path.join(parent, 'cython') + + # last fallback: ~/.cython + return os.path.expanduser(os.path.join('~', '.cython')) + + +@contextmanager +def captured_fd(stream=2, encoding=None): + pipe_in = t = None + orig_stream = os.dup(stream) # keep copy of original stream + try: + pipe_in, pipe_out = os.pipe() + os.dup2(pipe_out, stream) # replace stream by copy of pipe + try: + os.close(pipe_out) # close original pipe-out stream + data = [] + + def copy(): + try: + while True: + d = os.read(pipe_in, 1000) + if d: + data.append(d) + else: + break + finally: + os.close(pipe_in) + + def get_output(): + output = b''.join(data) + if encoding: + output = output.decode(encoding) + return output + + from threading import Thread + t = Thread(target=copy) + t.daemon = True # just in case + t.start() + yield get_output + finally: + os.dup2(orig_stream, stream) # restore original stream + if t is not None: + t.join() + finally: + os.close(orig_stream) + + +def print_bytes(s, header_text=None, end=b'\n', file=sys.stdout, flush=True): + if header_text: + file.write(header_text) # note: text! 
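
captured_fd above swaps the OS-level file descriptor for a pipe, so it also catches output written by C code that never touches sys.stderr. A usage sketch:

```python
import os
from Cython.Utils import captured_fd

with captured_fd(2, encoding="utf-8") as get_stderr:
    os.write(2, b"low-level error\n")        # bypasses sys.stderr entirely
assert get_stderr() == "low-level error\n"   # complete once the block exits
```
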
=> file.write() instead of out.write() + file.flush() + try: + out = file.buffer # Py3 + except AttributeError: + out = file # Py2 + out.write(s) + if end: + out.write(end) + if flush: + out.flush() + +class LazyStr: + def __init__(self, callback): + self.callback = callback + def __str__(self): + return self.callback() + def __repr__(self): + return self.callback() + def __add__(self, right): + return self.callback() + right + def __radd__(self, left): + return left + self.callback() + + +class OrderedSet(object): + def __init__(self, elements=()): + self._list = [] + self._set = set() + self.update(elements) + def __iter__(self): + return iter(self._list) + def update(self, elements): + for e in elements: + self.add(e) + def add(self, e): + if e not in self._set: + self._list.append(e) + self._set.add(e) + + +# Class decorator that adds a metaclass and recreates the class with it. +# Copied from 'six'. +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def raise_error_if_module_name_forbidden(full_module_name): + #it is bad idea to call the pyx-file cython.pyx, so fail early + if full_module_name == 'cython' or full_module_name.startswith('cython.'): + raise ValueError('cython is a special module, cannot be used as a module name') + + +def build_hex_version(version_string): + """ + Parse and translate '4.3a1' into the readable hex representation '0x040300A1' (like PY_VERSION_HEX). + """ + # First, parse '4.12a1' into [4, 12, 0, 0xA01]. + digits = [] + release_status = 0xF0 + for digit in re.split('([.abrc]+)', version_string): + if digit in ('a', 'b', 'rc'): + release_status = {'a': 0xA0, 'b': 0xB0, 'rc': 0xC0}[digit] + digits = (digits + [0, 0])[:3] # 1.2a1 -> 1.2.0a1 + elif digit != '.': + digits.append(int(digit)) + digits = (digits + [0] * 3)[:4] + digits[3] += release_status + + # Then, build a single hex value, two hex digits per version part. + hexversion = 0 + for digit in digits: + hexversion = (hexversion << 8) + digit + + return '0x%08X' % hexversion diff --git a/venv/lib/python3.8/site-packages/Cython/__init__.py b/venv/lib/python3.8/site-packages/Cython/__init__.py new file mode 100644 index 0000000..549246b --- /dev/null +++ b/venv/lib/python3.8/site-packages/Cython/__init__.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from .Shadow import __version__ + +# Void cython.* directives (for case insensitive operating systems). 
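
build_hex_version above mirrors PY_VERSION_HEX: two hex digits per release part, with the release level folded into the last byte (a=0xA0, b=0xB0, rc=0xC0, final=0xF0). Checked against the docstring's own example (assuming the vendored module is importable):

```python
from Cython.Utils import build_hex_version

assert build_hex_version("4.3a1") == "0x040300A1"     # the docstring example
assert build_hex_version("0.29.32") == "0x001D20F0"   # final releases get 0xF0
assert build_hex_version("3.0b2") == "0x030000B2"
```
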
+from .Shadow import * + + +def load_ipython_extension(ip): + """Load the extension in IPython.""" + from .Build.IpythonMagic import CythonMagics # pylint: disable=cyclic-import + ip.register_magics(CythonMagics) diff --git a/venv/lib/python3.8/site-packages/Cython/__pycache__/CodeWriter.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/__pycache__/CodeWriter.cpython-38.pyc new file mode 100644 index 0000000..89e86d2 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/__pycache__/CodeWriter.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/__pycache__/Coverage.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/__pycache__/Coverage.cpython-38.pyc new file mode 100644 index 0000000..09c43c7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/__pycache__/Coverage.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/__pycache__/Debugging.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/__pycache__/Debugging.cpython-38.pyc new file mode 100644 index 0000000..a74b443 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/__pycache__/Debugging.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/__pycache__/Shadow.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/__pycache__/Shadow.cpython-38.pyc new file mode 100644 index 0000000..66b96cc Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/__pycache__/Shadow.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/__pycache__/StringIOTree.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/__pycache__/StringIOTree.cpython-38.pyc new file mode 100644 index 0000000..3535f1b Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/__pycache__/StringIOTree.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/__pycache__/TestUtils.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/__pycache__/TestUtils.cpython-38.pyc new file mode 100644 index 0000000..6a87902 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/__pycache__/TestUtils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/__pycache__/Utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/__pycache__/Utils.cpython-38.pyc new file mode 100644 index 0000000..f3025b4 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/__pycache__/Utils.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Cython/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/Cython/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..632afc9 Binary files /dev/null and b/venv/lib/python3.8/site-packages/Cython/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following 
conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA new file mode 100644 index 0000000..f54bb5c --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA @@ -0,0 +1,113 @@ +Metadata-Version: 2.1 +Name: Jinja2 +Version: 3.1.2 +Summary: A very fast and expressive template engine. +Home-page: https://palletsprojects.com/p/jinja/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://jinja.palletsprojects.com/ +Project-URL: Changes, https://jinja.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/jinja/ +Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: MarkupSafe (>=2.0) +Provides-Extra: i18n +Requires-Dist: Babel (>=2.7) ; extra == 'i18n' + +Jinja +===== + +Jinja is a fast, expressive, extensible templating engine. Special +placeholders in the template allow writing code similar to Python +syntax. Then the template is passed data to render the final document. + +It includes: + +- Template inheritance and inclusion. +- Define and import macros within templates. +- HTML templates can use autoescaping to prevent XSS from untrusted + user input. +- A sandboxed environment can safely render untrusted templates. +- AsyncIO support for generating templates and calling async + functions. 
+- I18N support with Babel.
+- Templates are compiled to optimized Python code just-in-time and
+  cached, or can be compiled ahead-of-time.
+- Exceptions point to the correct line in templates to make debugging
+  easier.
+- Extensible filters, tests, functions, and even syntax.
+
+Jinja's philosophy is that while application logic belongs in Python if
+possible, it shouldn't make the template designer's job difficult by
+restricting functionality too much.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    $ pip install -U Jinja2
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+In A Nutshell
+-------------
+
+.. code-block:: jinja
+
+    {% extends "base.html" %}
+    {% block title %}Members{% endblock %}
+    {% block content %}
+      <ul>
+      {% for user in users %}
+        <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+      {% endfor %}
+      </ul>
+    {% endblock %}
+
+
+Donate
+------
+
+The Pallets organization develops and supports Jinja and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+- Documentation: https://jinja.palletsprojects.com/
+- Changes: https://jinja.palletsprojects.com/changes/
+- PyPI Releases: https://pypi.org/project/Jinja2/
+- Source Code: https://github.com/pallets/jinja/
+- Issue Tracker: https://github.com/pallets/jinja/issues/
+- Website: https://palletsprojects.com/p/jinja/
+- Twitter: https://twitter.com/PalletsTeam
+- Chat: https://discord.gg/pallets
+
+
diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD
new file mode 100644
index 0000000..885aec0
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD
@@ -0,0 +1,59 @@
+Jinja2-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Jinja2-3.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+Jinja2-3.1.2.dist-info/METADATA,sha256=PZ6v2SIidMNixR7MRUX9f7ZWsPwtXanknqiZUmRbh4U,3539
+Jinja2-3.1.2.dist-info/RECORD,,
+Jinja2-3.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Jinja2-3.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+Jinja2-3.1.2.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59
+Jinja2-3.1.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
+jinja2/__init__.py,sha256=8vGduD8ytwgD6GDSqpYc2m3aU-T7PKOAddvVXgGr_Fs,1927
+jinja2/__pycache__/__init__.cpython-38.pyc,,
+jinja2/__pycache__/_identifier.cpython-38.pyc,,
+jinja2/__pycache__/async_utils.cpython-38.pyc,,
+jinja2/__pycache__/bccache.cpython-38.pyc,,
+jinja2/__pycache__/compiler.cpython-38.pyc,,
+jinja2/__pycache__/constants.cpython-38.pyc,,
+jinja2/__pycache__/debug.cpython-38.pyc,,
+jinja2/__pycache__/defaults.cpython-38.pyc,,
+jinja2/__pycache__/environment.cpython-38.pyc,,
+jinja2/__pycache__/exceptions.cpython-38.pyc,,
+jinja2/__pycache__/ext.cpython-38.pyc,,
+jinja2/__pycache__/filters.cpython-38.pyc,,
+jinja2/__pycache__/idtracking.cpython-38.pyc,,
+jinja2/__pycache__/lexer.cpython-38.pyc,,
+jinja2/__pycache__/loaders.cpython-38.pyc,,
+jinja2/__pycache__/meta.cpython-38.pyc,,
+jinja2/__pycache__/nativetypes.cpython-38.pyc,,
+jinja2/__pycache__/nodes.cpython-38.pyc,,
+jinja2/__pycache__/optimizer.cpython-38.pyc,,
+jinja2/__pycache__/parser.cpython-38.pyc,,
+jinja2/__pycache__/runtime.cpython-38.pyc,,
+jinja2/__pycache__/sandbox.cpython-38.pyc,, +jinja2/__pycache__/tests.cpython-38.pyc,, +jinja2/__pycache__/utils.cpython-38.pyc,, +jinja2/__pycache__/visitor.cpython-38.pyc,, +jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 +jinja2/async_utils.py,sha256=dHlbTeaxFPtAOQEYOGYh_PHcDT0rsDaUJAFDl_0XtTg,2472 +jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061 +jinja2/compiler.py,sha256=Gs-N8ThJ7OWK4-reKoO8Wh1ZXz95MVphBKNVf75qBr8,72172 +jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 +jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299 +jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 +jinja2/environment.py,sha256=6uHIcc7ZblqOMdx_uYNKqRnnwAF0_nzbyeMP9FFtuh4,61349 +jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 +jinja2/ext.py,sha256=ivr3P7LKbddiXDVez20EflcO3q2aHQwz9P_PgWGHVqE,31502 +jinja2/filters.py,sha256=9js1V-h2RlyW90IhLiBGLM2U-k6SCy2F4BUUMgB3K9Q,53509 +jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704 +jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726 +jinja2/loaders.py,sha256=BfptfvTVpClUd-leMkHczdyPNYFzp_n7PKOJ98iyHOg,23207 +jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396 +jinja2/nativetypes.py,sha256=DXgORDPRmVWgy034H0xL8eF7qYoK3DrMxs-935d0Fzk,4226 +jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550 +jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650 +jinja2/parser.py,sha256=nHd-DFHbiygvfaPtm9rcQXJChZG7DPsWfiEsqfwKerY,39595 +jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jinja2/runtime.py,sha256=5CmD5BjbEJxSiDNTFBeKCaq8qU4aYD2v6q2EluyExms,33476 +jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584 +jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905 +jinja2/utils.py,sha256=u9jXESxGn8ATZNVolwmkjUVu4SA-tLgV0W7PcSfPfdQ,23965 +jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568 diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt new file mode 100644 index 0000000..7b9666c --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[babel.extractors] +jinja2 = jinja2.ext:babel_extract[i18n] diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt new file mode 100644 index 0000000..7f7afbf --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ 
b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA new file mode 100644 index 0000000..485a5e0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA @@ -0,0 +1,101 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 2.1.1 +Summary: Safely add untrusted strings to HTML/XML markup. 
+Home-page: https://palletsprojects.com/p/markupsafe/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/markupsafe/
+Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+
+MarkupSafe
+==========
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    pip install -U MarkupSafe
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+Examples
+--------
+
+.. code-block:: pycon
+
+    >>> from markupsafe import Markup, escape
+
+    >>> # escape replaces special characters and wraps in Markup
+    >>> escape("<script>alert(document.cookie);</script>")
+    Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+
+    >>> # wrap in Markup to mark text "safe" and prevent escaping
+    >>> Markup("<strong>Hello</strong>")
+    Markup('<strong>Hello</strong>')
+
+    >>> escape(Markup("<strong>Hello</strong>"))
+    Markup('<strong>Hello</strong>')
+
+    >>> # Markup is a str subclass
+    >>> # methods and operators escape their arguments
+    >>> template = Markup("Hello <em>{name}</em>")
+    >>> template.format(name='"World"')
+    Markup('Hello <em>&#34;World&#34;</em>')
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+`please donate today`_.
+
+..
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://markupsafe.palletsprojects.com/ +- Changes: https://markupsafe.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/MarkupSafe/ +- Source Code: https://github.com/pallets/markupsafe/ +- Issue Tracker: https://github.com/pallets/markupsafe/issues/ +- Website: https://palletsprojects.com/p/markupsafe/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD new file mode 100644 index 0000000..6f34462 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD @@ -0,0 +1,14 @@ +MarkupSafe-2.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-2.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-2.1.1.dist-info/METADATA,sha256=DC93VszmzjLQcrVChRUjtW4XbUwjTdbaplpgdlbFdbs,3242 +MarkupSafe-2.1.1.dist-info/RECORD,, +MarkupSafe-2.1.1.dist-info/WHEEL,sha256=paN2rHE-sLfyg0Z4YvQnentMRWXxZnkclRDH8E5J6qk,148 +MarkupSafe-2.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=xfaUQkKNRTdYWe6HnnJ2HjguFmS-C_0H6g8-Q9VAfkQ,9284 +markupsafe/__pycache__/__init__.cpython-38.pyc,, +markupsafe/__pycache__/_native.cpython-38.pyc,, +markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713 +markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083 +markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so,sha256=gBmi2f9vNFVvJs2gdtjYKwK0tIgrxEqVUMbyL-1roRo,45008 +markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL new file mode 100644 index 0000000..32bdea0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/venv/lib/python3.8/site-packages/__pycache__/appdirs.cpython-38.pyc b/venv/lib/python3.8/site-packages/__pycache__/appdirs.cpython-38.pyc new file mode 100644 index 0000000..6b5b7d7 Binary files /dev/null and b/venv/lib/python3.8/site-packages/__pycache__/appdirs.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/__pycache__/cython.cpython-38.pyc b/venv/lib/python3.8/site-packages/__pycache__/cython.cpython-38.pyc new file mode 100644 index 0000000..949cff0 Binary files /dev/null and b/venv/lib/python3.8/site-packages/__pycache__/cython.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/__pycache__/sh.cpython-38.pyc b/venv/lib/python3.8/site-packages/__pycache__/sh.cpython-38.pyc new file mode 100644 index 0000000..9687104 Binary files /dev/null and b/venv/lib/python3.8/site-packages/__pycache__/sh.cpython-38.pyc 
differ diff --git a/venv/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc b/venv/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc new file mode 100644 index 0000000..c5acde1 Binary files /dev/null and b/venv/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/_distutils_hack/__init__.py b/venv/lib/python3.8/site-packages/_distutils_hack/__init__.py new file mode 100644 index 0000000..47ce249 --- /dev/null +++ b/venv/lib/python3.8/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,128 @@ +import sys +import os +import re +import importlib +import warnings + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +warnings.filterwarnings('ignore', + '.+ distutils .+ deprecated', + DeprecationWarning) + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils.") + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + warnings.warn("Setuptools is replacing distutils.") + mods = [name for name in sys.modules if re.match(r'distutils\b', name)] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') + return which == 'local' + + +def ensure_local_distutils(): + clear_distutils() + distutils = importlib.import_module('setuptools._distutils') + distutils.__name__ = 'distutils' + sys.modules['distutils'] = distutils + + # sanity check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. + """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + if path is not None: + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + import importlib.abc + import importlib.util + + class DistutilsLoader(importlib.abc.Loader): + + def create_module(self, spec): + return importlib.import_module('setuptools._distutils') + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader('distutils', DistutilsLoader()) + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. + """ + if self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @staticmethod + def pip_imported_during_build(): + """ + Detect if pip is being imported in a build script. Ref #2355. 
+ """ + import traceback + return any( + frame.f_globals['__file__'].endswith('setup.py') + for frame, line in traceback.walk_stack(None) + ) + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..699057a Binary files /dev/null and b/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/override.cpython-38.pyc b/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/override.cpython-38.pyc new file mode 100644 index 0000000..2ea4f49 Binary files /dev/null and b/venv/lib/python3.8/site-packages/_distutils_hack/__pycache__/override.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/_distutils_hack/override.py b/venv/lib/python3.8/site-packages/_distutils_hack/override.py new file mode 100644 index 0000000..2cc433a --- /dev/null +++ b/venv/lib/python3.8/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/LICENSE.txt b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/LICENSE.txt new file mode 100644 index 0000000..107c614 --- /dev/null +++ b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/LICENSE.txt @@ -0,0 +1,23 @@ +# This is the MIT license + +Copyright (c) 2010 ActiveState Software Inc. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/METADATA b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/METADATA new file mode 100644 index 0000000..f950731 --- /dev/null +++ b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/METADATA @@ -0,0 +1,264 @@ +Metadata-Version: 2.1 +Name: appdirs +Version: 1.4.4 +Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir". 
+Home-page: http://github.com/ActiveState/appdirs
+Author: Trent Mick
+Author-email: trentm@gmail.com
+Maintainer: Jeff Rouse
+Maintainer-email: jr@its.to
+License: MIT
+Keywords: application directory log cache user
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+
+
+.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png
+    :target: http://travis-ci.org/ActiveState/appdirs
+
+the problem
+===========
+
+What directory should your app use for storing user data? If running on Mac OS X, you
+should use::
+
+    ~/Library/Application Support/<AppName>
+
+If on Windows (at least English Win XP) that should be::
+
+    C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>
+
+or possibly::
+
+    C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>
+
+for `roaming profiles `_ but that is another story.
+
+On Linux (and other Unices) the dir, according to the `XDG
+spec <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is::
+
+    ~/.local/share/<AppName>
+
+
+``appdirs`` to the rescue
+=========================
+
+This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will
+help you choose an appropriate:
+
+- user data dir (``user_data_dir``)
+- user config dir (``user_config_dir``)
+- user cache dir (``user_cache_dir``)
+- site data dir (``site_data_dir``)
+- site config dir (``site_config_dir``)
+- user log dir (``user_log_dir``)
+
+and also:
+
+- is a single module so other Python packages can include their own private copy
+- is slightly opinionated on the directory names used. Look for "OPINION" in
+  documentation and code for when an opinion is being applied.
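A runnable counterpart to the list above (a minimal sketch; ``SuperApp`` and
``Acme`` are illustrative names, and the printed paths depend on the host OS —
compare the sample output that follows):

.. code-block:: python

    # Minimal sketch of the appdirs API summarized above; names illustrative.
    from appdirs import AppDirs, user_cache_dir, user_data_dir

    print(user_data_dir("SuperApp", "Acme"))   # per-user data directory
    print(user_cache_dir("SuperApp", "Acme"))  # per-user cache directory

    # AppDirs bundles the same lookups behind properties (see below).
    dirs = AppDirs("SuperApp", "Acme", version="1.0")
    print(dirs.user_log_dir)                   # version-isolated log directory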
+
+
+some example output
+===================
+
+On Mac OS X::
+
+    >>> from appdirs import *
+    >>> appname = "SuperApp"
+    >>> appauthor = "Acme"
+    >>> user_data_dir(appname, appauthor)
+    '/Users/trentm/Library/Application Support/SuperApp'
+    >>> site_data_dir(appname, appauthor)
+    '/Library/Application Support/SuperApp'
+    >>> user_cache_dir(appname, appauthor)
+    '/Users/trentm/Library/Caches/SuperApp'
+    >>> user_log_dir(appname, appauthor)
+    '/Users/trentm/Library/Logs/SuperApp'
+
+On Windows 7::
+
+    >>> from appdirs import *
+    >>> appname = "SuperApp"
+    >>> appauthor = "Acme"
+    >>> user_data_dir(appname, appauthor)
+    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
+    >>> user_data_dir(appname, appauthor, roaming=True)
+    'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
+    >>> user_cache_dir(appname, appauthor)
+    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
+    >>> user_log_dir(appname, appauthor)
+    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
+
+On Linux::
+
+    >>> from appdirs import *
+    >>> appname = "SuperApp"
+    >>> appauthor = "Acme"
+    >>> user_data_dir(appname, appauthor)
+    '/home/trentm/.local/share/SuperApp'
+    >>> site_data_dir(appname, appauthor)
+    '/usr/local/share/SuperApp'
+    >>> site_data_dir(appname, appauthor, multipath=True)
+    '/usr/local/share/SuperApp:/usr/share/SuperApp'
+    >>> user_cache_dir(appname, appauthor)
+    '/home/trentm/.cache/SuperApp'
+    >>> user_log_dir(appname, appauthor)
+    '/home/trentm/.cache/SuperApp/log'
+    >>> user_config_dir(appname)
+    '/home/trentm/.config/SuperApp'
+    >>> site_config_dir(appname)
+    '/etc/xdg/SuperApp'
+    >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc'
+    >>> site_config_dir(appname, multipath=True)
+    '/etc/SuperApp:/usr/local/etc/SuperApp'
+
+
+``AppDirs`` for convenience
+===========================
+
+::
+
+    >>> from appdirs import AppDirs
+    >>> dirs = AppDirs("SuperApp", "Acme")
+    >>> dirs.user_data_dir
+    '/Users/trentm/Library/Application Support/SuperApp'
+    >>> dirs.site_data_dir
+    '/Library/Application Support/SuperApp'
+    >>> dirs.user_cache_dir
+    '/Users/trentm/Library/Caches/SuperApp'
+    >>> dirs.user_log_dir
+    '/Users/trentm/Library/Logs/SuperApp'
+
+
+
+Per-version isolation
+=====================
+
+If you have multiple versions of your app in use that you want to be
+able to run side-by-side, then you may want version-isolation for these
+dirs::
+
+    >>> from appdirs import AppDirs
+    >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
+    >>> dirs.user_data_dir
+    '/Users/trentm/Library/Application Support/SuperApp/1.0'
+    >>> dirs.site_data_dir
+    '/Library/Application Support/SuperApp/1.0'
+    >>> dirs.user_cache_dir
+    '/Users/trentm/Library/Caches/SuperApp/1.0'
+    >>> dirs.user_log_dir
+    '/Users/trentm/Library/Logs/SuperApp/1.0'
+
+
+
+appdirs Changelog
+=================
+
+appdirs 1.4.4
+-------------
+- [PR #92] Don't import appdirs from setup.py
+
+Project officially classified as Stable which is important
+for inclusion in other distros such as ActivePython.
+
+First of several incremental releases to catch up on maintenance.
+ +appdirs 1.4.3 +------------- +- [PR #76] Python 3.6 invalid escape sequence deprecation fixes +- Fix for Python 3.6 support + +appdirs 1.4.2 +------------- +- [PR #84] Allow installing without setuptools +- [PR #86] Fix string delimiters in setup.py description +- Add Python 3.6 support + +appdirs 1.4.1 +------------- +- [issue #38] Fix _winreg import on Windows Py3 +- [issue #55] Make appname optional + +appdirs 1.4.0 +------------- +- [PR #42] AppAuthor is now optional on Windows +- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows + support requires `JNA `_. +- [PR #44] Fix incorrect behaviour of the site_config_dir method + +appdirs 1.3.0 +------------- +- [Unix, issue 16] Conform to XDG standard, instead of breaking it for + everybody +- [Unix] Removes gratuitous case mangling of the case, since \*nix-es are + usually case sensitive, so mangling is not wise +- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result + based on XDG_DATA_DIRS and make room for respecting the standard which + specifies XDG_DATA_DIRS is a multiple-value variable +- [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to + XDG specs; on Windows and Mac return the corresponding ``*_data_dir`` + +appdirs 1.2.0 +------------- + +- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more + typical. +- [issue 9] Make ``unicode`` work on py3k. + +appdirs 1.1.0 +------------- + +- [issue 4] Add ``AppDirs.user_log_dir``. +- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec + `_. +- [Mac, issue 5] Fix ``site_data_dir()`` on Mac. +- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports + Python3 now. +- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use + ``opinion=False`` option to disable this. +- Add ``appdirs.AppDirs`` convenience class. Usage: + + >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp/1.0' + +- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short + paths if there are high bit chars. +- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g. + "~/.superapp/cache". +- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only) + and change the default ``user_data_dir`` behaviour to use a *non*-roaming + profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because + a large roaming profile can cause login speed issues. The "only syncs on + logout" behaviour can cause surprises in appdata info. + + +appdirs 1.0.1 (never released) +------------------------------ + +Started this changelog 27 July 2010. Before that this module originated in the +`Komodo `_ product as ``applib.py`` and then +as `applib/location.py +`_ (used by +`PyPM `_ in `ActivePython +`_). This is basically a fork of +applib.py 1.0.1 and applib/location.py 1.0.1. 
+
+
+
diff --git a/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/RECORD b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/RECORD
new file mode 100644
index 0000000..6f3e45c
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/RECORD
@@ -0,0 +1,9 @@
+__pycache__/appdirs.cpython-38.pyc,,
+appdirs-1.4.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+appdirs-1.4.4.dist-info/LICENSE.txt,sha256=Nt200KdFqTqyAyA9cZCBSxuJcn0lTK_0jHp6-71HAAs,1097
+appdirs-1.4.4.dist-info/METADATA,sha256=k5TVfXMNKGHTfp2wm6EJKTuGwGNuoQR5TqQgH8iwG8M,8981
+appdirs-1.4.4.dist-info/RECORD,,
+appdirs-1.4.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+appdirs-1.4.4.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+appdirs-1.4.4.dist-info/top_level.txt,sha256=nKncE8CUqZERJ6VuQWL4_bkunSPDNfn7KZqb4Tr5YEM,8
+appdirs.py,sha256=g99s2sXhnvTEm79oj4bWI0Toapc-_SmKKNXvOXHkVic,24720
diff --git a/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/WHEEL b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/WHEEL
new file mode 100644
index 0000000..ef99c6c
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/top_level.txt
new file mode 100644
index 0000000..d64bc32
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/appdirs-1.4.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+appdirs
diff --git a/venv/lib/python3.8/site-packages/appdirs.py b/venv/lib/python3.8/site-packages/appdirs.py
new file mode 100644
index 0000000..2acd1de
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/appdirs.py
@@ -0,0 +1,608 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2005-2010 ActiveState Software Inc.
+# Copyright (c) 2013 Eddy Petrișor
+
+"""Utilities for determining application-specific dirs.
+
+See <http://github.com/ActiveState/appdirs> for details and usage.
+"""
+# Dev Notes:
+# - MSDN on where to store app data files:
+#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
+# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
+# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+
+__version__ = "1.4.4"
+__version_info__ = tuple(int(segment) for segment in __version__.split("."))
+
+
+import sys
+import os
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+    unicode = str
+
+if sys.platform.startswith('java'):
+    import platform
+    os_name = platform.java_ver()[3][0]
+    if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
+        system = 'win32'
+    elif os_name.startswith('Mac'): # "Mac OS X", etc.
+        system = 'darwin'
+    else: # "Linux", "SunOS", "FreeBSD", etc.
+        # Setting this to "linux2" is not ideal, but only Windows or Mac
+        # are actually checked for and the rest of the module expects
+        # *sys.platform* style strings.
+        system = 'linux2'
+else:
+    system = sys.platform
+
+
+
+def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
+    r"""Return full path to the user-specific data dir for this application.
+ + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/ + Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\\Application Data\\ + Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ + Win 7 (not roaming): C:\Users\\AppData\Local\\ + Win 7 (roaming): C:\Users\\AppData\Roaming\\ + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/". + """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/', + if XDG_DATA_DIRS is not set + + Typical site data directories are: + Mac OS X: /Library/Application Support/ + Unix: /usr/local/share/ or /usr/share/ + Win XP: C:\Documents and Settings\All Users\Application Data\\ + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user config directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by default "~/.config/". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. 
+            By default, the first item from XDG_CONFIG_DIRS is
+            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
+
+        Typical site config directories are:
+            Mac OS X:   same as site_data_dir
+            Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
+                        $XDG_CONFIG_DIRS
+            Win *:      same as site_data_dir
+            Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+
+        For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
+
+        WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+    """
+    if system in ["win32", "darwin"]:
+        path = site_data_dir(appname, appauthor)
+        if appname and version:
+            path = os.path.join(path, version)
+    else:
+        # XDG default for $XDG_CONFIG_DIRS
+        # only first, if multipath is False
+        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+        if appname:
+            if version:
+                appname = os.path.join(appname, version)
+            pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+        if multipath:
+            path = os.pathsep.join(pathlist)
+        else:
+            path = pathlist[0]
+    return path
+
+
+def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
+    r"""Return full path to the user-specific cache dir for this application.
+
+        "appname" is the name of application.
+            If None, just the system directory is returned.
+        "appauthor" (only used on Windows) is the name of the
+            appauthor or distributing body for this application. Typically
+            it is the owning company name. This falls back to appname. You may
+            pass False to disable it.
+        "version" is an optional version path element to append to the
+            path. You might want to use this if you want multiple versions
+            of your app to be able to run independently. If used, this
+            would typically be "<major>.<minor>".
+            Only applied when appname is present.
+        "opinion" (boolean) can be False to disable the appending of
+            "Cache" to the base app data dir for Windows. See
+            discussion below.
+
+        Typical user cache directories are:
+            Mac OS X:   ~/Library/Caches/<AppName>
+            Unix:       ~/.cache/<AppName> (XDG default)
+            Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
+            Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
+
+        On Windows the only suggestion in the MSDN docs is that local settings go in
+        the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
+        app data dir (the default returned by `user_data_dir` above). Apps typically
+        put cache data somewhere *under* the given dir here. Some examples:
+            ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
+            ...\Acme\SuperApp\Cache\1.0
+        OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
+        This can be disabled with the `opinion=False` option.
+    """
+    if system == "win32":
+        if appauthor is None:
+            appauthor = appname
+        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+        if appname:
+            if appauthor is not False:
+                path = os.path.join(path, appauthor, appname)
+            else:
+                path = os.path.join(path, appname)
+            if opinion:
+                path = os.path.join(path, "Cache")
+    elif system == 'darwin':
+        path = os.path.expanduser('~/Library/Caches')
+        if appname:
+            path = os.path.join(path, appname)
+    else:
+        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
+        if appname:
+            path = os.path.join(path, appname)
+    if appname and version:
+        path = os.path.join(path, version)
+    return path
+
+
+def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
+    r"""Return full path to the user-specific state dir for this application.
+ + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user state directories are: + Mac OS X: same as user_data_dir + Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow this Debian proposal + to extend the XDG spec and support $XDG_STATE_HOME. + + That means, by default "~/.local/state/". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. + + Typical user log directories are: + Mac OS X: ~/Library/Logs/ + Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs + Vista: C:\Users\\AppData\Local\\\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. 
+ """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname=None, appauthor=None, version=None, + roaming=False, multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_state_dir(self): + return user_state_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + if PY3: + import winreg as _winreg + else: + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # . 
+    has_high_char = False
+    for c in buf:
+        if ord(c) > 255:
+            has_high_char = True
+            break
+    if has_high_char:
+        buf2 = ctypes.create_unicode_buffer(1024)
+        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+            buf = buf2
+
+    return buf.value
+
+def _get_win_folder_with_jna(csidl_name):
+    import array
+    from com.sun import jna
+    from com.sun.jna.platform import win32
+
+    buf_size = win32.WinDef.MAX_PATH * 2
+    buf = array.zeros('c', buf_size)
+    shell = win32.Shell32.INSTANCE
+    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None,
+                          win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
+    dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+    # Downgrade to short path name if have highbit chars. See
+    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+    has_high_char = False
+    for c in dir:
+        if ord(c) > 255:
+            has_high_char = True
+            break
+    if has_high_char:
+        buf = array.zeros('c', buf_size)
+        kernel = win32.Kernel32.INSTANCE
+        if kernel.GetShortPathName(dir, buf, buf_size):
+            dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+    return dir
+
+if system == "win32":
+    try:
+        import win32com.shell
+        _get_win_folder = _get_win_folder_with_pywin32
+    except ImportError:
+        try:
+            from ctypes import windll
+            _get_win_folder = _get_win_folder_with_ctypes
+        except ImportError:
+            try:
+                import com.sun.jna
+                _get_win_folder = _get_win_folder_with_jna
+            except ImportError:
+                _get_win_folder = _get_win_folder_from_registry
+
+
+#---- self test code
+
+if __name__ == "__main__":
+    appname = "MyApp"
+    appauthor = "MyCompany"
+
+    props = ("user_data_dir",
+             "user_config_dir",
+             "user_cache_dir",
+             "user_state_dir",
+             "user_log_dir",
+             "site_data_dir",
+             "site_config_dir")
+
+    print("-- app dirs %s --" % __version__)
+
+    print("-- app dirs (with optional 'version')")
+    dirs = AppDirs(appname, appauthor, version="1.0")
+    for prop in props:
+        print("%s: %s" % (prop, getattr(dirs, prop)))
+
+    print("\n-- app dirs (without optional 'version')")
+    dirs = AppDirs(appname, appauthor)
+    for prop in props:
+        print("%s: %s" % (prop, getattr(dirs, prop)))
+
+    print("\n-- app dirs (without optional 'appauthor')")
+    dirs = AppDirs(appname)
+    for prop in props:
+        print("%s: %s" % (prop, getattr(dirs, prop)))
+
+    print("\n-- app dirs (with disabled 'appauthor')")
+    dirs = AppDirs(appname, appauthor=False)
+    for prop in props:
+        print("%s: %s" % (prop, getattr(dirs, prop)))
diff --git a/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/LICENSE b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/LICENSE
new file mode 100644
index 0000000..d5d6b13
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2010-2017 Kivy Team and other contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or
substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/METADATA b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/METADATA
new file mode 100644
index 0000000..da84336
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/METADATA
@@ -0,0 +1,1477 @@
+Metadata-Version: 2.1
+Name: buildozer
+Version: 1.4.0
+Summary: Generic Python packager for Android / iOS and Desktop
+Home-page: https://github.com/kivy/buildozer
+Author: Mathieu Virbel
+Author-email: mat@kivy.org
+License: MIT
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: pexpect
+Requires-Dist: virtualenv
+Requires-Dist: sh
+
+Buildozer
+=========
+
+[![Tests](https://github.com/kivy/buildozer/workflows/Tests/badge.svg)](https://github.com/kivy/buildozer/actions?query=workflow%3ATests)
+[![Android](https://github.com/kivy/buildozer/workflows/Android/badge.svg)](https://github.com/kivy/buildozer/actions?query=workflow%3AAndroid)
+[![iOS](https://github.com/kivy/buildozer/workflows/iOS/badge.svg)](https://github.com/kivy/buildozer/actions?query=workflow%3AiOS)
+[![Coverage Status](https://coveralls.io/repos/github/kivy/buildozer/badge.svg)](https://coveralls.io/github/kivy/buildozer)
+[![Backers on Open Collective](https://opencollective.com/kivy/backers/badge.svg)](#backers)
+[![Sponsors on Open Collective](https://opencollective.com/kivy/sponsors/badge.svg)](#sponsors)
+
+Buildozer is a tool for creating application packages easily.
+
+The goal is to have one "buildozer.spec" file in your app directory, describing
+your application requirements and settings such as title, icon, included modules
+etc. Buildozer will use that spec to create a package for Android, iOS, Windows,
+OSX and/or Linux.
+
+Buildozer currently supports packaging for Android via the [python-for-android](https://github.com/kivy/python-for-android/)
+project, and for iOS via the kivy-ios project. iOS and OSX support is still a
+work in progress.
+
+For Android, buildozer will automatically download and prepare the
+build dependencies. For more information, see
+[Android-SDK-NDK-Information](https://github.com/kivy/kivy/wiki/Android-SDK-NDK-Information).
+Note that only Python 3 is supported.
+
+Note that this tool has nothing to do with the eponymous online build service
+[buildozer.io](https://buildozer.io).
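+
+To make the spec idea above concrete, here is a minimal sketch of an `[app]`
+section (the token names come from buildozer's default.spec; the values are
+purely illustrative):
+
+```
+[app]
+title = My Application
+package.name = myapp
+package.domain = org.example
+source.dir = .
+version = 0.1
+requirements = python3,kivy
+```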
+
+## Installing Buildozer with target Python 3 (default):
+
+- Install buildozer:
+
+    # via pip (latest stable, recommended)
+    # if you use a virtualenv, don't use the `--user` option
+    pip install --user buildozer
+
+    # latest dev version
+    # if you use a virtualenv, don't use the `--user` option
+    pip install --user https://github.com/kivy/buildozer/archive/master.zip
+
+    # git clone, for working on buildozer
+    git clone https://github.com/kivy/buildozer
+    cd buildozer
+    python setup.py build
+    pip install -e .
+
+- Check that buildozer is in your PATH:
+
+    which buildozer
+    # if there is no result, and you installed with --user, add this line at the end of your `~/.bashrc` file:
+    export PATH=~/.local/bin/:$PATH
+    # and then run
+    . ~/.bashrc
+
+- Go into your application directory and run:
+
+    buildozer init
+    # edit the buildozer.spec, then
+    buildozer android debug deploy run
+
+
+## Buildozer Docker image
+
+A Dockerfile is available for running buildozer in a Docker environment.
+
+- Build with:
+
+    docker build --tag=buildozer .
+
+- Run with:
+
+    docker run --volume "$(pwd)":/home/user/hostcwd buildozer --version
+
+
+## Buildozer GitHub action
+
+Use [ArtemSBulgakov/buildozer-action@v1](https://github.com/ArtemSBulgakov/buildozer-action)
+to build your packages automatically on push or pull request.
+See [full workflow example](https://github.com/ArtemSBulgakov/buildozer-action#full-workflow).
+
+
+## Examples of Buildozer commands
+
+```
+# buildozer target command
+buildozer android clean
+buildozer android update
+buildozer android deploy
+buildozer android debug
+buildozer android release
+
+# or all in one (compile in debug, deploy on device)
+buildozer android debug deploy
+
+# set the default command to run when no arguments are given
+buildozer setdefault android debug deploy run
+```
+
+
+## Usage
+
+```
+Usage:
+    buildozer [--profile <name>] [--verbose] [target] <command>...
+    buildozer --version
+
+Available targets:
+    android        Android target, based on python-for-android project
+    ios            iOS target, based on kivy-ios project
+
+Global commands (without target):
+    distclean          Clean the whole Buildozer environment
+    help               Show the Buildozer help
+    init               Create an initial buildozer.spec in the current directory
+    serve              Serve the bin directory via SimpleHTTPServer
+    setdefault         Set the default command to run when no arguments are given
+    version            Show the Buildozer version
+
+Target commands:
+    clean      Clean the target environment
+    update     Update the target dependencies
+    debug      Build the application in debug mode
+    release    Build the application in release mode
+    deploy     Deploy the application on the device
+    run        Run the application on the device
+    serve      Serve the bin directory via SimpleHTTPServer
+
+Target "ios" commands:
+    list_identities    List the available identities to use for signing.
+    xcode              Open the xcode project.
+
+Target "android" commands:
+    adb                Run adb from the Android SDK. Args must come after --,
+                       or use --alias to make an alias
+    logcat             Show the log from the device
+    p4a                Run p4a commands. Args must come after --, or use
+                       --alias to make an alias
+```
+
+
+## `buildozer.spec`
+
+See [buildozer/default.spec](https://raw.github.com/kivy/buildozer/master/buildozer/default.spec) for an up-to-date spec file.
+
+
+## Default config
+
+You can override the value of *any* `buildozer.spec` config token by
+setting an appropriate environment variable. These are all of the
+form ``$SECTION_TOKEN``, where SECTION is the config file section and
+TOKEN is the config token to override. Dots are replaced by
+underscores.
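+
+As a quick sketch, a one-off build could override spec tokens from the shell
+like this (`title` and `android.api` are real `[app]` tokens from the default
+spec; the values and command shown are illustrative):
+
+```
+# [app] title -> $APP_TITLE, [app] android.api -> $APP_ANDROID_API
+export APP_TITLE="My App (nightly)"
+export APP_ANDROID_API=33
+buildozer android debug
+```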
+
+For example, here are some config tokens from the [app] section of the
+config, along with the environment variables that would override them.
+
+- ``title`` -> ``$APP_TITLE``
+- ``package.name`` -> ``$APP_PACKAGE_NAME``
+- ``p4a.source_dir`` -> ``$APP_P4A_SOURCE_DIR``
+
+## Support
+
+If you need assistance, you can ask for help on our mailing list:
+
+* User Group : https://groups.google.com/group/kivy-users
+* Email : kivy-users@googlegroups.com
+
+We also have a Discord channel:
+
+* Server : https://chat.kivy.org
+* Channel : #support
+
+For [debugging on Android](https://python-for-android.readthedocs.io/en/stable/troubleshooting/?highlight=adb#debugging-on-android), don't hesitate to use ADB to get logs of your application.
+
+
+## Contributing
+
+We love pull requests and discussing novel ideas. Check out our
+[contribution guide](https://kivy.org/docs/contribute.html) and
+feel free to improve buildozer.
+
+The following mailing list and Discord channel are used exclusively for
+discussions about developing the Kivy framework and its sister projects:
+
+* Dev Group : https://groups.google.com/group/kivy-dev
+* Email : kivy-dev@googlegroups.com
+
+We also have a Discord channel:
+
+* Server : https://chat.kivy.org
+* Channel : #support
+
+
+## License
+
+Buildozer is released under the terms of the MIT License. Please refer to the
+LICENSE file.
+
+
+## Backers
+
+Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/kivy#backer)]
+
+
+## Sponsors
+
+Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/kivy#sponsor)]
+
+
+# Change Log
+
+## [1.4.0](https://github.com/kivy/buildozer/tree/1.4.0) (2022-07-20)
+
+[Full Changelog](https://github.com/kivy/buildozer/compare/1.3.0...1.4.0)
+
+**Closed issues:**
+
+- error [\#1461](https://github.com/kivy/buildozer/issues/1461)
+- error [\#1460](https://github.com/kivy/buildozer/issues/1460)
+- ModuleNotFoundError: No module named '\_bz2' [\#1457](https://github.com/kivy/buildozer/issues/1457)
+- `java.nio.file.NoSuchFileException` [\#1456](https://github.com/kivy/buildozer/issues/1456)
+- kivy app doesnt start on android [\#1455](https://github.com/kivy/buildozer/issues/1455)
+- \(WSL\) buildozer adb doesn't recognize usb [\#1453](https://github.com/kivy/buildozer/issues/1453)
+- Error: Command \[...\] returned non-zero exit status 1 [\#1452](https://github.com/kivy/buildozer/issues/1452)
+- Command failed: /home/username/.buildozer/android/platform/android-sdk/tools/bin/sdkmanager --sdk\_root=/home/username/.buildozer/android/platform/android-sdk platform-tools [\#1449](https://github.com/kivy/buildozer/issues/1449)
+- gradlew assembly Debug execution error when building apk [\#1447](https://github.com/kivy/buildozer/issues/1447)
+- How to run Python script in background in android? [\#1446](https://github.com/kivy/buildozer/issues/1446)
+- buildozer appclean doesnt work. [\#1443](https://github.com/kivy/buildozer/issues/1443)
+- Flags android.gradle\_dependencies && android.add\_jars don't work.
[\#1442](https://github.com/kivy/buildozer/issues/1442) +- ModuleNotFoundError: No module named 'PIL' [\#1440](https://github.com/kivy/buildozer/issues/1440) +- Bulldozer error while running "buildozer -v android debug" [\#1439](https://github.com/kivy/buildozer/issues/1439) +- started app on three different andoid phones - there is no internet connection on 2 of them [\#1434](https://github.com/kivy/buildozer/issues/1434) +- Not Able to change the python version under buildozer 1.3.0 [\#1432](https://github.com/kivy/buildozer/issues/1432) +- ndk r22 invalid [\#1431](https://github.com/kivy/buildozer/issues/1431) +- error building numpy with bulldozer [\#1426](https://github.com/kivy/buildozer/issues/1426) +- sh.CommandNotFound: cmake [\#1424](https://github.com/kivy/buildozer/issues/1424) +- Compile-time name 'JNIUS\_PYTHON3' not defined [\#1422](https://github.com/kivy/buildozer/issues/1422) +- Error Building hostpython3 for arm64-v8a with buildozer [\#1421](https://github.com/kivy/buildozer/issues/1421) +- building for android failed [\#1420](https://github.com/kivy/buildozer/issues/1420) +- Error "Aidl not found, please install it." [\#1416](https://github.com/kivy/buildozer/issues/1416) +- Buildozer Release Signign Issue [\#1415](https://github.com/kivy/buildozer/issues/1415) +- Unable to run the buildozer command on initial application [\#1414](https://github.com/kivy/buildozer/issues/1414) +- buildozer -v android debug error,please help me,thank you! [\#1413](https://github.com/kivy/buildozer/issues/1413) +- Buildozer failed to create android app [\#1412](https://github.com/kivy/buildozer/issues/1412) +- module encodings found error or something else [\#1408](https://github.com/kivy/buildozer/issues/1408) +- \[ERROR\]: Build failed: Requested API target 27 is not available, install it with the SDK android tool. [\#1404](https://github.com/kivy/buildozer/issues/1404) +- buildozer requirements for firebase-admin Python? [\#1402](https://github.com/kivy/buildozer/issues/1402) +- I am given the following error, which I believe is due to recent changes for aab support in buildozer: " This buildozer version requires a python-for-android version with AAB \(Android App Bundle\) support. Please update your pinned version accordingly [\#1401](https://github.com/kivy/buildozer/issues/1401) +- How to mention Python modules used in Kivy buildozer.spec file? 
[\#1400](https://github.com/kivy/buildozer/issues/1400)
+- Application crashes on startup \(translated from Russian\) [\#1399](https://github.com/kivy/buildozer/issues/1399)
+- Buildozer deploy failed with python3 -m venv venv command [\#1389](https://github.com/kivy/buildozer/issues/1389)
+- Gradlew Build error for Android on Linux [\#1371](https://github.com/kivy/buildozer/issues/1371)
+- AAB [\#1353](https://github.com/kivy/buildozer/issues/1353)
+- Error creating apk on Macos Big Sur [\#1345](https://github.com/kivy/buildozer/issues/1345)
+- BUILD FAILED [\#1335](https://github.com/kivy/buildozer/issues/1335)
+
+**Merged pull requests:**
+
+- Updates default buildozer.spec NDK from 19b to 23b [\#1462](https://github.com/kivy/buildozer/pull/1462) ([misl6](https://github.com/misl6))
+- use p4a --add-source instead of manual copy of java files [\#1450](https://github.com/kivy/buildozer/pull/1450) ([tito](https://github.com/tito))
+- fix aar build [\#1444](https://github.com/kivy/buildozer/pull/1444) ([mzakharo](https://github.com/mzakharo))
+- Our self-hosted Apple Silicon runner now has been migrated to actions/runner v2.292.0 which now supports arm64 natively [\#1438](https://github.com/kivy/buildozer/pull/1438) ([misl6](https://github.com/misl6))
+- Changes for NDK23 [\#1427](https://github.com/kivy/buildozer/pull/1427) ([HyTurtle](https://github.com/HyTurtle))
+- Bump version to 1.4.0.dev0 [\#1411](https://github.com/kivy/buildozer/pull/1411) ([misl6](https://github.com/misl6))
+
+## [1.3.0](https://github.com/kivy/buildozer/tree/1.3.0) (2022-03-13)
+[Full Changelog](https://github.com/kivy/buildozer/compare/1.2.0...1.3.0)
+
+**Merged pull requests:**
+
+- Improves iOS CI workflow and adds Apple Silicon M1 runner [\#1393](https://github.com/kivy/buildozer/pull/1393) ([misl6](https://github.com/misl6))
+- iOS: force archive iOS destination [\#1392](https://github.com/kivy/buildozer/pull/1392) ([syrykh](https://github.com/syrykh))
+- simple typo [\#1390](https://github.com/kivy/buildozer/pull/1390) ([Jessime](https://github.com/Jessime))
+- Bump support-request to v2. Previous integration has been shut down.
[\#1385](https://github.com/kivy/buildozer/pull/1385) ([misl6](https://github.com/misl6)) +- Add android.add\_assets [\#1382](https://github.com/kivy/buildozer/pull/1382) ([RobertFlatt](https://github.com/RobertFlatt)) +- add the option p4a.extra\_args [\#1369](https://github.com/kivy/buildozer/pull/1369) ([antocuni](https://github.com/antocuni)) +- Fix CI tests for iOS and Android [\#1365](https://github.com/kivy/buildozer/pull/1365) ([misl6](https://github.com/misl6)) +- Add aab \(Android App Bundle\) support [\#1356](https://github.com/kivy/buildozer/pull/1356) ([misl6](https://github.com/misl6)) +- Fixes deprecated plistlib API [\#1347](https://github.com/kivy/buildozer/pull/1347) ([meow464](https://github.com/meow464)) +- docs: fix a few simple typos [\#1327](https://github.com/kivy/buildozer/pull/1327) ([tshirtman](https://github.com/tshirtman)) +- use HTTPS urls [\#1319](https://github.com/kivy/buildozer/pull/1319) ([obfusk](https://github.com/obfusk)) +- iOS OTA manifest generation for in-house app distribution [\#1317](https://github.com/kivy/buildozer/pull/1317) ([syrykh](https://github.com/syrykh)) +- android: adaptive icon/launcher: expose p4a options in buildozer.spec [\#1312](https://github.com/kivy/buildozer/pull/1312) ([SomberNight](https://github.com/SomberNight)) +- Add enable\_androidx [\#1311](https://github.com/kivy/buildozer/pull/1311) ([RobertFlatt](https://github.com/RobertFlatt)) +- Update iOS codesign options [\#1307](https://github.com/kivy/buildozer/pull/1307) ([syrykh](https://github.com/syrykh)) +- Allow setting custom p4a URL instead of fork [\#1305](https://github.com/kivy/buildozer/pull/1305) ([syrykh](https://github.com/syrykh)) +- chore : Fixed code quality issues [\#1300](https://github.com/kivy/buildozer/pull/1300) ([powerexploit](https://github.com/powerexploit)) +- added few additional options for android p4a builds [\#1275](https://github.com/kivy/buildozer/pull/1275) ([vesellov](https://github.com/vesellov)) +- :recycle: Minor check\_requirements\(\) refactoring [\#1274](https://github.com/kivy/buildozer/pull/1274) ([AndreMiras](https://github.com/AndreMiras)) +- Allow displaying only the logcat of our app. 
[\#1272](https://github.com/kivy/buildozer/pull/1272) ([tshirtman](https://github.com/tshirtman)) +- fix ndk version check & download link [\#1271](https://github.com/kivy/buildozer/pull/1271) ([obfusk](https://github.com/obfusk)) +- Let buildozer.spec files pin to a specific p4a commit hash [\#1269](https://github.com/kivy/buildozer/pull/1269) ([xloem](https://github.com/xloem)) +- Change android.manifest\_placeholders default to None [\#1265](https://github.com/kivy/buildozer/pull/1265) ([misl6](https://github.com/misl6)) +- Pass debug loglevel down through to python for android [\#1260](https://github.com/kivy/buildozer/pull/1260) ([xloem](https://github.com/xloem)) +- Use develop branch for automated tests [\#1257](https://github.com/kivy/buildozer/pull/1257) ([misl6](https://github.com/misl6)) +- Add android.adb\_args option [\#1238](https://github.com/kivy/buildozer/pull/1238) ([germn](https://github.com/germn)) +- Add support for p4a --feature option [\#1229](https://github.com/kivy/buildozer/pull/1229) ([rambo](https://github.com/rambo)) +- Add option for controlling p4a distutils support, fixes \#1224 [\#1225](https://github.com/kivy/buildozer/pull/1225) ([rambo](https://github.com/rambo)) +- WSL workaround now accommodates WSL 2 [\#1220](https://github.com/kivy/buildozer/pull/1220) ([stefan-sherwood](https://github.com/stefan-sherwood)) +- Add android.backup\_rules parameter to buildozer.spec [\#1219](https://github.com/kivy/buildozer/pull/1219) ([Jorilx](https://github.com/Jorilx)) +- make include\_exts etc. properly case-insentitive [\#1217](https://github.com/kivy/buildozer/pull/1217) ([obfusk](https://github.com/obfusk)) +- Add manifestPlaceholders feature [\#1212](https://github.com/kivy/buildozer/pull/1212) ([misl6](https://github.com/misl6)) +- Add support for --presplash-lottie option of p4a [\#1208](https://github.com/kivy/buildozer/pull/1208) ([tshirtman](https://github.com/tshirtman)) +- Fix issue \#881: Add android.allow\_backup parameter to buildozer.spec [\#1206](https://github.com/kivy/buildozer/pull/1206) ([Jorilx](https://github.com/Jorilx)) +- Add link to GitHub action [\#1198](https://github.com/kivy/buildozer/pull/1198) ([ArtemSBulgakov](https://github.com/ArtemSBulgakov)) +- Fixes heading for iOS target [\#1175](https://github.com/kivy/buildozer/pull/1175) ([fullbuffer](https://github.com/fullbuffer)) +- :white\_check\_mark: Increases ios target test coverage [\#1171](https://github.com/kivy/buildozer/pull/1171) ([AndreMiras](https://github.com/AndreMiras)) +- :white\_check\_mark: Unit test ios target [\#1168](https://github.com/kivy/buildozer/pull/1168) ([AndreMiras](https://github.com/AndreMiras)) +- Expand "~" before every path normalization. [\#1165](https://github.com/kivy/buildozer/pull/1165) ([pakal](https://github.com/pakal)) +- :green\_apple: Improves iOS support [\#1160](https://github.com/kivy/buildozer/pull/1160) ([AndreMiras](https://github.com/AndreMiras)) +- :construction\_worker: Moves Android build to dedicated workflow file [\#1158](https://github.com/kivy/buildozer/pull/1158) ([AndreMiras](https://github.com/AndreMiras)) +- SafeConfigParser deprecated. 
[\#1155](https://github.com/kivy/buildozer/pull/1155) ([Julian-O](https://github.com/Julian-O)) +- Declare Buildozer to be Stable [\#1147](https://github.com/kivy/buildozer/pull/1147) ([Julian-O](https://github.com/Julian-O)) +- :fire: Drops garden support [\#1142](https://github.com/kivy/buildozer/pull/1142) ([AndreMiras](https://github.com/AndreMiras)) +- :arrow\_up: Bumps Ubuntu and OpenJDK versions [\#1140](https://github.com/kivy/buildozer/pull/1140) ([AndreMiras](https://github.com/AndreMiras)) +- Post release bump 1.2.0.dev0 [\#1139](https://github.com/kivy/buildozer/pull/1139) ([AndreMiras](https://github.com/AndreMiras)) +- Update quickstart.rst for Windows 10 [\#1124](https://github.com/kivy/buildozer/pull/1124) ([arqeco](https://github.com/arqeco)) +- Update instalation.rst for Windows 10 [\#1123](https://github.com/kivy/buildozer/pull/1123) ([arqeco](https://github.com/arqeco)) + +## [1.2.0](https://github.com/kivy/buildozer/tree/1.2.0) (2020-05-30) +[Full Changelog](https://github.com/kivy/buildozer/compare/1.1.0...1.2.0) + +**Merged pull requests:** + +- Bumps to latest Cython version [\#1132](https://github.com/kivy/buildozer/pull/1132) ([AndreMiras](https://github.com/AndreMiras)) +- Also integration test on macOS [\#1131](https://github.com/kivy/buildozer/pull/1131) ([AndreMiras](https://github.com/AndreMiras)) +- Tests android.numeric\_version config [\#1129](https://github.com/kivy/buildozer/pull/1129) ([AndreMiras](https://github.com/AndreMiras)) +- Refactored the TargetAndroid tests [\#1127](https://github.com/kivy/buildozer/pull/1127) ([pavelsof](https://github.com/pavelsof)) +- Adds p4a --numeric-version support [\#1126](https://github.com/kivy/buildozer/pull/1126) ([AndreMiras](https://github.com/AndreMiras)) +- Linter fixes and README.md update [\#1118](https://github.com/kivy/buildozer/pull/1118) ([AndreMiras](https://github.com/AndreMiras)) +- Removes Python 2 constructions [\#1114](https://github.com/kivy/buildozer/pull/1114) ([AndreMiras](https://github.com/AndreMiras)) + +## [1.1.0](https://github.com/kivy/buildozer/tree/1.1.0) (2020-05-18) +[Full Changelog](https://github.com/kivy/buildozer/compare/1.0...1.1.0) + +**Fixed bugs:** + +- [Docker image] SSLError("Can't connect to HTTPS URL because the SSL module is not available.") [\#1096](https://github.com/kivy/buildozer/issues/1096) +- NameError: name 'raw\_input' is not defined [\#1070](https://github.com/kivy/buildozer/issues/1070) +- Setup coverage testing [\#1058](https://github.com/kivy/buildozer/issues/1058) +- AttributeError: 'function' object has no attribute 'glob' [\#1044](https://github.com/kivy/buildozer/issues/1044) + +**Closed issues:** + +- Pygallary [\#1109](https://github.com/kivy/buildozer/issues/1109) +- Buildozer compilation ERROR: No matching distribution found for hostpython2 (from -r requirements.txt (line 2)) [\#1104](https://github.com/kivy/buildozer/issues/1104) +- Circular errors with Java version and Android SDK [\#1103](https://github.com/kivy/buildozer/issues/1103) +- APK not running on Android 10? 
[\#1102](https://github.com/kivy/buildozer/issues/1102) +- Buildozer command fails: Could not fetch URL https://pypi.org/simple/pip/: There was a problem confirming the ssl certificate [\#1095](https://github.com/kivy/buildozer/issues/1095) +- Command Failed Error [\#1092](https://github.com/kivy/buildozer/issues/1092) +- APK Can't be install (package can't be install in android) [\#1091](https://github.com/kivy/buildozer/issues/1091) +- buildozer Install python 3.8.1 [\#1090](https://github.com/kivy/buildozer/issues/1090) +- i have weak reference error while using python 3.8 [\#1089](https://github.com/kivy/buildozer/issues/1089) +- buildozer is choosing odd venv, and using a different python? [\#1080](https://github.com/kivy/buildozer/issues/1080) +- Fatal Python error: init\_fs\_encoding: failed to get the Python codec of the filesystem encoding [\#1076](https://github.com/kivy/buildozer/issues/1076) +- Buildozer just wont compile my app [\#1074](https://github.com/kivy/buildozer/issues/1074) +- Autoreconf not found? [\#1072](https://github.com/kivy/buildozer/issues/1072) +- error while buildozer android debug deploy run [\#1064](https://github.com/kivy/buildozer/issues/1064) +- Syntax error in Kivy and KivyMD dependencies file when using Buildozer to compile KivyMD app [\#1051](https://github.com/kivy/buildozer/issues/1051) +- Adding CFFI as a Buildozer requirement breaks the Android build [\#1050](https://github.com/kivy/buildozer/issues/1050) +- Is buildozer compatible with Python 3.7 [\#1048](https://github.com/kivy/buildozer/issues/1048) +- Could not resolve org.jetbrains.kotlin:kotlin-stdlib-jre8:1.2.0 [\#1042](https://github.com/kivy/buildozer/issues/1042) +- Error while running buildozer android debug deploy [\#1038](https://github.com/kivy/buildozer/issues/1038) +- cant able to build apk on linux with python3 [\#1033](https://github.com/kivy/buildozer/issues/1033) +- Keystore was tampered with, or password was incorrect [\#1028](https://github.com/kivy/buildozer/issues/1028) +- Windows support to build Android APK [\#1022](https://github.com/kivy/buildozer/issues/1022) +- How to make third party site-packages 'requests' run on Android [\#1021](https://github.com/kivy/buildozer/issues/1021) +- Crash on buildozer android debug command. Command failed: /usr/bin/python -m pythonforandroid.toolchain [\#1017](https://github.com/kivy/buildozer/issues/1017) +- Build failed: Couldn't find executable for CC [\#1014](https://github.com/kivy/buildozer/issues/1014) +- Cloning Error of python-for-android [\#1008](https://github.com/kivy/buildozer/issues/1008) +- checking whether the C compiler works... 
no [\#1007](https://github.com/kivy/buildozer/issues/1007) +- Kivy :How to ask for storage permission when app starts [\#1004](https://github.com/kivy/buildozer/issues/1004) +- Buildozer :No module named 'numpy.core.\_multiarray\_umath' [\#1002](https://github.com/kivy/buildozer/issues/1002) +- Kivy app Crashes while import openpyxl [\#1001](https://github.com/kivy/buildozer/issues/1001) +- Warning when i run "buildozer -v android debug" [\#982](https://github.com/kivy/buildozer/issues/982) +- sdkmanager is notinstalled [\#927](https://github.com/kivy/buildozer/issues/927) + +**Merged pull requests:** + +- Update README.md [\#1111](https://github.com/kivy/buildozer/pull/1111) ([tshirtman](https://github.com/tshirtman)) +- Fixes Docker apt cache missed [\#1107](https://github.com/kivy/buildozer/pull/1107) ([AndreMiras](https://github.com/AndreMiras)) +- Adds libssl-dev to the install dependencies [\#1106](https://github.com/kivy/buildozer/pull/1106) ([AndreMiras](https://github.com/AndreMiras)) +- Automatically publish to PyPI upon tagging [\#1105](https://github.com/kivy/buildozer/pull/1105) ([AndreMiras](https://github.com/AndreMiras)) +- Fix punctuation and typo in README.md [\#1101](https://github.com/kivy/buildozer/pull/1101) ([hematogender](https://github.com/hematogender)) +- Build integration test [\#1100](https://github.com/kivy/buildozer/pull/1100) ([AndreMiras](https://github.com/AndreMiras)) +- Fixes missing libssl-dev dependency [\#1099](https://github.com/kivy/buildozer/pull/1099) ([AndreMiras](https://github.com/AndreMiras)) +- Drops Python 2 support [\#1094](https://github.com/kivy/buildozer/pull/1094) ([AndreMiras](https://github.com/AndreMiras)) +- Checks SDK, NDK and p4a get downloaded on first run [\#1093](https://github.com/kivy/buildozer/pull/1093) ([AndreMiras](https://github.com/AndreMiras)) +- Integration testing [\#1083](https://github.com/kivy/buildozer/pull/1083) ([AndreMiras](https://github.com/AndreMiras)) +- Also tests against macOS platform [\#1078](https://github.com/kivy/buildozer/pull/1078) ([AndreMiras](https://github.com/AndreMiras)) +- Fix NameError with Python 3 and iOS target [\#1071](https://github.com/kivy/buildozer/pull/1071) ([lerela](https://github.com/lerela)) +- Unit tests TargetAndroid.build\_package() [\#1069](https://github.com/kivy/buildozer/pull/1069) ([AndreMiras](https://github.com/AndreMiras)) +- Dedicated Docker build/run job [\#1068](https://github.com/kivy/buildozer/pull/1068) ([AndreMiras](https://github.com/AndreMiras)) +- F841: local variable is assigned to but never used [\#1066](https://github.com/kivy/buildozer/pull/1066) ([AndreMiras](https://github.com/AndreMiras)) +- PEP8 organisation and fixes [\#1065](https://github.com/kivy/buildozer/pull/1065) ([AndreMiras](https://github.com/AndreMiras)) +- Fixes coveralls.io on pull requests [\#1063](https://github.com/kivy/buildozer/pull/1063) ([AndreMiras](https://github.com/AndreMiras)) +- Coveralls TOKEN is only available on branch master [\#1062](https://github.com/kivy/buildozer/pull/1062) ([AndreMiras](https://github.com/AndreMiras)) +- Starts unit testing buildozer/targets/android.py [\#1061](https://github.com/kivy/buildozer/pull/1061) ([AndreMiras](https://github.com/AndreMiras)) +- Setup coverage testing [\#1060](https://github.com/kivy/buildozer/pull/1060) ([AndreMiras](https://github.com/AndreMiras)) +- Fix Dockerfile dependencies [\#1053](https://github.com/kivy/buildozer/pull/1053) ([Sirfanas](https://github.com/Sirfanas)) +- Updates default buildozer.spec NDK from 17c 
to 19b [\#1041](https://github.com/kivy/buildozer/pull/1041) ([AndreMiras](https://github.com/AndreMiras)) +- Fix config typo in default.spec [\#1026](https://github.com/kivy/buildozer/pull/1026) ([touilleMan](https://github.com/touilleMan)) +- Android gradle build: fix apk name [\#1025](https://github.com/kivy/buildozer/pull/1025) ([SomberNight](https://github.com/SomberNight)) + +## [1.0](https://github.com/kivy/buildozer/tree/1.0) (2019-12-22) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.39...1.0) + +**Fixed bugs:** + +- Accept license terms prompt is not visible [\#916](https://github.com/kivy/buildozer/issues/916) +- Unable to build release. File "/usr/lib/python3.6/shutil.py", line 96, in copyfile with open\(src, 'rb'\) as fsrc: IOError: \[Errno 2\] No such file or directory: [\#851](https://github.com/kivy/buildozer/issues/851) +- error: cannot find -lpython3.7 [\#842](https://github.com/kivy/buildozer/issues/842) +- android.accept\_sdk\_license may misbehave [\#816](https://github.com/kivy/buildozer/issues/816) +- Python3 error with pexpect [\#221](https://github.com/kivy/buildozer/issues/221) + +**Closed issues:** + +- Sup Req : Buildozer debug does not complete - Error threading.py, sh.py, etc.. [\#1000](https://github.com/kivy/buildozer/issues/1000) +- Buildozer failed to execute the last command [\#999](https://github.com/kivy/buildozer/issues/999) +- Android fullscreen mode: cannot hide status bar! [\#989](https://github.com/kivy/buildozer/issues/989) +- buildozer uses wrong python version and disrespects requirement versions [\#988](https://github.com/kivy/buildozer/issues/988) +- The version of Kivy installed on this system is too old. [\#987](https://github.com/kivy/buildozer/issues/987) +- Failed to download any source lists! [\#986](https://github.com/kivy/buildozer/issues/986) +- Aidl cannot be executed error buildozer [\#984](https://github.com/kivy/buildozer/issues/984) +- buildozer debug error "\[WARNING\]" when i run buildozer andriod debug [\#980](https://github.com/kivy/buildozer/issues/980) +- Building kivy app with python3 requirement gives "No compiled python is present to zip, skipping." warning and "Unable to import kivy.\_clock. Have you perhaps forgotten to compile kivy? ..." error when run on android device. 
[\#977](https://github.com/kivy/buildozer/issues/977) +- Java And Python-for-android toolchain errors [\#975](https://github.com/kivy/buildozer/issues/975) +- buildozer + python3 [\#973](https://github.com/kivy/buildozer/issues/973) +- Error while running ".buildozer.../native-build/python -OO -m compileall -b -f /.../app [\#972](https://github.com/kivy/buildozer/issues/972) +- buildozer fails with kivymd link [\#968](https://github.com/kivy/buildozer/issues/968) +- ndk\_platform doesn't exist: /home/rr/android-ndk-r20/platforms/android-20/arch-arm [\#966](https://github.com/kivy/buildozer/issues/966) +- Paused at Installing/updating SDK platform tools if necessary [\#965](https://github.com/kivy/buildozer/issues/965) +- java.lang.NoClassDefFoundError: javax/xml/bind/annotation/XmlSchema [\#962](https://github.com/kivy/buildozer/issues/962) +- please add aidl into Dockerfile [\#960](https://github.com/kivy/buildozer/issues/960) +- Missing \_ctypes module [\#955](https://github.com/kivy/buildozer/issues/955) +- Kivy-Buildozer release version doesnt upload google store [\#953](https://github.com/kivy/buildozer/issues/953) +- buildozer using wrong kivy version [\#943](https://github.com/kivy/buildozer/issues/943) +- buildozer ndk-api=21 error [\#942](https://github.com/kivy/buildozer/issues/942) +- app crash [\#939](https://github.com/kivy/buildozer/issues/939) +- ERROR: JAVA\_HOME is set to an invalid directory: /usr/lib/jvm/java-8-oracle [\#929](https://github.com/kivy/buildozer/issues/929) +- no-issue [\#926](https://github.com/kivy/buildozer/issues/926) +- can't find file to patch toggle\_jpg\_png\_webp.patch [\#921](https://github.com/kivy/buildozer/issues/921) +- No matching distribution found for io [\#915](https://github.com/kivy/buildozer/issues/915) +- Error: No matching distribution found for io\(from -r requirements.txt \(line 5\)\) [\#914](https://github.com/kivy/buildozer/issues/914) +- I'm also experiencing the same issue, using macOS 10.14.3 and no Docker -- just plain Buildozer from the PyPi repo. What is the recommended course of action to fix this? [\#913](https://github.com/kivy/buildozer/issues/913) +- app crash [\#911](https://github.com/kivy/buildozer/issues/911) +- warnings while building apk using buildozer [\#910](https://github.com/kivy/buildozer/issues/910) +- buildozer error please help me!! [\#906](https://github.com/kivy/buildozer/issues/906) +- My application does not open in android but if it works in geany, help please. [\#903](https://github.com/kivy/buildozer/issues/903) +- \[Docker image\] Can't compile apk: /bin/tar ... "Cannot utime: Operation not permitted" [\#902](https://github.com/kivy/buildozer/issues/902) +- Fails to call numpy fft on android [\#899](https://github.com/kivy/buildozer/issues/899) +- Command failed: /usr/bin/python3 -m pythonforandroid.toolchain [\#898](https://github.com/kivy/buildozer/issues/898) +- error: C compiler cannot create executables [\#897](https://github.com/kivy/buildozer/issues/897) +- stopped at "Installing/updating SDK platform tools if necessary" [\#896](https://github.com/kivy/buildozer/issues/896) +- ValueError: Tried to access ndk\_ver but it has not been set - this should not happen, something went wrong! 
[\#893](https://github.com/kivy/buildozer/issues/893) +- Buildozer not installing from custom source folders [\#892](https://github.com/kivy/buildozer/issues/892) +- Error with build requirements PIL \(Python 3\) [\#890](https://github.com/kivy/buildozer/issues/890) +- rocker plants.cvs -an active -r -p rco.png [\#889](https://github.com/kivy/buildozer/issues/889) +- rocker plants.cvs -an active -r -p rco.png [\#888](https://github.com/kivy/buildozer/issues/888) +- rocker plants.cvs -an active -r -p rco.png [\#887](https://github.com/kivy/buildozer/issues/887) +- rocker plants.cvs -an active -r -p rco.png [\#886](https://github.com/kivy/buildozer/issues/886) +- rocker plants.cvs -an active -r -p rco.png [\#885](https://github.com/kivy/buildozer/issues/885) +- host=arm-linux-androideabi [\#884](https://github.com/kivy/buildozer/issues/884) +- Buildozer has attribute errors and will not run [\#883](https://github.com/kivy/buildozer/issues/883) +- buildozer on osx not working [\#879](https://github.com/kivy/buildozer/issues/879) +- Android API Issue [\#877](https://github.com/kivy/buildozer/issues/877) +- What is the way to add folders and files? [\#875](https://github.com/kivy/buildozer/issues/875) +- Buildozer Error [\#874](https://github.com/kivy/buildozer/issues/874) +- Problem with "Unpacking sdl2\_image" and "Parent module 'pythonforandroid.recipes' not found" [\#872](https://github.com/kivy/buildozer/issues/872) +- buildozer android debug deploy is stuck [\#870](https://github.com/kivy/buildozer/issues/870) +- running autoreconf -vif [\#866](https://github.com/kivy/buildozer/issues/866) +- Exception in thread "main" java.lang.NoClassDefFoundError: javax/xml/bind/annotation/XmlSchema [\#862](https://github.com/kivy/buildozer/issues/862) +- Can not build x86 APK file [\#861](https://github.com/kivy/buildozer/issues/861) +- But then a lot more issues, deep in the weeds now. 
[\#858](https://github.com/kivy/buildozer/issues/858) +- Create tests for PR fix bug when logcat value which cannot be decode to utf-8 [\#857](https://github.com/kivy/buildozer/issues/857) +- Can't Cythonize pyjnius: TypeError: can't pickle Argument objects [\#850](https://github.com/kivy/buildozer/issues/850) +- buildozer -h complains about NoneType target [\#849](https://github.com/kivy/buildozer/issues/849) +- Building failes at compiling openssl\[armeabi-v7a\] [\#841](https://github.com/kivy/buildozer/issues/841) +- Applying patches for libffi\[armeabi-v7a\] [\#840](https://github.com/kivy/buildozer/issues/840) +- Errors while running buildozer android debug deploy [\#832](https://github.com/kivy/buildozer/issues/832) +- "Command failed" when running "buildozer android debug deploy run" [\#829](https://github.com/kivy/buildozer/issues/829) +- Build Python3 for Android with Azure-keyvault and merest [\#828](https://github.com/kivy/buildozer/issues/828) +- Outdated Android NDK download URL in Kivy Virtual Machine [\#827](https://github.com/kivy/buildozer/issues/827) +- App crashes on startup [\#826](https://github.com/kivy/buildozer/issues/826) +- I found the origin of the problem mentioned in issue \#694 \(Aidl not found\) [\#824](https://github.com/kivy/buildozer/issues/824) +- download\(\) is not CI log friendly [\#823](https://github.com/kivy/buildozer/issues/823) +- Application stuck in portrait mode [\#820](https://github.com/kivy/buildozer/issues/820) +- Orientation landscape when set to portrait [\#818](https://github.com/kivy/buildozer/issues/818) +- Certain python requirements fail due to `\_ctypes` [\#815](https://github.com/kivy/buildozer/issues/815) +- Running buildozer help crashes [\#813](https://github.com/kivy/buildozer/issues/813) +- \[Buildozer 0.40.dev0, Buildozer 0.39\] AttributeError: 'Buildozer' object has no attribute 'translate\_target' [\#812](https://github.com/kivy/buildozer/issues/812) +- Version number failing to be added to apk [\#810](https://github.com/kivy/buildozer/issues/810) +- Virtual machine out of date [\#764](https://github.com/kivy/buildozer/issues/764) +- Docker doesn't build [\#751](https://github.com/kivy/buildozer/issues/751) +- Buildozer exit code on recipe fail [\#674](https://github.com/kivy/buildozer/issues/674) +- not able to release apk in release mode - build failed [\#363](https://github.com/kivy/buildozer/issues/363) +- Issue with NDK r11b [\#308](https://github.com/kivy/buildozer/issues/308) +- Feature: Create a compat.py for correct PY3 support and implement it [\#300](https://github.com/kivy/buildozer/issues/300) +- Curl missing in KivyVM [\#296](https://github.com/kivy/buildozer/issues/296) +- Virtual machine image could have better support for international keyboards [\#295](https://github.com/kivy/buildozer/issues/295) +- Virtual machine image does not have enough disk space [\#294](https://github.com/kivy/buildozer/issues/294) +- Not quite an issue, just some dependency stuff [\#247](https://github.com/kivy/buildozer/issues/247) +- \# Command failed: ./distribute.sh -m "kivy" -d "stapp" [\#204](https://github.com/kivy/buildozer/issues/204) +- Command ./distribute.sh failed -- no buildozer.spec found when it exists [\#203](https://github.com/kivy/buildozer/issues/203) +- buildozer -v android debug : Compile fails at fcntlmodule.c [\#196](https://github.com/kivy/buildozer/issues/196) +- Build errors on OSX 10.10 \(for android\) [\#165](https://github.com/kivy/buildozer/issues/165) +- Feature Request: Vagrantfile 
[\#154](https://github.com/kivy/buildozer/issues/154) + +**Merged pull requests:** + +- Feature GitHub actions [\#1005](https://github.com/kivy/buildozer/pull/1005) ([tito](https://github.com/tito)) +- Fixes test\_p4a\_recommended\_android\_ndk\_found\(\) mocking [\#983](https://github.com/kivy/buildozer/pull/983) ([AndreMiras](https://github.com/AndreMiras)) +- Fixes packaging for current p4a develop branch [\#978](https://github.com/kivy/buildozer/pull/978) ([misl6](https://github.com/misl6)) +- Updates install instructions and troubleshooting [\#976](https://github.com/kivy/buildozer/pull/976) ([AndreMiras](https://github.com/AndreMiras)) +- fix: show output of sdk update if auto\_accept\_license is false [\#970](https://github.com/kivy/buildozer/pull/970) ([tshirtman](https://github.com/tshirtman)) +- Add libs only for current arch [\#969](https://github.com/kivy/buildozer/pull/969) ([misl6](https://github.com/misl6)) +- Rename final apk with arch in the name [\#967](https://github.com/kivy/buildozer/pull/967) ([tito](https://github.com/tito)) +- Code improvements around NDK download [\#961](https://github.com/kivy/buildozer/pull/961) ([inclement](https://github.com/inclement)) +- Separate build per android.arch [\#957](https://github.com/kivy/buildozer/pull/957) ([tito](https://github.com/tito)) +- spec file: cast paths in source.exclude\_dirs to lowercase [\#956](https://github.com/kivy/buildozer/pull/956) ([SomberNight](https://github.com/SomberNight)) +- added cmake to requirements [\#950](https://github.com/kivy/buildozer/pull/950) ([mcroni](https://github.com/mcroni)) +- Add ability to get p4a's recommended android's NDK version [\#947](https://github.com/kivy/buildozer/pull/947) ([opacam](https://github.com/opacam)) +- New feature: allow to use a p4a fork [\#940](https://github.com/kivy/buildozer/pull/940) ([opacam](https://github.com/opacam)) +- Minor linter fix [\#937](https://github.com/kivy/buildozer/pull/937) ([AndreMiras](https://github.com/AndreMiras)) +- Update installation.rst [\#936](https://github.com/kivy/buildozer/pull/936) ([yairlempert](https://github.com/yairlempert)) +- clarify overriding of config tokens [\#935](https://github.com/kivy/buildozer/pull/935) ([brentpicasso](https://github.com/brentpicasso)) +- Extend add\_libs to arm64-v8a [\#934](https://github.com/kivy/buildozer/pull/934) ([misl6](https://github.com/misl6)) +- Doc/installation updates [\#932](https://github.com/kivy/buildozer/pull/932) ([tshirtman](https://github.com/tshirtman)) +- customizability options [\#919](https://github.com/kivy/buildozer/pull/919) ([zworkb](https://github.com/zworkb)) +- disable orientation and window option for service\_only bootstrap [\#912](https://github.com/kivy/buildozer/pull/912) ([zworkb](https://github.com/zworkb)) +- Unit test unicode decode on command output, fixes \#857 [\#905](https://github.com/kivy/buildozer/pull/905) ([AndreMiras](https://github.com/AndreMiras)) +- Made sure to print all lines of license question by always flushing [\#904](https://github.com/kivy/buildozer/pull/904) ([inclement](https://github.com/inclement)) +- Exits with error code on build exception, fixes \#674 [\#882](https://github.com/kivy/buildozer/pull/882) ([AndreMiras](https://github.com/AndreMiras)) +- Download Apache ANT at the same path as the buildozer.spec android.ant\_path option [\#860](https://github.com/kivy/buildozer/pull/860) ([robertpro](https://github.com/robertpro)) +- Fix for bug when logcat value which cannot be decode to utf-8 
[\#856](https://github.com/kivy/buildozer/pull/856) ([Draqun](https://github.com/Draqun)) +- Changes default log level to debug \(2\) [\#855](https://github.com/kivy/buildozer/pull/855) ([AndreMiras](https://github.com/AndreMiras)) +- Handles unknown command/target error gracefully, closes \#812 [\#853](https://github.com/kivy/buildozer/pull/853) ([AndreMiras](https://github.com/AndreMiras)) +- Updates system requirements needed to build recipes [\#852](https://github.com/kivy/buildozer/pull/852) ([AndreMiras](https://github.com/AndreMiras)) +- Various Dockerfile improvements [\#848](https://github.com/kivy/buildozer/pull/848) ([AndreMiras](https://github.com/AndreMiras)) +- Support p4a uses-library argument [\#846](https://github.com/kivy/buildozer/pull/846) ([pax0r](https://github.com/pax0r)) +- Removed reference to Kivy VM [\#845](https://github.com/kivy/buildozer/pull/845) ([Megalex42](https://github.com/Megalex42)) +- Removes extra log\_env\(\) call [\#843](https://github.com/kivy/buildozer/pull/843) ([AndreMiras](https://github.com/AndreMiras)) +- add missing android arch to spec file [\#839](https://github.com/kivy/buildozer/pull/839) ([OptimusGREEN](https://github.com/OptimusGREEN)) +- feat: make unzip quiet [\#836](https://github.com/kivy/buildozer/pull/836) ([mkg20001](https://github.com/mkg20001)) +- fix: drop release-unsigned from release output path [\#835](https://github.com/kivy/buildozer/pull/835) ([mkg20001](https://github.com/mkg20001)) +- Add ci\_mode to toggle download progress [\#833](https://github.com/kivy/buildozer/pull/833) ([mkg20001](https://github.com/mkg20001)) +- Unit test logger [\#831](https://github.com/kivy/buildozer/pull/831) ([AndreMiras](https://github.com/AndreMiras)) +- refactor auto accept license [\#822](https://github.com/kivy/buildozer/pull/822) ([AndreMiras](https://github.com/AndreMiras)) +- Unit tests buildozer --help command, refs \#813 [\#821](https://github.com/kivy/buildozer/pull/821) ([AndreMiras](https://github.com/AndreMiras)) +- Use getbooldefault\(\) for boolean, fixes \#806 [\#817](https://github.com/kivy/buildozer/pull/817) ([AndreMiras](https://github.com/AndreMiras)) +- Update the issue template [\#814](https://github.com/kivy/buildozer/pull/814) ([AndreMiras](https://github.com/AndreMiras)) +- Show envs when run2 [\#802](https://github.com/kivy/buildozer/pull/802) ([maho](https://github.com/maho)) + +## [0.39](https://github.com/kivy/buildozer/tree/0.39) (2019-02-04) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.38...0.39) + +## [0.38](https://github.com/kivy/buildozer/tree/0.38) (2019-02-03) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.36...0.38) + +**Fixed bugs:** + +- TypeError: read\(\) takes exactly 2 arguments \(3 given\) [\#793](https://github.com/kivy/buildozer/issues/793) +- TypeError: read\(\) takes exactly 2 arguments \(3 given\) [\#792](https://github.com/kivy/buildozer/issues/792) +- AttributeError: 'str' object has no attribute 'decode' [\#756](https://github.com/kivy/buildozer/issues/756) + +**Closed issues:** + +- Openssl compilation fails, '\_\_atomic' undefined [\#800](https://github.com/kivy/buildozer/issues/800) +- --ndk-api appears twice [\#798](https://github.com/kivy/buildozer/issues/798) +- Error in toolchain.py and argument --ndk-api 9 [\#797](https://github.com/kivy/buildozer/issues/797) +- Target ndk-api is 19, but the \*\*python3 recipe supports only 21\*\* [\#796](https://github.com/kivy/buildozer/issues/796) +- Add support for --frameworks on iOS 
[\#790](https://github.com/kivy/buildozer/issues/790) +- Hello World impossible: checking whether the C compiler works... no [\#785](https://github.com/kivy/buildozer/issues/785) +- Buildozer 0.37 UnicodeDecodeError: 'ascii' codec can't decode byte 0xc2 [\#782](https://github.com/kivy/buildozer/issues/782) +- APK Crashes on start with no obvious cause [\#779](https://github.com/kivy/buildozer/issues/779) +- Outdated Android NDK download URL in Kivy Virtual Machine [\#778](https://github.com/kivy/buildozer/issues/778) +- Buildozer "gradlew failed!" [\#777](https://github.com/kivy/buildozer/issues/777) +- Conflicting documentation Python2 versus Python3 [\#774](https://github.com/kivy/buildozer/issues/774) +- No valid --ndk-api received, using the default of 0 = min\(android-api=0, default ndk-api=21\) [\#772](https://github.com/kivy/buildozer/issues/772) +- `default.spec` specifies `python3` but also incompatible NDK and API settings [\#770](https://github.com/kivy/buildozer/issues/770) +- Error: minsdk argument does not match the api that is compiled against [\#768](https://github.com/kivy/buildozer/issues/768) +- Need to support sensorLandscape [\#762](https://github.com/kivy/buildozer/issues/762) +- minsdk argument does not match the api that is compiled against [\#761](https://github.com/kivy/buildozer/issues/761) +- Need to be able to specify the NDK API target [\#758](https://github.com/kivy/buildozer/issues/758) +- Buildozer cannot import name pythran\_is\_numpy\_func\_supported [\#753](https://github.com/kivy/buildozer/issues/753) +- Python3 recipe not building, error with self.ctx.python\_recipe [\#752](https://github.com/kivy/buildozer/issues/752) +- compile error [\#744](https://github.com/kivy/buildozer/issues/744) +- APK build failed "ndk\_platform doesn't exist" \(Python3.6; android.api = 27\) [\#742](https://github.com/kivy/buildozer/issues/742) +- Logo Design Proposal [\#734](https://github.com/kivy/buildozer/issues/734) +- `android clean` not working \(FileNotFoundError: \[Errno 2\] No such file or directory\) [\#732](https://github.com/kivy/buildozer/issues/732) +- Can't Make an APK via Buildozer [\#712](https://github.com/kivy/buildozer/issues/712) +- App build Failed [\#707](https://github.com/kivy/buildozer/issues/707) +- ERROR: /home/kivy/.buildozer/android/platform/apache-ant-1.9.4/bin/ant failed! [\#706](https://github.com/kivy/buildozer/issues/706) +- APK using txt file in main.py issue. 
[\#704](https://github.com/kivy/buildozer/issues/704) +- "Copying APK" fails because of wrong path used [\#699](https://github.com/kivy/buildozer/issues/699) +- Cannot build [\#697](https://github.com/kivy/buildozer/issues/697) +- Fail on build [\#692](https://github.com/kivy/buildozer/issues/692) +- Cannot build "Hello World" APK [\#687](https://github.com/kivy/buildozer/issues/687) +- Error when building an android APK using Kivy [\#684](https://github.com/kivy/buildozer/issues/684) +- can't get audio stream from a server [\#683](https://github.com/kivy/buildozer/issues/683) +- Continuous integration testing [\#679](https://github.com/kivy/buildozer/issues/679) +- Post build fails because copying built apk fails [\#671](https://github.com/kivy/buildozer/issues/671) +- Buildozer can't compile for Android on Manjaro Linux [\#670](https://github.com/kivy/buildozer/issues/670) +- Icon/Logo Proposal [\#669](https://github.com/kivy/buildozer/issues/669) +- Buildozer cant compile platform Ubuntu 16.04 [\#668](https://github.com/kivy/buildozer/issues/668) +- Buildozer gets stuck at unpacking kivy [\#667](https://github.com/kivy/buildozer/issues/667) +- Kivy window [\#666](https://github.com/kivy/buildozer/issues/666) +- Failed building wheel for pyaudio [\#665](https://github.com/kivy/buildozer/issues/665) +- Buildozer compilation gets stuck [\#663](https://github.com/kivy/buildozer/issues/663) +- IOError: \[Errno 2\] No such file or directory: u'/home/samurai-girl/python2/.buildozer/android/platform/build/dists/test/build/outputs/apk/test-debug.apk' [\#662](https://github.com/kivy/buildozer/issues/662) +- Unable to build \_ctypes.so [\#660](https://github.com/kivy/buildozer/issues/660) +- error while packaging for android on macOS [\#659](https://github.com/kivy/buildozer/issues/659) +- Command failed: /usr/bin/python2 -m pythonforandroid.toolchain [\#658](https://github.com/kivy/buildozer/issues/658) +- Cannot build apk with buildozer android debug [\#656](https://github.com/kivy/buildozer/issues/656) +- How to correctly build an APK package with Python 3? [\#655](https://github.com/kivy/buildozer/issues/655) +- SDK does not have any Build Tools installed. [\#652](https://github.com/kivy/buildozer/issues/652) +- Error building Kivy app with opencv\(and numpy\) - raise CommandNotFoundError\(path\) [\#651](https://github.com/kivy/buildozer/issues/651) +- FileNotFoundError: \[Errno 2\] [\#649](https://github.com/kivy/buildozer/issues/649) +- The python3crystax recipe can only be built when using the CrystaX NDK. Exiting. [\#648](https://github.com/kivy/buildozer/issues/648) +- build apk : error can not find the build/output folder [\#647](https://github.com/kivy/buildozer/issues/647) +- help me building cryptography [\#646](https://github.com/kivy/buildozer/issues/646) +- sslv3 alert handshake failure on Addroid [\#643](https://github.com/kivy/buildozer/issues/643) +- Opening Android SDK Manager to download "Google Repository" and "Google Play services" [\#642](https://github.com/kivy/buildozer/issues/642) +- kivy python host failed [\#641](https://github.com/kivy/buildozer/issues/641) +- python.host failed [\#640](https://github.com/kivy/buildozer/issues/640) +- Kivy Crashing [\#638](https://github.com/kivy/buildozer/issues/638) +- .so lib files missing debug symbols [\#637](https://github.com/kivy/buildozer/issues/637) +- Kivy app doesn't run [\#634](https://github.com/kivy/buildozer/issues/634) +- "\* daemon not running. 
starting it now on port 5037 \* \* daemon started successfully \*" [\#633](https://github.com/kivy/buildozer/issues/633) +- FileNotFoundError: \[Errno 2\] No such file or directory: '/home/alex/Documents/pengame/.buildozer/android/platform/build/dists/helloworldapp/build/outputs/apk/helloworldapp-debug.apk' [\#632](https://github.com/kivy/buildozer/issues/632) +- Compiling Kivy app to apk fails with a pip error [\#631](https://github.com/kivy/buildozer/issues/631) +- ERROR: /home/ubuntu/.buildozer/android/platform/apache-ant-1.9.4/bin/ant failed! [\#625](https://github.com/kivy/buildozer/issues/625) +- Build error when building Kivy app [\#624](https://github.com/kivy/buildozer/issues/624) +- buildozer debug is not running this error is showing [\#622](https://github.com/kivy/buildozer/issues/622) +- unexpected e\_machine: 3 [\#621](https://github.com/kivy/buildozer/issues/621) +- buildozer matplotlib [\#617](https://github.com/kivy/buildozer/issues/617) +- IOError: \[Errno 2\] No such file or directory: u'/Users/gauravgupta/kivy/.buildozer/android/platform/build/dists/myellipse/build/outputs/apk/myellipse-debug.apk' [\#613](https://github.com/kivy/buildozer/issues/613) +- Build failing in final ANT stage [\#610](https://github.com/kivy/buildozer/issues/610) +- Cannot build apk [\#606](https://github.com/kivy/buildozer/issues/606) +- Installing buildozer for Python 3 failed [\#604](https://github.com/kivy/buildozer/issues/604) +- FileNotFoundError at end of build when copying APK file. [\#602](https://github.com/kivy/buildozer/issues/602) +- BUILD FAILED [\#601](https://github.com/kivy/buildozer/issues/601) +- Error with buildozer [\#591](https://github.com/kivy/buildozer/issues/591) +- ImportError: No module named Cython.Distutils While running buildozer android debug [\#587](https://github.com/kivy/buildozer/issues/587) +- python 2.7 compile with NDK 15c [\#584](https://github.com/kivy/buildozer/issues/584) +- Problem upgrade to newest version [\#583](https://github.com/kivy/buildozer/issues/583) +- Simple Kivy test app can't compile to android apk [\#581](https://github.com/kivy/buildozer/issues/581) +- Apple Watch Support [\#574](https://github.com/kivy/buildozer/issues/574) +- Skip error or better error message: requirements with extra comma [\#562](https://github.com/kivy/buildozer/issues/562) +- failed to build numpy on Mac [\#557](https://github.com/kivy/buildozer/issues/557) +- p4a.source\_dir set and android clean --\> crash [\#556](https://github.com/kivy/buildozer/issues/556) +- Kivy python android build error? [\#555](https://github.com/kivy/buildozer/issues/555) +- python3 import networkx needs lib2to3 but cannot be imported [\#554](https://github.com/kivy/buildozer/issues/554) +- buildozer android debug deploy run ERROR [\#550](https://github.com/kivy/buildozer/issues/550) +- App Crashes [\#541](https://github.com/kivy/buildozer/issues/541) +- Requirements: Requests, Ssl [\#539](https://github.com/kivy/buildozer/issues/539) +- Error: Aidl cannot be executed [\#532](https://github.com/kivy/buildozer/issues/532) +- Kivy and Beautifulsoup with Buildozer fail on Python 3. 
[\#529](https://github.com/kivy/buildozer/issues/529) +- I'm having similar problem [\#528](https://github.com/kivy/buildozer/issues/528) +- Awkward error while building apk [\#526](https://github.com/kivy/buildozer/issues/526) +- The "android" command is deprecated [\#519](https://github.com/kivy/buildozer/issues/519) +- Kivy: Permission denied \(when running buildozer android on Mac OS X\) [\#517](https://github.com/kivy/buildozer/issues/517) +- Extraction of "Kivy2.7z" failed [\#516](https://github.com/kivy/buildozer/issues/516) +- buildozer still download ndk from wrong path [\#506](https://github.com/kivy/buildozer/issues/506) +- android-new landscape error on restart [\#482](https://github.com/kivy/buildozer/issues/482) +- Add possibility to choose pip version for modules in the requirements section [\#475](https://github.com/kivy/buildozer/issues/475) +- Buildozer android\_new fails when ffmpeg is added to the requirements: No C Compiler found! [\#463](https://github.com/kivy/buildozer/issues/463) +- CalledProcessError: Command '\['python', 'package\_app.py', '--displayname=Music Favorites'\]' returned non-zero exit status 1 [\#448](https://github.com/kivy/buildozer/issues/448) +- \[Errno 13\] Permission denied; Buildozer on Flask Server [\#445](https://github.com/kivy/buildozer/issues/445) +- Error while compiling a kivy app for android \(undefined reference to 'SDL\_main' collect2: error: ld returned 1 exit status\) [\#438](https://github.com/kivy/buildozer/issues/438) +- AsyncImage not loading in .apk [\#433](https://github.com/kivy/buildozer/issues/433) +- Big Bug, ORMs can't find sqlite3.so [\#426](https://github.com/kivy/buildozer/issues/426) +- cp: cannot stat '~/.buildozer/android/platform/python-for-android/src/libs/armeabi/\*.so': No such file or directory [\#422](https://github.com/kivy/buildozer/issues/422) +- using android\_new causes missing packages errors [\#420](https://github.com/kivy/buildozer/issues/420) +- Buildozer fails with "Command '\['ant', 'debug'\]' returned non-zero exit status 1" [\#419](https://github.com/kivy/buildozer/issues/419) +- Buildozer fails to build when ffmpeg is added [\#418](https://github.com/kivy/buildozer/issues/418) +- Build on arm64 android [\#412](https://github.com/kivy/buildozer/issues/412) +- Building with "android\_new" target fails due to missing architecture on pythonforandroid.toolchain apk [\#407](https://github.com/kivy/buildozer/issues/407) +- Command '\['ant', 'debug'\]' returned non-zero exit status 1 when building PIL library [\#406](https://github.com/kivy/buildozer/issues/406) +- Android, Pygame backend: buildozer android debug deploy run tries to launch wrong Activity [\#401](https://github.com/kivy/buildozer/issues/401) +- AttributeError: 'NoneType' object has no attribute 'splitlines' [\#400](https://github.com/kivy/buildozer/issues/400) +- Pyzmq import fails on Android with python2 / kivy 1.9 / cython 0.23 [\#397](https://github.com/kivy/buildozer/issues/397) +- Python 3: \# Aidl cannot be executed AttributeError: 'module' object has no attribute 'maxint' [\#392](https://github.com/kivy/buildozer/issues/392) +- subprocess.CalledProcessError [\#391](https://github.com/kivy/buildozer/issues/391) +- python.host could not find platform independent libraries [\#390](https://github.com/kivy/buildozer/issues/390) +- Buildozer android\_new fails [\#388](https://github.com/kivy/buildozer/issues/388) +- toolchain.py: error: argument --private is required [\#384](https://github.com/kivy/buildozer/issues/384) +- Android fails 
on running Kivy apk [\#381](https://github.com/kivy/buildozer/issues/381) +- ImportError: No module named plyer with android\_new as a target [\#379](https://github.com/kivy/buildozer/issues/379) +- buildozer -v android debug runs into problem [\#376](https://github.com/kivy/buildozer/issues/376) +- subprocess.CalledProcessError [\#372](https://github.com/kivy/buildozer/issues/372) +- Can't install Pillow , return errors [\#371](https://github.com/kivy/buildozer/issues/371) +- failed to load ctypes as android app built on OSX [\#368](https://github.com/kivy/buildozer/issues/368) +- old\_toolchain not shows images [\#367](https://github.com/kivy/buildozer/issues/367) +- Buildozer deployment dlopen failed libSDL2.so has unexpected e\_machine: 40 [\#365](https://github.com/kivy/buildozer/issues/365) +- Rebuild only selected packages [\#226](https://github.com/kivy/buildozer/issues/226) + +**Merged pull requests:** + +- Updated README for Python 3 [\#809](https://github.com/kivy/buildozer/pull/809) ([inclement](https://github.com/inclement)) +- Update README.md to add opencollective [\#808](https://github.com/kivy/buildozer/pull/808) ([tito](https://github.com/tito)) +- Made buildozer respect user sdk dir when accessing sdkmanager [\#807](https://github.com/kivy/buildozer/pull/807) ([inclement](https://github.com/inclement)) +- Removed android\_old and updated SDK management to work with current SDK [\#806](https://github.com/kivy/buildozer/pull/806) ([inclement](https://github.com/inclement)) +- Removed redundant --ndk-api argument and fixed default value [\#805](https://github.com/kivy/buildozer/pull/805) ([inclement](https://github.com/inclement)) +- Update for p4a master [\#803](https://github.com/kivy/buildozer/pull/803) ([inclement](https://github.com/inclement)) +- Unit tests read spec file, refs \#793 [\#799](https://github.com/kivy/buildozer/pull/799) ([AndreMiras](https://github.com/AndreMiras)) +- Support the --add-frameworks flag in kivy-ios [\#795](https://github.com/kivy/buildozer/pull/795) ([hackalog](https://github.com/hackalog)) +- Introduces CI and tox testing, fixes \#679 [\#794](https://github.com/kivy/buildozer/pull/794) ([AndreMiras](https://github.com/AndreMiras)) +- Fix python 2 crash when reading config file [\#791](https://github.com/kivy/buildozer/pull/791) ([etc0de](https://github.com/etc0de)) +- Handle buildozer.spec with unicode chars [\#789](https://github.com/kivy/buildozer/pull/789) ([guysoft](https://github.com/guysoft)) +- Specify GitHub branches in buildozer.spec [\#787](https://github.com/kivy/buildozer/pull/787) ([hackalog](https://github.com/hackalog)) +- Fixes minor comment typo introduced in \#759 [\#786](https://github.com/kivy/buildozer/pull/786) ([AndreMiras](https://github.com/AndreMiras)) +- Docker from current git [\#775](https://github.com/kivy/buildozer/pull/775) ([maho](https://github.com/maho)) +- Fix output directory for gradle [\#766](https://github.com/kivy/buildozer/pull/766) ([wo01](https://github.com/wo01)) +- support sensorLandscape mode. resolves \#762 [\#763](https://github.com/kivy/buildozer/pull/763) ([brentpicasso](https://github.com/brentpicasso)) +- allow specifying of Android NDK API. 
Resolves \#758 [\#759](https://github.com/kivy/buildozer/pull/759) ([brentpicasso](https://github.com/brentpicasso)) +- Update cython version from the docs [\#757](https://github.com/kivy/buildozer/pull/757) ([AndreMiras](https://github.com/AndreMiras)) +- fix android sdk/ndk information link [\#755](https://github.com/kivy/buildozer/pull/755) ([avere001](https://github.com/avere001)) +- this allows to build with ndk 17c \(and other recent-ish ones\) [\#754](https://github.com/kivy/buildozer/pull/754) ([tshirtman](https://github.com/tshirtman)) +- fix ndk URLs [\#747](https://github.com/kivy/buildozer/pull/747) ([zworkb](https://github.com/zworkb)) + +## [0.36](https://github.com/kivy/buildozer/tree/0.36) (2018-11-21) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.35...0.36) + +**Fixed bugs:** + +- Error in debug, install platform, IndexError. [\#731](https://github.com/kivy/buildozer/issues/731) + +**Closed issues:** + +- Buildozer looking for nonexistent script [\#749](https://github.com/kivy/buildozer/issues/749) +- Can not build APK file with buildozer. UnicodeDecodeError [\#746](https://github.com/kivy/buildozer/issues/746) +- CrystaX no longer the preferred NDK for Python3 [\#745](https://github.com/kivy/buildozer/issues/745) +- APK build fails \(android.py line 524\) [\#738](https://github.com/kivy/buildozer/issues/738) +- Buildozer 0.35 with IndexError: list index out of range on Mac OS [\#737](https://github.com/kivy/buildozer/issues/737) +- cannot build pyjnius for armeabi-v7a [\#735](https://github.com/kivy/buildozer/issues/735) +- I can't package my app the way i always did. [\#733](https://github.com/kivy/buildozer/issues/733) +- OSError: \[Errno 18\] Invalid cross-device link [\#644](https://github.com/kivy/buildozer/issues/644) + +**Merged pull requests:** + +- Correctly write out a sub-process's `stdout` when using Python 3. [\#743](https://github.com/kivy/buildozer/pull/743) ([dbrnz](https://github.com/dbrnz)) +- Explicitly run `pip3` when using Python 3 [\#741](https://github.com/kivy/buildozer/pull/741) ([dbrnz](https://github.com/dbrnz)) +- workaround for working in WSL [\#740](https://github.com/kivy/buildozer/pull/740) ([tshirtman](https://github.com/tshirtman)) +- Allow for `install\_reqs` line having whitespace at start. [\#736](https://github.com/kivy/buildozer/pull/736) ([dbrnz](https://github.com/dbrnz)) +- Fix Support config file name [\#730](https://github.com/kivy/buildozer/pull/730) ([dessant](https://github.com/dessant)) + +## [0.35](https://github.com/kivy/buildozer/tree/0.35) (2018-10-24) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.34...0.35) + +**Fixed bugs:** + +- Buildozer should not regex/hack install\_reqs [\#722](https://github.com/kivy/buildozer/issues/722) + +**Closed issues:** + +- Command failed: /usr/bin/python -m pythonforandroid.toolchain create... 
[\#727](https://github.com/kivy/buildozer/issues/727) +- buildozer fails at installing platform [\#726](https://github.com/kivy/buildozer/issues/726) +- Unable to build APK [\#725](https://github.com/kivy/buildozer/issues/725) +- Buildozer APK not working: OS X, Docker, or fresh Ubuntu 16.04 [\#723](https://github.com/kivy/buildozer/issues/723) +- gcc Segmentation Fault [\#720](https://github.com/kivy/buildozer/issues/720) +- Error debugging for android [\#719](https://github.com/kivy/buildozer/issues/719) +- ndk\_platform doesn't exist \(when switching to API = 28\) [\#717](https://github.com/kivy/buildozer/issues/717) +- FileNotFoundError: \[Errno 2\] \(SOLVED\) - It was Encoding Error [\#715](https://github.com/kivy/buildozer/issues/715) +- SDL\_JAVA\_PACKAGE\_PATH error? [\#714](https://github.com/kivy/buildozer/issues/714) +- Python3Crystax instructions in README [\#710](https://github.com/kivy/buildozer/issues/710) +- Bug or support request? [\#708](https://github.com/kivy/buildozer/issues/708) +- build requirement failed [\#701](https://github.com/kivy/buildozer/issues/701) +- I am using Virtual Machine provided in the official kivy website for building android APK, this is the part of log file [\#696](https://github.com/kivy/buildozer/issues/696) +- App minimizes instantly after launching. [\#695](https://github.com/kivy/buildozer/issues/695) +- Aidl not detected [\#694](https://github.com/kivy/buildozer/issues/694) +- python 3 ssl [\#690](https://github.com/kivy/buildozer/issues/690) +- Is there something like multiprocessing? [\#685](https://github.com/kivy/buildozer/issues/685) +- Kivy in pyCharm [\#682](https://github.com/kivy/buildozer/issues/682) +- Toolchain error. Help, please! [\#678](https://github.com/kivy/buildozer/issues/678) +- virtual machine does not compile apk Fails with errors [\#677](https://github.com/kivy/buildozer/issues/677) +- An error in Buildozer Apk \(Python Kivy\) Help PLZ! 
[\#676](https://github.com/kivy/buildozer/issues/676) +- App crashes on android but works fine in idle window using buildozer [\#675](https://github.com/kivy/buildozer/issues/675) +- Pyjnius exception Adbuddiz class not found [\#661](https://github.com/kivy/buildozer/issues/661) +- Buildozer failed to execute the last command \#/usr/bin/python3 -m pythonforandroid.toolchain create --dist\_name=blueb --bootstrap=sdl2 --requirements=python3crystax,kivy --arch x86 --copy-libs --color=always --storage-dir=/home/jp/Desktop/BlueB/.buildozer/android/platform/build [\#654](https://github.com/kivy/buildozer/issues/654) +- java.lang.ClassNotFoundException: sun.misc.BASE64Encoder [\#639](https://github.com/kivy/buildozer/issues/639) +- IOError: \[Errno 2\] No such file or directory: [\#636](https://github.com/kivy/buildozer/issues/636) +- App crashes with aws boto3 [\#635](https://github.com/kivy/buildozer/issues/635) +- APK Immediately Closes After Opening in Debug, Release, and Zipaligned & Signed Versions [\#629](https://github.com/kivy/buildozer/issues/629) +- Using Python/Buildozer to compile Kivy for Android [\#628](https://github.com/kivy/buildozer/issues/628) +- "configure: error: C compiler cannot create executables See `config.log' for more details" \(\# Command failed: ./distribute.sh -m "kivy" -d "pollygot"\) [\#627](https://github.com/kivy/buildozer/issues/627) +- "OSError: \[Errno 30\] Read-only file system" \(\# Command failed: virtualenv --python=python2.7 ./venv\) [\#626](https://github.com/kivy/buildozer/issues/626) +- Java compiling issue: buildozer uses obsolete source value 1.5 [\#619](https://github.com/kivy/buildozer/issues/619) +- Cloning into 'python-for-android-new-toolchain'... [\#618](https://github.com/kivy/buildozer/issues/618) +- Failed to build application: 'WindowInfoX11' is not a type identifier [\#616](https://github.com/kivy/buildozer/issues/616) +- Sudo issue\[closed\] [\#615](https://github.com/kivy/buildozer/issues/615) +- buildozer-vm-2 installed from scratch ABENDS [\#611](https://github.com/kivy/buildozer/issues/611) +- ASCII decode problem [\#608](https://github.com/kivy/buildozer/issues/608) +- error: could not delete 'build/lib.linux-x86\_64-2.7/buildozer/targets/android.py': Permission denied [\#607](https://github.com/kivy/buildozer/issues/607) +- APK Build Failing with Python 3.6 [\#605](https://github.com/kivy/buildozer/issues/605) +- IOError: \[Errno 2\] No such file or directory: u'/home/kivy/buildozer/.buildozer/android/platform/build/... [\#603](https://github.com/kivy/buildozer/issues/603) +- Feature Request: Dockerfile or image in dockerhub [\#589](https://github.com/kivy/buildozer/issues/589) +- Buildozer debug error. 
[\#545](https://github.com/kivy/buildozer/issues/545) + +**Merged pull requests:** + +- Fix \#645 [\#729](https://github.com/kivy/buildozer/pull/729) ([tito](https://github.com/tito)) +- Enable Support app [\#728](https://github.com/kivy/buildozer/pull/728) ([dessant](https://github.com/dessant)) +- Updates p4a deps parsing [\#724](https://github.com/kivy/buildozer/pull/724) ([AndreMiras](https://github.com/AndreMiras)) +- Improved error handling if p4a setup.py can't be read [\#721](https://github.com/kivy/buildozer/pull/721) ([inclement](https://github.com/inclement)) +- Various spelling corrections [\#718](https://github.com/kivy/buildozer/pull/718) ([Zen-CODE](https://github.com/Zen-CODE)) +- Updates Python 3 install instructions, fixes \#710 [\#711](https://github.com/kivy/buildozer/pull/711) ([AndreMiras](https://github.com/AndreMiras)) +- Remove obsolete sh package dependency [\#705](https://github.com/kivy/buildozer/pull/705) ([Cheaterman](https://github.com/Cheaterman)) +- Changes how is\_gradle\_build is detected. [\#700](https://github.com/kivy/buildozer/pull/700) ([rammie](https://github.com/rammie)) +- Cleans installation docs [\#689](https://github.com/kivy/buildozer/pull/689) ([AndreMiras](https://github.com/AndreMiras)) +- Adds issue template with basic required info [\#688](https://github.com/kivy/buildozer/pull/688) ([AndreMiras](https://github.com/AndreMiras)) +- buildozer Dockerfile, fixes \#589 [\#681](https://github.com/kivy/buildozer/pull/681) ([AndreMiras](https://github.com/AndreMiras)) +- Removes few unused variables [\#680](https://github.com/kivy/buildozer/pull/680) ([AndreMiras](https://github.com/AndreMiras)) +- Fixed a typo pointed out by AndreMiras [\#664](https://github.com/kivy/buildozer/pull/664) ([inclement](https://github.com/inclement)) +- Updates installation.rst with Ubuntu 18.04 instructions [\#657](https://github.com/kivy/buildozer/pull/657) ([AndreMiras](https://github.com/AndreMiras)) +- fix prerelease version [\#653](https://github.com/kivy/buildozer/pull/653) ([marceloneil](https://github.com/marceloneil)) +- allow setting launchMode for the main activity in the manifest [\#650](https://github.com/kivy/buildozer/pull/650) ([SomberNight](https://github.com/SomberNight)) +- Fixes Invalid cross-device link [\#645](https://github.com/kivy/buildozer/pull/645) ([robertpro](https://github.com/robertpro)) +- Quote --storage-dir value [\#630](https://github.com/kivy/buildozer/pull/630) ([ghost](https://github.com/ghost)) +- Allow adding Java activities to the manifest [\#612](https://github.com/kivy/buildozer/pull/612) ([bauerj](https://github.com/bauerj)) +- add comment re p4a.port in default.spec [\#600](https://github.com/kivy/buildozer/pull/600) ([replabrobin](https://github.com/replabrobin)) +- Spelling [\#592](https://github.com/kivy/buildozer/pull/592) ([jsoref](https://github.com/jsoref)) + +## [0.34](https://github.com/kivy/buildozer/tree/0.34) (2017-12-15) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.33...0.34) + +**Closed issues:** + +- IOERROR invalid directory [\#599](https://github.com/kivy/buildozer/issues/599) +- Buildozer 0.33 AttributeError: 'module' object has no attribute 'directory' [\#598](https://github.com/kivy/buildozer/issues/598) +- Issue with buildozer packing [\#596](https://github.com/kivy/buildozer/issues/596) +- Gradle: path may not be null or empty string. path='null' [\#595](https://github.com/kivy/buildozer/issues/595) +- ERROR: Trying to release a package that starts with org.test; what can I do? 
[\#593](https://github.com/kivy/buildozer/issues/593) +- App crash with python3 [\#590](https://github.com/kivy/buildozer/issues/590) +- Problem running buildozer android debug first time [\#586](https://github.com/kivy/buildozer/issues/586) +- buildozer download some content failed [\#585](https://github.com/kivy/buildozer/issues/585) +- compile platform failed [\#580](https://github.com/kivy/buildozer/issues/580) +- Module OS - buildozer.spec [\#579](https://github.com/kivy/buildozer/issues/579) +- Buildozer doesn't compile app with cryptography requirement [\#578](https://github.com/kivy/buildozer/issues/578) +- Buildozer x psycopg2 [\#575](https://github.com/kivy/buildozer/issues/575) +- Problem with Android API 23 [\#573](https://github.com/kivy/buildozer/issues/573) +- App crashing on startup- ImportError: dlopen failed: \_imaging.so is 64-bit [\#568](https://github.com/kivy/buildozer/issues/568) +- Buildozer issue with latest Xcode/macOS [\#566](https://github.com/kivy/buildozer/issues/566) +- Requests SSL error [\#565](https://github.com/kivy/buildozer/issues/565) +- buildozer failed for `Broken toolchain` when building numpy with python.host [\#564](https://github.com/kivy/buildozer/issues/564) +- Encountered a bad program behavior [\#563](https://github.com/kivy/buildozer/issues/563) +- error at using pycrypto in the requirements [\#558](https://github.com/kivy/buildozer/issues/558) +- Websocket error: SSL not available. [\#552](https://github.com/kivy/buildozer/issues/552) +- "crystax\_python does not exist" with python3crystax [\#551](https://github.com/kivy/buildozer/issues/551) +- App crashes after build [\#549](https://github.com/kivy/buildozer/issues/549) +- Installing CyLP on windows [\#548](https://github.com/kivy/buildozer/issues/548) +- Service notification launch intent causes app crash [\#547](https://github.com/kivy/buildozer/issues/547) +- Application crashes on start [\#546](https://github.com/kivy/buildozer/issues/546) +- New android target is unable to produce a python 4 android service [\#543](https://github.com/kivy/buildozer/issues/543) +- Buildozer Build Error [\#538](https://github.com/kivy/buildozer/issues/538) +- \# Aidl not found, please install it. 
[\#537](https://github.com/kivy/buildozer/issues/537) +- Error compiling Cython file on Ubuntu 14.0.4 with python 2.7 and 3.4/5 [\#536](https://github.com/kivy/buildozer/issues/536) +- Failed compilation on ubuntu with python 2.7 configure: error: C compiler cannot create executables [\#535](https://github.com/kivy/buildozer/issues/535) +- Remove app permissions added by default [\#534](https://github.com/kivy/buildozer/issues/534) +- Buildozer error while packaging [\#531](https://github.com/kivy/buildozer/issues/531) +- Buildozer failing to pack .apk [\#530](https://github.com/kivy/buildozer/issues/530) +- toolchain fails to recognize option --sdk [\#524](https://github.com/kivy/buildozer/issues/524) +- \# Command failed: /usr/bin/python -m pythonforandroid.toolchain create --dist\_name=myapp --bootstrap=sdl2 --requirements=kivy --arch armeabi-v7a --copy-libs --color=always --storage-dir=/home/abhipso/thembapp/.buildozer/android/platform/build [\#521](https://github.com/kivy/buildozer/issues/521) +- apk way too large - 800mb [\#520](https://github.com/kivy/buildozer/issues/520) +- \[features\] Snapcraft implementation [\#514](https://github.com/kivy/buildozer/issues/514) +- Possibility of building in kivy virtual machine all locally [\#513](https://github.com/kivy/buildozer/issues/513) +- Python3\(crystax ndk\) builds broken [\#511](https://github.com/kivy/buildozer/issues/511) +- build fails in virtualenv [\#509](https://github.com/kivy/buildozer/issues/509) +- password for the virtual machine? [\#507](https://github.com/kivy/buildozer/issues/507) +- Failed to build APK with python 3.6 : \[sh.CommandNotFound: python3.5\] [\#504](https://github.com/kivy/buildozer/issues/504) +- Don't Unpacking opencv for armeabi-v7a [\#503](https://github.com/kivy/buildozer/issues/503) +- Fails to package app on OSX Sierra 10.12.4 \(hdiutil: attach failed - image not recognized keka\) [\#494](https://github.com/kivy/buildozer/issues/494) +- File missing building release APK [\#469](https://github.com/kivy/buildozer/issues/469) +- Building APK using Buildozer/Kivy [\#459](https://github.com/kivy/buildozer/issues/459) +- buildozer failed to build apk: subprocess.CalledProcessError: Command '\['ant', 'debug'\]' returned non-zero exit status 1 [\#373](https://github.com/kivy/buildozer/issues/373) +- AttributeError: 'Context' object has no attribute 'hostpython' in recipe [\#361](https://github.com/kivy/buildozer/issues/361) +- Can't compile apk with sqlite3 \(using python3 crystax\) [\#359](https://github.com/kivy/buildozer/issues/359) + +**Merged pull requests:** + +- Imported os to fix ImportError [\#594](https://github.com/kivy/buildozer/pull/594) ([inclement](https://github.com/inclement)) +- add p4a.port config option; to allow specifying webview port [\#588](https://github.com/kivy/buildozer/pull/588) ([replabrobin](https://github.com/replabrobin)) +- Fix Py3 utf-8 encode error [\#582](https://github.com/kivy/buildozer/pull/582) ([Zen-CODE](https://github.com/Zen-CODE)) +- Fixes `p4a.branch` comment [\#577](https://github.com/kivy/buildozer/pull/577) ([AndreMiras](https://github.com/AndreMiras)) +- Fix old toolchain index error [\#576](https://github.com/kivy/buildozer/pull/576) ([Zen-CODE](https://github.com/Zen-CODE)) +- Some fixes in old android target [\#572](https://github.com/kivy/buildozer/pull/572) ([rnixx](https://github.com/rnixx)) +- Removed --sdk argument for p4a [\#571](https://github.com/kivy/buildozer/pull/571) ([inclement](https://github.com/inclement)) +- Update specifications.rst 
[\#560](https://github.com/kivy/buildozer/pull/560) ([crajun](https://github.com/crajun)) +- Changed p4a directory name for current toolchain [\#527](https://github.com/kivy/buildozer/pull/527) ([inclement](https://github.com/inclement)) +- Update android.py, updated recreate the project.properties section [\#525](https://github.com/kivy/buildozer/pull/525) ([mokhoo](https://github.com/mokhoo)) +- Fix unicode coding error in android build target [\#518](https://github.com/kivy/buildozer/pull/518) ([jamalex](https://github.com/jamalex)) +- Add 404 status code handling on kivy download [\#508](https://github.com/kivy/buildozer/pull/508) ([SecretObsession](https://github.com/SecretObsession)) +- Use dmg instead of 7z [\#505](https://github.com/kivy/buildozer/pull/505) ([shivan1b](https://github.com/shivan1b)) + +## [0.33](https://github.com/kivy/buildozer/tree/0.33) (2017-05-15) +[Full Changelog](https://github.com/kivy/buildozer/compare/v0.32...0.33) + +**Fixed bugs:** + +- Installation of python for android is missing dependencies [\#501](https://github.com/kivy/buildozer/issues/501) + +**Closed issues:** + +- Break buildozer if the user try to release a version with "org.test" as a domain [\#500](https://github.com/kivy/buildozer/issues/500) +- Migrate p4a options to its own subkey [\#499](https://github.com/kivy/buildozer/issues/499) +- Use stable branch from python-for-android [\#498](https://github.com/kivy/buildozer/issues/498) +- Migrate android to android\_new, and add android\_old [\#497](https://github.com/kivy/buildozer/issues/497) +- sh.CommandNotFound: cmake [\#496](https://github.com/kivy/buildozer/issues/496) +- Need Help Fatal signal 11 \(SIGSEGV\) at 0x00000000 \(code=1\), thread 4579 \(SDLThread\) [\#495](https://github.com/kivy/buildozer/issues/495) +- Buildozer APK Cannot LAUNCH [\#493](https://github.com/kivy/buildozer/issues/493) +- Buildozer Error [\#492](https://github.com/kivy/buildozer/issues/492) +- android\_new target hardcodes python2 support for p4a [\#491](https://github.com/kivy/buildozer/issues/491) +- android.arch ignored [\#488](https://github.com/kivy/buildozer/issues/488) +- fail to install distribute [\#486](https://github.com/kivy/buildozer/issues/486) +- sh.py raise a exception and fail to build [\#485](https://github.com/kivy/buildozer/issues/485) +- some functionality lost when debugged with android\_new command [\#481](https://github.com/kivy/buildozer/issues/481) +- Problem when deploy to android device [\#480](https://github.com/kivy/buildozer/issues/480) +- dlopen failed: python2.7/site-packages/grpc/\_cython/cygrpc.so not 32-bit: 2 [\#479](https://github.com/kivy/buildozer/issues/479) +- Cannot build APK with python3crystax and flask - conflicting dependencies [\#477](https://github.com/kivy/buildozer/issues/477) +- Buildozer can't download NDK [\#474](https://github.com/kivy/buildozer/issues/474) +- websocket-client "SSL not available." 
[\#473](https://github.com/kivy/buildozer/issues/473) +- Using Cython with Kivy-iOS and Buildozer [\#472](https://github.com/kivy/buildozer/issues/472) +- android.requirements does not merge with app.requirements [\#471](https://github.com/kivy/buildozer/issues/471) +- buildozer fails to find Android SDK [\#468](https://github.com/kivy/buildozer/issues/468) +- Crash of APK on start [\#467](https://github.com/kivy/buildozer/issues/467) +- App not launching [\#461](https://github.com/kivy/buildozer/issues/461) +- sqlite3 not working with android\_new [\#457](https://github.com/kivy/buildozer/issues/457) +- how to set path for p4a [\#454](https://github.com/kivy/buildozer/issues/454) +- TypeError: write\(\) argument 1 must be unicode, not str [\#452](https://github.com/kivy/buildozer/issues/452) +- New toolchain - lxml included but not able to import [\#451](https://github.com/kivy/buildozer/issues/451) +- sqlite3 with python2.7 and buildozer 0.33dev and new toolchain not working [\#450](https://github.com/kivy/buildozer/issues/450) +- Update the Virtual Machine @ https://kivy.org/\#download [\#449](https://github.com/kivy/buildozer/issues/449) +- “No module named setuptools” after installing setuptools [\#444](https://github.com/kivy/buildozer/issues/444) +- how to add --arch=armeabi-v7a to buildozer spec [\#443](https://github.com/kivy/buildozer/issues/443) +- `buildozer android debug` fails with `jinja2.exceptions.TemplateNotFound: build.xml` [\#442](https://github.com/kivy/buildozer/issues/442) +- buildozer.spec - requirements - kivy == master [\#440](https://github.com/kivy/buildozer/issues/440) +- Buildozer can't find zlib [\#437](https://github.com/kivy/buildozer/issues/437) +- Expose kivy download source? [\#435](https://github.com/kivy/buildozer/issues/435) +- compiling crash [\#431](https://github.com/kivy/buildozer/issues/431) +- Buildozer unable to make apk [\#430](https://github.com/kivy/buildozer/issues/430) +- Crash APK on start [\#429](https://github.com/kivy/buildozer/issues/429) +- More like a noob question [\#428](https://github.com/kivy/buildozer/issues/428) +- keka failed to download \(OS X El Capitan\) [\#427](https://github.com/kivy/buildozer/issues/427) +- Buildozer fails with pure python library pint [\#425](https://github.com/kivy/buildozer/issues/425) +- Invalid argument to arm-linux-androideabi-gcc [\#424](https://github.com/kivy/buildozer/issues/424) +- dlopen failed: \_clock.so is 64-bit instead of 32-bit [\#423](https://github.com/kivy/buildozer/issues/423) +- how to solve the build error for "java"? [\#421](https://github.com/kivy/buildozer/issues/421) +- Problems in patching files during building for android\_new [\#416](https://github.com/kivy/buildozer/issues/416) +- Buildozer doesn't work with multiple first-class directories [\#415](https://github.com/kivy/buildozer/issues/415) +- Buildozer suddenly not working, Linux, Python 2.7 \(build.xml: Failed to find version-tag string\) [\#414](https://github.com/kivy/buildozer/issues/414) +- Buildozer not finding aidl [\#413](https://github.com/kivy/buildozer/issues/413) +- buildozer android created apk fails if application source kept in multiple files [\#411](https://github.com/kivy/buildozer/issues/411) +- Python 3 unicode print \(\) / copy to clipboard crashes app on Android [\#404](https://github.com/kivy/buildozer/issues/404) +- checking whether the C compiler works... 
no [\#402](https://github.com/kivy/buildozer/issues/402) +- configure: error: C compiler cannot create executables [\#395](https://github.com/kivy/buildozer/issues/395) +- ConfigParser.NoOptionError: No option 'p4a.local\_recipes' in section: 'app' \(android\_new\) [\#394](https://github.com/kivy/buildozer/issues/394) +- Google has changed the type of archive the new NDK [\#393](https://github.com/kivy/buildozer/issues/393) +- Why does buildozer build and pull python for android from old\_toolchain branch ? [\#389](https://github.com/kivy/buildozer/issues/389) +- buildozer android\_new does not show the presplash [\#387](https://github.com/kivy/buildozer/issues/387) +- Error when using buildozer android\_new with python3crystax [\#386](https://github.com/kivy/buildozer/issues/386) +- Command failed: tar xzf android-sdk\_r20-linux.tgz [\#383](https://github.com/kivy/buildozer/issues/383) +- When will you add requests lib to recipes? [\#382](https://github.com/kivy/buildozer/issues/382) +- Presplash does not work with "android\_new" as target. [\#380](https://github.com/kivy/buildozer/issues/380) +- Build for Android is Inconsistent with the Linux Version [\#378](https://github.com/kivy/buildozer/issues/378) +- \[question\] What are the supported OS ? [\#369](https://github.com/kivy/buildozer/issues/369) +- AttributeError: 'AnsiCodes' object has no attribute 'LIGHTBLUE\_EX' [\#366](https://github.com/kivy/buildozer/issues/366) +- splash image not hide after kivy loaded [\#364](https://github.com/kivy/buildozer/issues/364) +- app always crash in android [\#360](https://github.com/kivy/buildozer/issues/360) +- Plyer not available in buildozer android\_new [\#358](https://github.com/kivy/buildozer/issues/358) +- Runs empty directory instead of binary \(android\_new\) [\#357](https://github.com/kivy/buildozer/issues/357) +- App built with buildozer does not open on android [\#356](https://github.com/kivy/buildozer/issues/356) +- Error when running buildozer android\_new debug [\#354](https://github.com/kivy/buildozer/issues/354) +- ios list\_identities returns no identities [\#353](https://github.com/kivy/buildozer/issues/353) +- buildozer not working [\#350](https://github.com/kivy/buildozer/issues/350) +- error: Cython does not appear to be installed [\#349](https://github.com/kivy/buildozer/issues/349) +- AttributeError: 'Context' object has no attribute 'hostpython' [\#347](https://github.com/kivy/buildozer/issues/347) +- osx packaging results in venv error [\#345](https://github.com/kivy/buildozer/issues/345) +- Requirement example requirements = kivy,requests fails [\#344](https://github.com/kivy/buildozer/issues/344) +- Unavailability of important packages [\#343](https://github.com/kivy/buildozer/issues/343) +- no way to change bootstrap [\#341](https://github.com/kivy/buildozer/issues/341) +- Apk built with buildozer and multiple python file crashes [\#331](https://github.com/kivy/buildozer/issues/331) +- Please upgrade the documentation [\#255](https://github.com/kivy/buildozer/issues/255) +- Buildozer doesn't recognize "profile" option anymore [\#254](https://github.com/kivy/buildozer/issues/254) +- Try to build with caldav requirement fails [\#248](https://github.com/kivy/buildozer/issues/248) +- Trouble building for older android versions [\#240](https://github.com/kivy/buildozer/issues/240) +- removing old apk file seems to fail before installing the new one [\#238](https://github.com/kivy/buildozer/issues/238) +- Build fails due to python-distribute.org being down 
[\#200](https://github.com/kivy/buildozer/issues/200) +- I am struggling with building an apk [\#153](https://github.com/kivy/buildozer/issues/153) +- fresh android sdk install requires sdk update [\#151](https://github.com/kivy/buildozer/issues/151) +- FYI - Ubuntu 14.04 Necessary Java Path Adjustment [\#141](https://github.com/kivy/buildozer/issues/141) +- Cannot compile `iri2uri.py` in `httplib2` [\#135](https://github.com/kivy/buildozer/issues/135) +- can't add django to requirement [\#130](https://github.com/kivy/buildozer/issues/130) +- add an ssh target [\#1](https://github.com/kivy/buildozer/issues/1) + +**Merged pull requests:** + +- close \#452 as suggested by SpotlightKid [\#489](https://github.com/kivy/buildozer/pull/489) ([pat1](https://github.com/pat1)) +- Update README.rst [\#487](https://github.com/kivy/buildozer/pull/487) ([matletix](https://github.com/matletix)) +- Made buildozer run p4a using the current sys.executable [\#484](https://github.com/kivy/buildozer/pull/484) ([inclement](https://github.com/inclement)) +- ios: refactor deprecated PackageApplication command [\#483](https://github.com/kivy/buildozer/pull/483) ([kived](https://github.com/kived)) +- android\_new: change skip\_update to skip all updates [\#465](https://github.com/kivy/buildozer/pull/465) ([ZingBallyhoo](https://github.com/ZingBallyhoo)) +- android\_new: add "android.arch" config option [\#458](https://github.com/kivy/buildozer/pull/458) ([ZingBallyhoo](https://github.com/ZingBallyhoo)) +- Fix Py3 Incompatible str + bytes issue. [\#456](https://github.com/kivy/buildozer/pull/456) ([FeralBytes](https://github.com/FeralBytes)) +- spec file: don't use fullscreen by default [\#447](https://github.com/kivy/buildozer/pull/447) ([rafalo1333](https://github.com/rafalo1333)) +- spec file: use portrait orientation by default [\#446](https://github.com/kivy/buildozer/pull/446) ([rafalo1333](https://github.com/rafalo1333)) +- Add presplash background color support for android\_new toolchain [\#436](https://github.com/kivy/buildozer/pull/436) ([rnixx](https://github.com/rnixx)) +- Fix file\_matches to never return None [\#432](https://github.com/kivy/buildozer/pull/432) ([inclement](https://github.com/inclement)) +- Fixed 64 bit detection \(it failed under python3\) [\#409](https://github.com/kivy/buildozer/pull/409) ([inclement](https://github.com/inclement)) +- Added p4a.local\_recipes to default.spec and handled its absence [\#405](https://github.com/kivy/buildozer/pull/405) ([inclement](https://github.com/inclement)) +- Adding README.rst entries for how to use buildozer with python3 [\#403](https://github.com/kivy/buildozer/pull/403) ([andyDoucette](https://github.com/andyDoucette)) +- Update installation.rst \(Ubuntu16.04\) [\#399](https://github.com/kivy/buildozer/pull/399) ([FermiParadox](https://github.com/FermiParadox)) +- Update quickstart.rst [\#398](https://github.com/kivy/buildozer/pull/398) ([FermiParadox](https://github.com/FermiParadox)) +- Add p4a.local\_recipes to buildozer.spec to specify a local recipe dir… [\#385](https://github.com/kivy/buildozer/pull/385) ([cidermole](https://github.com/cidermole)) +- Always pass required args to p4a in android\_new [\#375](https://github.com/kivy/buildozer/pull/375) ([inclement](https://github.com/inclement)) +- Changed p4a command order to work with argparse [\#374](https://github.com/kivy/buildozer/pull/374) ([inclement](https://github.com/inclement)) +- buildozer has no attribute builddir [\#351](https://github.com/kivy/buildozer/pull/351) 
([nilutz](https://github.com/nilutz)) +- throw error early if running in venv [\#346](https://github.com/kivy/buildozer/pull/346) ([kived](https://github.com/kived)) +- allow selection of bootstrap for android\_new [\#342](https://github.com/kivy/buildozer/pull/342) ([kived](https://github.com/kived)) +- bump version to 0.33dev [\#340](https://github.com/kivy/buildozer/pull/340) ([kived](https://github.com/kived)) +- trying to fix Kivy install for OS X builds [\#316](https://github.com/kivy/buildozer/pull/316) ([derPinguin](https://github.com/derPinguin)) +- update installation info [\#256](https://github.com/kivy/buildozer/pull/256) ([kiok46](https://github.com/kiok46)) + +## [v0.32](https://github.com/kivy/buildozer/tree/v0.32) (2016-05-09) +[Full Changelog](https://github.com/kivy/buildozer/compare/v0.31...v0.32) + +**Closed issues:** + +- When is the support coming to build windows .exe using buildozer? [\#333](https://github.com/kivy/buildozer/issues/333) +- outdated openssl [\#332](https://github.com/kivy/buildozer/issues/332) +- ios deployment fails \(buildozer --verbose ios debug deploy\) [\#330](https://github.com/kivy/buildozer/issues/330) +- Can't add uuid pytz datetime time dbf to requirements [\#329](https://github.com/kivy/buildozer/issues/329) +- AttributeError: 'NoneType' object has no attribute 'startswith' [\#326](https://github.com/kivy/buildozer/issues/326) +- android.p4a\_dir use old toolchain? [\#325](https://github.com/kivy/buildozer/issues/325) +- Switch from pygame to sdl2 easily [\#313](https://github.com/kivy/buildozer/issues/313) +- IOError: \[Errno 2\] No such file or directory: "/home/andrew/CODE/Python/kivy-test-android/.buildozer/android/platform/python-for-android/dist/helloworld/bin/HelloWorld-'1.0'-debug.apk" [\#312](https://github.com/kivy/buildozer/issues/312) +- Marshmallow sdk not found [\#310](https://github.com/kivy/buildozer/issues/310) +- Install Buildozer: Finished processing dependencies for buildozer==0.32dev [\#304](https://github.com/kivy/buildozer/issues/304) +- Bump default min SDK to 13: Fix crash on orientation change bug [\#302](https://github.com/kivy/buildozer/issues/302) +- Disable "Open with file manager" when USB cable is connected in virtual machine [\#299](https://github.com/kivy/buildozer/issues/299) +- Check presence of main.py during build time [\#298](https://github.com/kivy/buildozer/issues/298) +- Py3: 'Buildozer' object has no attribute 'critical' [\#297](https://github.com/kivy/buildozer/issues/297) +- The splash screen isn't automatically resized [\#292](https://github.com/kivy/buildozer/issues/292) +- buildozer don't work if whitespace in path [\#287](https://github.com/kivy/buildozer/issues/287) +- buildozer help fail [\#285](https://github.com/kivy/buildozer/issues/285) +- Buildozer.spec 's title of your application can not be a Chinese character [\#284](https://github.com/kivy/buildozer/issues/284) +- How to build apk with a cython file [\#283](https://github.com/kivy/buildozer/issues/283) +- pip no longer has a --download-cache option, so downloading requirements has stopped working [\#279](https://github.com/kivy/buildozer/issues/279) +- Cython2 not recognized in Fedora23 ? [\#278](https://github.com/kivy/buildozer/issues/278) +- Buildozer Virtual Machine Error: /jni/application/src/': Not a directory [\#277](https://github.com/kivy/buildozer/issues/277) +- buildozer android debug deploy run hangs [\#275](https://github.com/kivy/buildozer/issues/275) +- Is it possible to move the .buildozer folder somewhere else? 
[\#273](https://github.com/kivy/buildozer/issues/273) +- configure: error: C compiler cannot create executables [\#272](https://github.com/kivy/buildozer/issues/272) +- buildozer deploy error [\#271](https://github.com/kivy/buildozer/issues/271) +- Cannot set Android API version [\#268](https://github.com/kivy/buildozer/issues/268) +- Support python3 [\#265](https://github.com/kivy/buildozer/issues/265) +- App crash when changing orientation [\#264](https://github.com/kivy/buildozer/issues/264) +- Broken update command [\#261](https://github.com/kivy/buildozer/issues/261) +- error while deploying android [\#257](https://github.com/kivy/buildozer/issues/257) +- jnius/jnius.c: No such file or directory [\#251](https://github.com/kivy/buildozer/issues/251) +- Implement source.include\_patterns [\#245](https://github.com/kivy/buildozer/issues/245) +- Buildozer Python 3 Compatibility Issues [\#175](https://github.com/kivy/buildozer/issues/175) + +**Merged pull requests:** + +- prepare for release 0.32 [\#339](https://github.com/kivy/buildozer/pull/339) ([kived](https://github.com/kived)) +- use p4a --color argument [\#338](https://github.com/kivy/buildozer/pull/338) ([kived](https://github.com/kived)) +- fix changing android branch [\#337](https://github.com/kivy/buildozer/pull/337) ([kived](https://github.com/kived)) +- use cp -a not cp -r [\#336](https://github.com/kivy/buildozer/pull/336) ([akshayaurora](https://github.com/akshayaurora)) +- improve build directory handling, add values to default.spec [\#335](https://github.com/kivy/buildozer/pull/335) ([kived](https://github.com/kived)) +- fix incorrect api/minapi values [\#334](https://github.com/kivy/buildozer/pull/334) ([kived](https://github.com/kived)) +- fix bad placement of expanduser\(\) [\#328](https://github.com/kivy/buildozer/pull/328) ([kived](https://github.com/kived)) +- use custom source dirs for android\_new [\#324](https://github.com/kivy/buildozer/pull/324) ([kived](https://github.com/kived)) +- use p4a revamp --storage-dir option [\#323](https://github.com/kivy/buildozer/pull/323) ([kived](https://github.com/kived)) +- add adb and p4a commands to android/android\_new [\#322](https://github.com/kivy/buildozer/pull/322) ([kived](https://github.com/kived)) +- fix py3 str has no decode issue [\#321](https://github.com/kivy/buildozer/pull/321) ([kived](https://github.com/kived)) +- let p4a revamp handle pure python requirements [\#320](https://github.com/kivy/buildozer/pull/320) ([kived](https://github.com/kived)) +- fix icons for ios target [\#319](https://github.com/kivy/buildozer/pull/319) ([kived](https://github.com/kived)) +- support using custom kivy-ios source dir [\#318](https://github.com/kivy/buildozer/pull/318) ([kived](https://github.com/kived)) +- disable bitcode for ios target [\#317](https://github.com/kivy/buildozer/pull/317) ([kived](https://github.com/kived)) +- Add window option for target android\_new [\#315](https://github.com/kivy/buildozer/pull/315) ([pythonic64](https://github.com/pythonic64)) +- fix usage exception [\#311](https://github.com/kivy/buildozer/pull/311) ([kived](https://github.com/kived)) +- add python3 compatibility to verbose output for android build \(\#221\) [\#303](https://github.com/kivy/buildozer/pull/303) ([pohmelie](https://github.com/pohmelie)) +- Allow app title to contain Unicode characters [\#293](https://github.com/kivy/buildozer/pull/293) ([udiboy1209](https://github.com/udiboy1209)) +- use ios-deploy version 1.7.0 [\#291](https://github.com/kivy/buildozer/pull/291) 
([cbenhagen](https://github.com/cbenhagen)) +- Add spec option to skip automated update of installed android package [\#290](https://github.com/kivy/buildozer/pull/290) ([pastcompute](https://github.com/pastcompute)) +- Fix issues with android.p4a\_dir spec file property [\#288](https://github.com/kivy/buildozer/pull/288) ([pastcompute](https://github.com/pastcompute)) +- Remove pip --download-cache flag \(fixes \#279\) [\#282](https://github.com/kivy/buildozer/pull/282) ([cbenhagen](https://github.com/cbenhagen)) +- put bin/ in builddir if specified in buildozer.spec [\#274](https://github.com/kivy/buildozer/pull/274) ([jabbalaci](https://github.com/jabbalaci)) +- Implement source.include\_patterns [\#269](https://github.com/kivy/buildozer/pull/269) ([udiboy1209](https://github.com/udiboy1209)) +- Updated Licence Year [\#266](https://github.com/kivy/buildozer/pull/266) ([CodeMaxx](https://github.com/CodeMaxx)) +- fix android.branch option [\#250](https://github.com/kivy/buildozer/pull/250) ([tshirtman](https://github.com/tshirtman)) + +## [v0.31](https://github.com/kivy/buildozer/tree/v0.31) (2016-01-07) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.30...v0.31) + +**Closed issues:** + +- Logo aspect ratio problem [\#263](https://github.com/kivy/buildozer/issues/263) +- Is there a way to separate building environment and building apk? [\#259](https://github.com/kivy/buildozer/issues/259) +- buildozer must be run with sudo [\#258](https://github.com/kivy/buildozer/issues/258) +- Invalid NDK platform [\#253](https://github.com/kivy/buildozer/issues/253) +- Q:compile error [\#252](https://github.com/kivy/buildozer/issues/252) +- Please update SDK url [\#249](https://github.com/kivy/buildozer/issues/249) +- java.lang.NoSuchMethodException: isSupportChangeBadgeByCallMethod \[\] [\#243](https://github.com/kivy/buildozer/issues/243) +- AttributeError: 'NoneType' object has no attribute 'group' [\#242](https://github.com/kivy/buildozer/issues/242) +- Error: Flag '-a' is not valid for 'list sdk'. [\#241](https://github.com/kivy/buildozer/issues/241) +- Provide custom path for android SDK to buildozer [\#237](https://github.com/kivy/buildozer/issues/237) +- kivy examples seem to need \_\_version\_\_ [\#236](https://github.com/kivy/buildozer/issues/236) +- pyliblo [\#235](https://github.com/kivy/buildozer/issues/235) + +**Merged pull requests:** + +- OS X Target for Buildozer [\#262](https://github.com/kivy/buildozer/pull/262) ([akshayaurora](https://github.com/akshayaurora)) +- kill easy\_install [\#244](https://github.com/kivy/buildozer/pull/244) ([techtonik](https://github.com/techtonik)) +- install requires virtualenv [\#239](https://github.com/kivy/buildozer/pull/239) ([cbenhagen](https://github.com/cbenhagen)) +- Fixed Space in app path issue. 
Fixes \#13 [\#231](https://github.com/kivy/buildozer/pull/231) ([dvenkatsagar](https://github.com/dvenkatsagar)) + +## [0.30](https://github.com/kivy/buildozer/tree/0.30) (2015-10-04) +[Full Changelog](https://github.com/kivy/buildozer/compare/v0.29...0.30) + +**Closed issues:** + +- subprocess.CalledProcessError: Command '\['ant', 'debug'\]' returned non-zero exit status 1 [\#234](https://github.com/kivy/buildozer/issues/234) +- Cannot use numpy with buildozer [\#232](https://github.com/kivy/buildozer/issues/232) +- Problem downloading ndk version \> r9d [\#229](https://github.com/kivy/buildozer/issues/229) +- Error likely to missing 32 bit packages [\#228](https://github.com/kivy/buildozer/issues/228) +- Buildozer can't download new ndks 10x... [\#227](https://github.com/kivy/buildozer/issues/227) +- Error while trying to install Buildozer in Windows 10 [\#225](https://github.com/kivy/buildozer/issues/225) +- Making reverse engineering .apk harder [\#224](https://github.com/kivy/buildozer/issues/224) +- Buildozer won't compile libraries with cython 0.23 or 0.22 [\#223](https://github.com/kivy/buildozer/issues/223) +- These are the errors I get when I try to package the file... [\#222](https://github.com/kivy/buildozer/issues/222) +- Buildozer installs platform despite setting ndk & sdk paths [\#220](https://github.com/kivy/buildozer/issues/220) +- Can't find config.ini buildozer solution [\#219](https://github.com/kivy/buildozer/issues/219) +- Ant error: SDK does not have any Build Tools installed [\#218](https://github.com/kivy/buildozer/issues/218) +- Buildozer fails because of build-tools package name [\#217](https://github.com/kivy/buildozer/issues/217) +- ImportError: No module named pygments [\#216](https://github.com/kivy/buildozer/issues/216) +- buildozer android camera [\#215](https://github.com/kivy/buildozer/issues/215) +- Error when first time Building apk [\#212](https://github.com/kivy/buildozer/issues/212) +- cannot import name spawnu [\#211](https://github.com/kivy/buildozer/issues/211) +- Buildozer recompiles p4a when a custom fork of plyer is used. [\#210](https://github.com/kivy/buildozer/issues/210) +- Add android.ant\_path to default.spec [\#209](https://github.com/kivy/buildozer/issues/209) +- Problems with adding wav, ogg and ttf files [\#208](https://github.com/kivy/buildozer/issues/208) +- cython issue with kivy and buildozer development versions [\#207](https://github.com/kivy/buildozer/issues/207) +- subprocess.CalledProcessError: Command '\['ant', 'debug'\]' returned non-zero exit status 1 [\#205](https://github.com/kivy/buildozer/issues/205) +- Buildozer isn't building if I try to include some requirements [\#195](https://github.com/kivy/buildozer/issues/195) +- Can't build APK for android.api = 10 [\#193](https://github.com/kivy/buildozer/issues/193) +- Doc error: "buildozer clean" does not exist [\#189](https://github.com/kivy/buildozer/issues/189) +- Can't install pillow requirement [\#188](https://github.com/kivy/buildozer/issues/188) +- \#error from Cython compilation [\#150](https://github.com/kivy/buildozer/issues/150) +- Space in app path name causes ./distribute -m kivy to fail [\#13](https://github.com/kivy/buildozer/issues/13) + +**Merged pull requests:** + +- Changed p4a download to pull old\_toolchain branch [\#233](https://github.com/kivy/buildozer/pull/233) ([inclement](https://github.com/inclement)) +- Added support for downloading and handling android ndk r10 versions. 
Fixes \#229 and \#227 [\#230](https://github.com/kivy/buildozer/pull/230) ([dvenkatsagar](https://github.com/dvenkatsagar)) +- make \_read\_version\_subdir return parse\('0'\) instead of \[0\], otherwise… [\#206](https://github.com/kivy/buildozer/pull/206) ([denys-duchier](https://github.com/denys-duchier)) + +## [v0.29](https://github.com/kivy/buildozer/tree/v0.29) (2015-06-01) +[Full Changelog](https://github.com/kivy/buildozer/compare/v0.27...v0.29) + +**Fixed bugs:** + +- version problem with split [\#201](https://github.com/kivy/buildozer/issues/201) + +**Closed issues:** + +- buildozer android release hangs at "compile platform" [\#199](https://github.com/kivy/buildozer/issues/199) +- Hang up at Fetching https://dl-ssl.google.com/android/repository/addons\_list-2.xml [\#198](https://github.com/kivy/buildozer/issues/198) +- Python 3 Import error on urllib.request. [\#187](https://github.com/kivy/buildozer/issues/187) + +**Merged pull requests:** + +- needs testing, should fix \#201 using pypa implementation of PEP440 [\#202](https://github.com/kivy/buildozer/pull/202) ([tshirtman](https://github.com/tshirtman)) +- check for complete dist instead of dist dir [\#197](https://github.com/kivy/buildozer/pull/197) ([kived](https://github.com/kived)) +- fix ios targets xcode command [\#194](https://github.com/kivy/buildozer/pull/194) ([cbenhagen](https://github.com/cbenhagen)) +- Windows fix [\#192](https://github.com/kivy/buildozer/pull/192) ([jaynakus](https://github.com/jaynakus)) +- some python 3 compatibility [\#191](https://github.com/kivy/buildozer/pull/191) ([pohmelie](https://github.com/pohmelie)) +- allow custom source folders in buildozer.spec [\#185](https://github.com/kivy/buildozer/pull/185) ([kived](https://github.com/kived)) +- use upstream pexpect instead of shipping it [\#176](https://github.com/kivy/buildozer/pull/176) ([tshirtman](https://github.com/tshirtman)) + +## [v0.27](https://github.com/kivy/buildozer/tree/v0.27) (2015-03-08) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.26...v0.27) + +**Closed issues:** + +- subprocess.CalledProcessError: Command '\['ant', 'debug'\]' returned non-zero exit status 1 [\#183](https://github.com/kivy/buildozer/issues/183) +- Buildozer get error during packaging for android [\#182](https://github.com/kivy/buildozer/issues/182) +- Bug with android.p4a\_whitelist in buildozer.spec file. [\#180](https://github.com/kivy/buildozer/issues/180) +- You need an option for git https [\#178](https://github.com/kivy/buildozer/issues/178) +- Buildozer .apk file creation issue [\#177](https://github.com/kivy/buildozer/issues/177) +- sudo buildozer Fails [\#174](https://github.com/kivy/buildozer/issues/174) +- Buildozer iOS Apps Won't Open [\#171](https://github.com/kivy/buildozer/issues/171) +- always show python-for-android output on failure [\#170](https://github.com/kivy/buildozer/issues/170) +- Buildozer tries to install android sdk every time you try to compile an android application. 
[\#169](https://github.com/kivy/buildozer/issues/169) +- automatic installation of android sdk fails due to unicode parsing error [\#166](https://github.com/kivy/buildozer/issues/166) +- Move from fruitstrap to ios-deploy [\#107](https://github.com/kivy/buildozer/issues/107) +- buildozer ios debug build fails on MacOS Mavericks [\#83](https://github.com/kivy/buildozer/issues/83) +- gdb doesn't work anymore with Xcode 5 [\#54](https://github.com/kivy/buildozer/issues/54) +- buildozer ios debug deploy fails on running fruitstrap at 70% with error AMDeviceInstallApplication failed [\#9](https://github.com/kivy/buildozer/issues/9) + +**Merged pull requests:** + +- fix black text in log [\#184](https://github.com/kivy/buildozer/pull/184) ([kived](https://github.com/kived)) + +## [0.26](https://github.com/kivy/buildozer/tree/0.26) (2015-01-28) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.25...0.26) + +**Merged pull requests:** + +- ensure whitelist always has a list [\#172](https://github.com/kivy/buildozer/pull/172) ([kived](https://github.com/kived)) + +## [0.25](https://github.com/kivy/buildozer/tree/0.25) (2015-01-27) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.24...0.25) + +## [0.24](https://github.com/kivy/buildozer/tree/0.24) (2015-01-27) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.23...0.24) + +## [0.23](https://github.com/kivy/buildozer/tree/0.23) (2015-01-27) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.22...0.23) + +## [0.22](https://github.com/kivy/buildozer/tree/0.22) (2015-01-27) +[Full Changelog](https://github.com/kivy/buildozer/compare/v0.21...0.22) + +## [v0.21](https://github.com/kivy/buildozer/tree/v0.21) (2015-01-14) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.19...v0.21) + +**Merged pull requests:** + +- removed some indentation in example info, added to actual comments inste... [\#168](https://github.com/kivy/buildozer/pull/168) ([chozabu](https://github.com/chozabu)) + +## [0.19](https://github.com/kivy/buildozer/tree/0.19) (2014-12-17) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.18...0.19) + +## [0.18](https://github.com/kivy/buildozer/tree/0.18) (2014-12-17) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.17...0.18) + +**Closed issues:** + +- buildozer can't download python libs due to ssl certificate check fail [\#164](https://github.com/kivy/buildozer/issues/164) +- Buildozer feature redirect .buildozer folder outside your project [\#162](https://github.com/kivy/buildozer/issues/162) +- Buildozer fails on clean build [\#161](https://github.com/kivy/buildozer/issues/161) +- pyjnius build fails on Arch Linux when requiring netifaces [\#159](https://github.com/kivy/buildozer/issues/159) +- error compiling with buildozer [\#158](https://github.com/kivy/buildozer/issues/158) +- C compiler cannot create executables [\#152](https://github.com/kivy/buildozer/issues/152) +- Requirements needing commas instead of spaces \(like p4a\) is non-obvious [\#147](https://github.com/kivy/buildozer/issues/147) + +**Merged pull requests:** + +- fix build error and allow redirecting build folder [\#163](https://github.com/kivy/buildozer/pull/163) ([olymk2](https://github.com/olymk2)) +- Remove duplicated checkbin\(\). 
[\#160](https://github.com/kivy/buildozer/pull/160) ([attakei](https://github.com/attakei)) +- added note about buildozer not having anything to do with buildozer.io [\#157](https://github.com/kivy/buildozer/pull/157) ([nickyspag](https://github.com/nickyspag)) +- Fixed logic to compare with “non installed” with “minor version upped” [\#156](https://github.com/kivy/buildozer/pull/156) ([attakei](https://github.com/attakei)) +- Set "UTF-8" to java file.encoding for android update command explicitly [\#155](https://github.com/kivy/buildozer/pull/155) ([attakei](https://github.com/attakei)) +- added example to default.spec requirements showing comma separation [\#148](https://github.com/kivy/buildozer/pull/148) ([chozabu](https://github.com/chozabu)) + +## [0.17](https://github.com/kivy/buildozer/tree/0.17) (2014-09-22) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.16...0.17) + +## [0.16](https://github.com/kivy/buildozer/tree/0.16) (2014-09-22) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.15...0.16) + +**Closed issues:** + +- `install\_android\_packages` is too slow to run in china. [\#143](https://github.com/kivy/buildozer/issues/143) +- Buildozer setup.py fails with Module ImportError [\#140](https://github.com/kivy/buildozer/issues/140) +- buildozer downloads Android SDK 20 during every call to deploy app [\#137](https://github.com/kivy/buildozer/issues/137) +- Buildozer v0.15: lib/pexpect.py is not Python 3 compatible [\#131](https://github.com/kivy/buildozer/issues/131) +- Keep on getting version error [\#129](https://github.com/kivy/buildozer/issues/129) +- arm-linux-androideabi-gcc: fatal error: no input files [\#127](https://github.com/kivy/buildozer/issues/127) +- I am new to python and buildozer, using buildozer to compile my first android app [\#125](https://github.com/kivy/buildozer/issues/125) +- I am new to python and buildozer, using buildozer to compile my first android app, [\#124](https://github.com/kivy/buildozer/issues/124) +- Command Failed [\#122](https://github.com/kivy/buildozer/issues/122) +- Exception: Cython cython not found [\#120](https://github.com/kivy/buildozer/issues/120) +- Enable use for packaging OSX apps [\#114](https://github.com/kivy/buildozer/issues/114) +- Errors on 'buildozer android debug deploy run' [\#113](https://github.com/kivy/buildozer/issues/113) +- Fail to download Android SDK in Linux and Python 3.3 [\#110](https://github.com/kivy/buildozer/issues/110) +- Unable to add "requirements" buildozer.spec [\#109](https://github.com/kivy/buildozer/issues/109) +- TypeError: 'encoding' is an invalid keyword argument for this function [\#106](https://github.com/kivy/buildozer/issues/106) +- Custom activity [\#33](https://github.com/kivy/buildozer/issues/33) +- Buildozer fails to install on Windows [\#27](https://github.com/kivy/buildozer/issues/27) +- support blacklist changes in python-for-android [\#17](https://github.com/kivy/buildozer/issues/17) + +**Merged pull requests:** + +- Test in file\_rename if target directory exists. 
[\#144](https://github.com/kivy/buildozer/pull/144) ([droundy](https://github.com/droundy)) +- Fix for android.library\_references path issue [\#139](https://github.com/kivy/buildozer/pull/139) ([excessivedemon](https://github.com/excessivedemon)) +- Specs doc revision [\#134](https://github.com/kivy/buildozer/pull/134) ([dessant](https://github.com/dessant)) +- Make pexpect.py Python 3 Compatable [\#133](https://github.com/kivy/buildozer/pull/133) ([FeralBytes](https://github.com/FeralBytes)) +- Added check for buildozer running as root [\#128](https://github.com/kivy/buildozer/pull/128) ([inclement](https://github.com/inclement)) +- Add link to the right android python project [\#119](https://github.com/kivy/buildozer/pull/119) ([techtonik](https://github.com/techtonik)) +- Execute buildozer as "python -m buildozer" [\#118](https://github.com/kivy/buildozer/pull/118) ([techtonik](https://github.com/techtonik)) +- Fix \#115 [\#116](https://github.com/kivy/buildozer/pull/116) ([manuelbua](https://github.com/manuelbua)) + +## [0.15](https://github.com/kivy/buildozer/tree/0.15) (2014-06-02) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.14...0.15) + +**Closed issues:** + +- Do not set permissions \(ug+x\) if already set [\#115](https://github.com/kivy/buildozer/issues/115) +- UTF-8 Encoding Error, \_\_init.py\_\_ 0.15-dev [\#108](https://github.com/kivy/buildozer/issues/108) +- incorrect minapi android manifest value [\#93](https://github.com/kivy/buildozer/issues/93) +- libpython wait4 linker error [\#92](https://github.com/kivy/buildozer/issues/92) +- fcntl import error [\#88](https://github.com/kivy/buildozer/issues/88) +- No Python 3 Support [\#84](https://github.com/kivy/buildozer/issues/84) +- Uncaught exception on missing cython [\#80](https://github.com/kivy/buildozer/issues/80) +- Where are custom python-for-android recipes meant to go? [\#76](https://github.com/kivy/buildozer/issues/76) +- Error compiling Cython file: [\#73](https://github.com/kivy/buildozer/issues/73) +- Zlib still giving issues on Ubuntu 13.04 [\#72](https://github.com/kivy/buildozer/issues/72) +- DBAccessError permission denied in app [\#71](https://github.com/kivy/buildozer/issues/71) +- Selective update of depencencies [\#70](https://github.com/kivy/buildozer/issues/70) +- 32-bit SDK installed on 64-bit system [\#69](https://github.com/kivy/buildozer/issues/69) +- wrong version regex [\#67](https://github.com/kivy/buildozer/issues/67) +- sdk update fails on license question [\#66](https://github.com/kivy/buildozer/issues/66) +- x86 and armeabi-v7 libs [\#63](https://github.com/kivy/buildozer/issues/63) +- Missing dependenced during compilation [\#59](https://github.com/kivy/buildozer/issues/59) +- Bad magic number when reading generated state.db file in VMware Ubuntu guest [\#42](https://github.com/kivy/buildozer/issues/42) +- x86 apk support on buildozer [\#11](https://github.com/kivy/buildozer/issues/11) + +**Merged pull requests:** + +- Ignore UTF-8 decoding errors. 
Closes \#108 [\#112](https://github.com/kivy/buildozer/pull/112) ([cbenhagen](https://github.com/cbenhagen)) +- chmod ug+x android\_cmd [\#111](https://github.com/kivy/buildozer/pull/111) ([cbenhagen](https://github.com/cbenhagen)) +- p4a whitelist [\#98](https://github.com/kivy/buildozer/pull/98) ([b3ni](https://github.com/b3ni)) + +## [0.14](https://github.com/kivy/buildozer/tree/0.14) (2014-04-20) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.13...0.14) + +## [0.13](https://github.com/kivy/buildozer/tree/0.13) (2014-04-20) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.12...0.13) + +## [0.12](https://github.com/kivy/buildozer/tree/0.12) (2014-04-20) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.11...0.12) + +## [0.11](https://github.com/kivy/buildozer/tree/0.11) (2014-04-20) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.10...0.11) + +**Closed issues:** + +- Text provider [\#105](https://github.com/kivy/buildozer/issues/105) +- No installation instructions [\#104](https://github.com/kivy/buildozer/issues/104) + +## [0.10](https://github.com/kivy/buildozer/tree/0.10) (2014-04-09) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.9...0.10) + +**Closed issues:** + +- Android SDK installation not working anymore [\#101](https://github.com/kivy/buildozer/issues/101) +- Buildozer almost completes and then errors saying file exists [\#99](https://github.com/kivy/buildozer/issues/99) +- Java compilernot found [\#95](https://github.com/kivy/buildozer/issues/95) +- Absolute path problem [\#91](https://github.com/kivy/buildozer/issues/91) +- Error when running: buildozer --verbose android debug deploy run [\#89](https://github.com/kivy/buildozer/issues/89) +- buildozer.spec passing requirements [\#87](https://github.com/kivy/buildozer/issues/87) +- debugging "Command failed" is tedious [\#86](https://github.com/kivy/buildozer/issues/86) +- No module named sqlite3 [\#56](https://github.com/kivy/buildozer/issues/56) +- Garden packages are unsupported [\#39](https://github.com/kivy/buildozer/issues/39) +- python-for-android repo is hard-coded in buildozer [\#37](https://github.com/kivy/buildozer/issues/37) +- virtualenv-2.7 hardcoded [\#22](https://github.com/kivy/buildozer/issues/22) +- Buildozer error no build.py [\#21](https://github.com/kivy/buildozer/issues/21) + +**Merged pull requests:** + +- Fixed garden install for newer virtualenvs [\#100](https://github.com/kivy/buildozer/pull/100) ([brousch](https://github.com/brousch)) +- fix ln if soft link existed [\#96](https://github.com/kivy/buildozer/pull/96) ([pengjia](https://github.com/pengjia)) +- Added realpath modifier to p4a\_dir token [\#94](https://github.com/kivy/buildozer/pull/94) ([inclement](https://github.com/inclement)) +- Documented env var checking and fixed a bug in the p4a\_dir check [\#85](https://github.com/kivy/buildozer/pull/85) ([inclement](https://github.com/inclement)) +- Delete dist dir if running distribute.sh [\#81](https://github.com/kivy/buildozer/pull/81) ([inclement](https://github.com/inclement)) +- implement the `clean` command. 
[\#79](https://github.com/kivy/buildozer/pull/79) ([akshayaurora](https://github.com/akshayaurora)) +- Garden requirements [\#41](https://github.com/kivy/buildozer/pull/41) ([Ian-Foote](https://github.com/Ian-Foote)) + +## [0.9](https://github.com/kivy/buildozer/tree/0.9) (2014-02-13) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.8...0.9) + +**Closed issues:** + +- Command failed: ./distribute.sh -m "kivy" error message [\#77](https://github.com/kivy/buildozer/issues/77) +- Error importing \_scproxy [\#68](https://github.com/kivy/buildozer/issues/68) +- Package names beginning with a number cause an obscure crash with an unclear error message [\#64](https://github.com/kivy/buildozer/issues/64) +- failing to compile sample android app with buildozer [\#61](https://github.com/kivy/buildozer/issues/61) +- Default android.sdk setting causes sensor rotate on Android to fail [\#32](https://github.com/kivy/buildozer/issues/32) +- Add wakelock to options [\#31](https://github.com/kivy/buildozer/issues/31) + +**Merged pull requests:** + +- Updated Android NDK default version to 9c [\#82](https://github.com/kivy/buildozer/pull/82) ([brousch](https://github.com/brousch)) +- Add 'bin' to suggested default directory excludes [\#78](https://github.com/kivy/buildozer/pull/78) ([joseph-jnl](https://github.com/joseph-jnl)) +- Clarified wording in README [\#75](https://github.com/kivy/buildozer/pull/75) ([inclement](https://github.com/inclement)) +- Check for package name starting with number [\#65](https://github.com/kivy/buildozer/pull/65) ([inclement](https://github.com/inclement)) +- \[FIX\] Detect 32/64 bit on Windows, to download Android NDK [\#62](https://github.com/kivy/buildozer/pull/62) ([alanjds](https://github.com/alanjds)) +- Added --private and --dir Android storage option [\#58](https://github.com/kivy/buildozer/pull/58) ([brousch](https://github.com/brousch)) +- Added a 'serve' command to serve bin/ over SimpleHTTPServer [\#49](https://github.com/kivy/buildozer/pull/49) ([brousch](https://github.com/brousch)) + +## [0.8](https://github.com/kivy/buildozer/tree/0.8) (2013-10-29) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.7...0.8) + +**Fixed bugs:** + +- \_patch\_application\_sources breaks from \_\_future\_\_ imports [\#35](https://github.com/kivy/buildozer/issues/35) + +**Closed issues:** + +- unresolved domain: pygame.org [\#34](https://github.com/kivy/buildozer/issues/34) + +**Merged pull requests:** + +- Add ability to choose python-for-android directory [\#60](https://github.com/kivy/buildozer/pull/60) ([inclement](https://github.com/inclement)) +- Update default Android NDK to r9 [\#53](https://github.com/kivy/buildozer/pull/53) ([brousch](https://github.com/brousch)) +- Added android.wakelock option [\#51](https://github.com/kivy/buildozer/pull/51) ([brousch](https://github.com/brousch)) +- Fixed another 'Unknown' typo [\#48](https://github.com/kivy/buildozer/pull/48) ([brousch](https://github.com/brousch)) +- Fixed spelling of 'Unknown' [\#47](https://github.com/kivy/buildozer/pull/47) ([brousch](https://github.com/brousch)) +- Fixed missing 'r' on ANDROIDNDKVER environment export [\#46](https://github.com/kivy/buildozer/pull/46) ([brousch](https://github.com/brousch)) +- make sure android.branch works with fresh clone [\#44](https://github.com/kivy/buildozer/pull/44) ([akshayaurora](https://github.com/akshayaurora)) +- Fixed a typo in setdefault description [\#40](https://github.com/kivy/buildozer/pull/40) 
([nithin-bose](https://github.com/nithin-bose)) +- Package paths [\#38](https://github.com/kivy/buildozer/pull/38) ([Ian-Foote](https://github.com/Ian-Foote)) +- add applibs in path for service too [\#26](https://github.com/kivy/buildozer/pull/26) ([tshirtman](https://github.com/tshirtman)) +- fix distribute install before installing every dependencies, fix a few i... [\#25](https://github.com/kivy/buildozer/pull/25) ([tshirtman](https://github.com/tshirtman)) + +## [0.7](https://github.com/kivy/buildozer/tree/0.7) (2013-09-11) +[Full Changelog](https://github.com/kivy/buildozer/compare/0.2...0.7) + +**Closed issues:** + +- Builds fail on Ubuntu 13.04 with zlib.h missing [\#18](https://github.com/kivy/buildozer/issues/18) +- "buildozer android update" fails with an error about android.branch [\#12](https://github.com/kivy/buildozer/issues/12) +- Problem Ubuntu compilation on network drive [\#10](https://github.com/kivy/buildozer/issues/10) +- \[app\] "android.permission" contain an unknown permission [\#6](https://github.com/kivy/buildozer/issues/6) +- buildozer on ios fails at: Command failed: tools/build-all.sh [\#5](https://github.com/kivy/buildozer/issues/5) +- Automatically installing Android SDK fails in file\_rename called from \_install\_android\_sdk [\#4](https://github.com/kivy/buildozer/issues/4) +- buildozer does not support ~ in android.sdk\_path [\#3](https://github.com/kivy/buildozer/issues/3) + +**Merged pull requests:** + +- Fix typo 'versionning' -\> 'versioning'. [\#29](https://github.com/kivy/buildozer/pull/29) ([Ian-Foote](https://github.com/Ian-Foote)) +- Fixed hard-coded Android API 14 [\#23](https://github.com/kivy/buildozer/pull/23) ([brousch](https://github.com/brousch)) +- Fixed \#18: Builds fail on Ubuntu 13.04 with zlib.h missing. 
[\#20](https://github.com/kivy/buildozer/pull/20) ([roskakori](https://github.com/roskakori)) +- Europython sprint updates [\#19](https://github.com/kivy/buildozer/pull/19) ([fabiankreutz](https://github.com/fabiankreutz)) +- copy the generated apk back from remote [\#16](https://github.com/kivy/buildozer/pull/16) ([akshayaurora](https://github.com/akshayaurora)) +- android.add\_jars config option [\#15](https://github.com/kivy/buildozer/pull/15) ([bob-the-hamster](https://github.com/bob-the-hamster)) +- Ouya support [\#14](https://github.com/kivy/buildozer/pull/14) ([bob-the-hamster](https://github.com/bob-the-hamster)) + +## [0.2](https://github.com/kivy/buildozer/tree/0.2) (2012-12-20) + + +\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)* diff --git a/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/RECORD b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/RECORD new file mode 100644 index 0000000..b325a55 --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/RECORD @@ -0,0 +1,41 @@ +../../../bin/buildozer,sha256=3v8JD1yLtbpzUBvrolWQcP88Mn7igBHdDpexCZP1Rtg,269 +../../../bin/buildozer-remote,sha256=NKfV1Ue_6Dg3FHU2gwVqHyh7LE2IGdz-8LCYqXWERcs,269 +buildozer-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +buildozer-1.4.0.dist-info/LICENSE,sha256=KCs9hzXvbL3PjMSAlH1Q4K6krRzJMdQfP2prwjlW8tY,1081 +buildozer-1.4.0.dist-info/METADATA,sha256=tkwTgymbQvI90RabHJrb9hJnQDLFR60IHJS7QZw89OQ,128007 +buildozer-1.4.0.dist-info/RECORD,, +buildozer-1.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +buildozer-1.4.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +buildozer-1.4.0.dist-info/entry_points.txt,sha256=JjYRllYhlszUEBEiW4E4aWMwwN1iKfnUrFMjIKte2HM,109 +buildozer-1.4.0.dist-info/top_level.txt,sha256=4zmm_Khy4jSZ3QS6rQy41W8KwWziNKksqmsLmspFJDc,10 +buildozer/__init__.py,sha256=4cAfOIk_OLexnHEmtqdHR95t-EZNd03bf9QHoWgFz8M,43985 +buildozer/__main__.py,sha256=YTu2ChwR3UZjhPkk_JrIhSdjx3M1uvr5ZOJLkJgq3uQ,81 +buildozer/__pycache__/__init__.cpython-38.pyc,, +buildozer/__pycache__/__main__.cpython-38.pyc,, +buildozer/__pycache__/jsonstore.cpython-38.pyc,, +buildozer/__pycache__/sitecustomize.cpython-38.pyc,, +buildozer/__pycache__/target.cpython-38.pyc,, +buildozer/default.spec,sha256=MIFGb7fWFajHPHJSYvUOw1g34oLMWIF8ZMCP31-IlJ8,15328 +buildozer/jsonstore.py,sha256=8KH53__2GAhLjUOnK9ISuUYoWT_yhEAQ1ctiZPbhiIs,1128 +buildozer/libs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +buildozer/libs/__pycache__/__init__.cpython-38.pyc,, +buildozer/libs/__pycache__/_structures.cpython-38.pyc,, +buildozer/libs/__pycache__/version.cpython-38.pyc,, +buildozer/libs/_structures.py,sha256=xDfyXyKc2mSwCuro6Bq_c8OunhZ-cHCiSSgPTDnpF-U,1729 +buildozer/libs/version.py,sha256=YfFXeZtBTZp1ysl6YppiViTelJU4Dkzk1BolPfNR54g,11817 +buildozer/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +buildozer/scripts/__pycache__/__init__.cpython-38.pyc,, +buildozer/scripts/__pycache__/client.cpython-38.pyc,, +buildozer/scripts/__pycache__/remote.cpython-38.pyc,, +buildozer/scripts/client.py,sha256=VfoqN9FiScr_QLE6nro9tp8CNBmLsLim2cNXryUJ-PA,518 +buildozer/scripts/remote.py,sha256=ZUCKA6KLVURSb6G6dCeoANk43i-J0xs-VEqiT8w_4z0,9250 +buildozer/sitecustomize.py,sha256=G4B7pY3wsq15VX5O8I2jlXXR6duS_tCXAGDqPFG4rjo,98 +buildozer/target.py,sha256=6nZpjiKUs9fmHMuH5-WT7qzHDBEyZkloFwoFV9O5wm0,9745 
+buildozer/targets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +buildozer/targets/__pycache__/__init__.cpython-38.pyc,, +buildozer/targets/__pycache__/android.cpython-38.pyc,, +buildozer/targets/__pycache__/ios.cpython-38.pyc,, +buildozer/targets/__pycache__/osx.cpython-38.pyc,, +buildozer/targets/android.py,sha256=hsX80uMKe4h2-pMAf3qI5gUYwfZ2ep1zJ7lVj_MAT-I,62212 +buildozer/targets/ios.py,sha256=HpzUlcTO0ij21BelQFY6WIA1mrr8Jup46rtb77xgwgQ,17880 +buildozer/targets/osx.py,sha256=FPd_YN7Lg4b3zL-vuZY79HjJyMbJYMd5TJBmArnyboA,8697 diff --git a/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/WHEEL b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/entry_points.txt new file mode 100644 index 0000000..648805a --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +buildozer = buildozer.scripts.client:main +buildozer-remote = buildozer.scripts.remote:main diff --git a/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..e32d89f --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer-1.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +buildozer diff --git a/venv/lib/python3.8/site-packages/buildozer/__init__.py b/venv/lib/python3.8/site-packages/buildozer/__init__.py new file mode 100644 index 0000000..128018a --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer/__init__.py @@ -0,0 +1,1239 @@ +''' +Buildozer +========= + +Generic Python packager for Android / iOS. Desktop later. 
+ +''' + +__version__ = '1.4.0' + +import os +import re +import sys +import select +import codecs +import textwrap +import warnings +from buildozer.jsonstore import JsonStore +from sys import stdout, stderr, exit +from re import search +from os.path import join, exists, dirname, realpath, splitext, expanduser +from subprocess import Popen, PIPE, TimeoutExpired +from os import environ, unlink, walk, sep, listdir, makedirs +from copy import copy +from shutil import copyfile, rmtree, copytree, move +from fnmatch import fnmatch + +from pprint import pformat + +from urllib.request import FancyURLopener +from configparser import ConfigParser +try: + import fcntl +except ImportError: + # on windows, no fcntl + fcntl = None +try: + # if installed, it can give color to windows as well + import colorama + colorama.init() + + RESET_SEQ = colorama.Fore.RESET + colorama.Style.RESET_ALL + COLOR_SEQ = lambda x: x # noqa: E731 + BOLD_SEQ = '' + if sys.platform == 'win32': + BLACK = colorama.Fore.BLACK + colorama.Style.DIM + else: + BLACK = colorama.Fore.BLACK + colorama.Style.BRIGHT + RED = colorama.Fore.RED + BLUE = colorama.Fore.CYAN + USE_COLOR = 'NO_COLOR' not in environ + +except ImportError: + if sys.platform != 'win32': + RESET_SEQ = "\033[0m" + COLOR_SEQ = lambda x: "\033[1;{}m".format(30 + x) # noqa: E731 + BOLD_SEQ = "\033[1m" + BLACK = 0 + RED = 1 + BLUE = 4 + USE_COLOR = 'NO_COLOR' not in environ + else: + RESET_SEQ = '' + COLOR_SEQ = '' + BOLD_SEQ = '' + RED = BLUE = BLACK = 0 + USE_COLOR = False + +# error, info, debug +LOG_LEVELS_C = (RED, BLUE, BLACK) +LOG_LEVELS_T = 'EID' +SIMPLE_HTTP_SERVER_PORT = 8000 + + +class ChromeDownloader(FancyURLopener): + version = ( + 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 ' + '(KHTML, like Gecko) Chrome/28.0.1500.71 Safari/537.36') + + +urlretrieve = ChromeDownloader().retrieve + + +class BuildozerException(Exception): + ''' + Exception raised for general situations buildozer cannot process. + ''' + pass + + +class BuildozerCommandException(BuildozerException): + ''' + Exception raised when an external command failed. + + See: `Buildozer.cmd()`. + ''' + pass + + +class Buildozer: + + ERROR = 0 + INFO = 1 + DEBUG = 2 + + standard_cmds = ('distclean', 'update', 'debug', 'release', + 'deploy', 'run', 'serve') + + def __init__(self, filename='buildozer.spec', target=None): + self.log_level = 2 + self.environ = {} + self.specfilename = filename + self.state = None + self.build_id = None + self.config_profile = '' + self.config = ConfigParser(allow_no_value=True) + self.config.optionxform = lambda value: value + self.config.getlist = self._get_config_list + self.config.getlistvalues = self._get_config_list_values + self.config.getdefault = self._get_config_default + self.config.getbooldefault = self._get_config_bool + self.config.getrawdefault = self._get_config_raw_default + + if exists(filename): + self.config.read(filename, "utf-8") + self.check_configuration_tokens() + + # Check all section/tokens for env vars, and replace the + # config value if a suitable env var exists. 
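+        # The environment variable name is SECTION_TOKEN: upper case, with
+        # dots replaced by underscores. For example, APP_PACKAGE_NAME
+        # overrides `package.name` from the [app] section (see
+        # set_config_token_from_env at the bottom of this module).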
+ set_config_from_envs(self.config) + + try: + self.log_level = int(self.config.getdefault( + 'buildozer', 'log_level', '2')) + except Exception: + pass + + self.user_bin_dir = self.config.getdefault('buildozer', 'bin_dir', None) + if self.user_bin_dir: + self.user_bin_dir = realpath(join(self.root_dir, self.user_bin_dir)) + + self.targetname = None + self.target = None + if target: + self.set_target(target) + + def set_target(self, target): + '''Set the target to use (one of buildozer.targets, such as "android") + ''' + self.targetname = target + m = __import__('buildozer.targets.{0}'.format(target), + fromlist=['buildozer']) + self.target = m.get_target(self) + self.check_build_layout() + self.check_configuration_tokens() + + def prepare_for_build(self): + '''Prepare the build. + ''' + assert(self.target is not None) + if hasattr(self.target, '_build_prepared'): + return + + self.info('Preparing build') + + self.info('Check requirements for {0}'.format(self.targetname)) + self.target.check_requirements() + + self.info('Install platform') + self.target.install_platform() + + self.info('Check application requirements') + self.check_application_requirements() + + self.check_garden_requirements() + + self.info('Compile platform') + self.target.compile_platform() + + # flag to prevent multiple build + self.target._build_prepared = True + + def build(self): + '''Do the build. + + The target can set build_mode to 'release' or 'debug' before calling + this method. + + (:meth:`prepare_for_build` must have been call before.) + ''' + assert(self.target is not None) + assert(hasattr(self.target, '_build_prepared')) + + if hasattr(self.target, '_build_done'): + return + + # increment the build number + self.build_id = int(self.state.get('cache.build_id', '0')) + 1 + self.state['cache.build_id'] = str(self.build_id) + + self.info('Build the application #{}'.format(self.build_id)) + self.build_application() + + self.info('Package the application') + self.target.build_package() + + # flag to prevent multiple build + self.target._build_done = True + + # + # Log functions + # + + def log(self, level, msg): + if level > self.log_level: + return + if USE_COLOR: + color = COLOR_SEQ(LOG_LEVELS_C[level]) + print(''.join((RESET_SEQ, color, '# ', msg, RESET_SEQ))) + else: + print('{} {}'.format(LOG_LEVELS_T[level], msg)) + + def debug(self, msg): + self.log(self.DEBUG, msg) + + def log_env(self, level, env): + """dump env into debug logger in readable format""" + self.log(level, "ENVIRONMENT:") + for k, v in env.items(): + self.log(level, " {} = {}".format(k, pformat(v))) + + def info(self, msg): + self.log(self.INFO, msg) + + def error(self, msg): + self.log(self.ERROR, msg) + + # + # Internal check methods + # + + def checkbin(self, msg, fn): + self.debug('Search for {0}'.format(msg)) + if exists(fn): + return realpath(fn) + for dn in environ['PATH'].split(':'): + rfn = realpath(join(dn, fn)) + if exists(rfn): + self.debug(' -> found at {0}'.format(rfn)) + return rfn + self.error('{} not found, please install it.'.format(msg)) + exit(1) + + def cmd(self, command, **kwargs): + # prepare the environ, based on the system + our own env + env = environ.copy() + env.update(self.environ) + + # prepare the process + kwargs.setdefault('env', env) + kwargs.setdefault('stdout', PIPE) + kwargs.setdefault('stderr', PIPE) + kwargs.setdefault('close_fds', True) + kwargs.setdefault('shell', True) + kwargs.setdefault('show_output', self.log_level > 1) + + show_output = kwargs.pop('show_output') + get_stdout = 
kwargs.pop('get_stdout', False) + get_stderr = kwargs.pop('get_stderr', False) + break_on_error = kwargs.pop('break_on_error', True) + sensible = kwargs.pop('sensible', False) + run_condition = kwargs.pop('run_condition', None) + quiet = kwargs.pop('quiet', False) + + if not quiet: + if not sensible: + self.debug('Run {0!r}'.format(command)) + else: + if isinstance(command, (list, tuple)): + self.debug('Run {0!r} ...'.format(command[0])) + else: + self.debug('Run {0!r} ...'.format(command.split()[0])) + self.debug('Cwd {}'.format(kwargs.get('cwd'))) + + # open the process + if sys.platform == 'win32': + kwargs.pop('close_fds', None) + process = Popen(command, **kwargs) + + # prepare fds + fd_stdout = process.stdout.fileno() + fd_stderr = process.stderr.fileno() + if fcntl: + fcntl.fcntl( + fd_stdout, fcntl.F_SETFL, + fcntl.fcntl(fd_stdout, fcntl.F_GETFL) | os.O_NONBLOCK) + fcntl.fcntl( + fd_stderr, fcntl.F_SETFL, + fcntl.fcntl(fd_stderr, fcntl.F_GETFL) | os.O_NONBLOCK) + + ret_stdout = [] if get_stdout else None + ret_stderr = [] if get_stderr else None + while not run_condition or run_condition(): + try: + readx = select.select([fd_stdout, fd_stderr], [], [], 1)[0] + except select.error: + break + if fd_stdout in readx: + chunk = process.stdout.read() + if not chunk: + break + if get_stdout: + ret_stdout.append(chunk) + if show_output: + stdout.write(chunk.decode('utf-8', 'replace')) + if fd_stderr in readx: + chunk = process.stderr.read() + if not chunk: + break + if get_stderr: + ret_stderr.append(chunk) + if show_output: + stderr.write(chunk.decode('utf-8', 'replace')) + + stdout.flush() + stderr.flush() + + try: + process.communicate( + timeout=(1 if run_condition and not run_condition() else None) + ) + except TimeoutExpired: + pass + + if process.returncode != 0 and break_on_error: + self.error('Command failed: {0}'.format(command)) + self.log_env(self.ERROR, kwargs['env']) + self.error('') + self.error('Buildozer failed to execute the last command') + if self.log_level <= self.INFO: + self.error('If the error is not obvious, please raise the log_level to 2') + self.error('and retry the latest command.') + else: + self.error('The error might be hidden in the log above this error') + self.error('Please read the full log, and search for it before') + self.error('raising an issue with buildozer itself.') + self.error('In case of a bug report, please add a full log with log_level = 2') + raise BuildozerCommandException() + + if ret_stdout: + ret_stdout = b''.join(ret_stdout) + if ret_stderr: + ret_stderr = b''.join(ret_stderr) + + return (ret_stdout.decode('utf-8', 'ignore') if ret_stdout else None, + ret_stderr.decode('utf-8') if ret_stderr else None, + process.returncode) + + def cmd_expect(self, command, **kwargs): + from pexpect import spawnu + + # prepare the environ, based on the system + our own env + env = environ.copy() + env.update(self.environ) + + # prepare the process + kwargs.setdefault('env', env) + kwargs.setdefault('show_output', self.log_level > 1) + sensible = kwargs.pop('sensible', False) + show_output = kwargs.pop('show_output') + + if show_output: + kwargs['logfile'] = codecs.getwriter('utf8')(stdout.buffer) + + if not sensible: + self.debug('Run (expect) {0!r}'.format(command)) + else: + self.debug('Run (expect) {0!r} ...'.format(command.split()[0])) + + self.debug('Cwd {}'.format(kwargs.get('cwd'))) + return spawnu(command, **kwargs) + + def check_configuration_tokens(self): + '''Ensure the spec file is 'correct'. 
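+
+        Verifies the required tokens ([app] title, source.dir, package.name,
+        and either version or version.regex + version.filename), and rejects
+        clearly invalid values such as a package.name starting with a digit
+        or an unknown orientation.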
+ ''' + self.info('Check configuration tokens') + self.migrate_configuration_tokens() + get = self.config.getdefault + errors = [] + adderror = errors.append + if not get('app', 'title', ''): + adderror('[app] "title" is missing') + if not get('app', 'source.dir', ''): + adderror('[app] "source.dir" is missing') + + package_name = get('app', 'package.name', '') + if not package_name: + adderror('[app] "package.name" is missing') + elif package_name[0] in map(str, range(10)): + adderror('[app] "package.name" may not start with a number.') + + version = get('app', 'version', '') + version_regex = get('app', 'version.regex', '') + if not version and not version_regex: + adderror('[app] One of "version" or "version.regex" must be set') + if version and version_regex: + adderror('[app] Conflict between "version" and "version.regex"' + ', only one can be used.') + if version_regex and not get('app', 'version.filename', ''): + adderror('[app] "version.filename" is missing' + ', required by "version.regex"') + + orientation = get('app', 'orientation', 'landscape') + if orientation not in ('landscape', 'portrait', 'all', 'sensorLandscape'): + adderror('[app] "orientation" have an invalid value') + + if errors: + self.error('{0} error(s) found in the buildozer.spec'.format( + len(errors))) + for error in errors: + print(error) + exit(1) + + def migrate_configuration_tokens(self): + config = self.config + if config.has_section("app"): + migration = ( + ("android.p4a_dir", "p4a.source_dir"), + ("android.p4a_whitelist", "android.whitelist"), + ("android.bootstrap", "p4a.bootstrap"), + ("android.branch", "p4a.branch"), + ("android.p4a_whitelist_src", "android.whitelist_src"), + ("android.p4a_blacklist_src", "android.blacklist_src") + ) + for entry_old, entry_new in migration: + if not config.has_option("app", entry_old): + continue + value = config.get("app", entry_old) + config.set("app", entry_new, value) + config.remove_option("app", entry_old) + self.error("In section [app]: {} is deprecated, rename to {}!".format( + entry_old, entry_new)) + + def check_build_layout(self): + '''Ensure the build (local and global) directory layout and files are + ready. + ''' + self.info('Ensure build layout') + + if not exists(self.specfilename): + print('No {0} found in the current directory. Abandon.'.format( + self.specfilename)) + exit(1) + + # create global dir + self.mkdir(self.global_buildozer_dir) + self.mkdir(self.global_cache_dir) + + # create local .buildozer/ dir + self.mkdir(self.buildozer_dir) + # create local bin/ dir + self.mkdir(self.bin_dir) + + self.mkdir(self.applibs_dir) + self.state = JsonStore(join(self.buildozer_dir, 'state.db')) + + target = self.targetname + if target: + self.mkdir(join(self.global_platform_dir, target, 'platform')) + self.mkdir(join(self.buildozer_dir, target, 'platform')) + self.mkdir(join(self.buildozer_dir, target, 'app')) + + def check_application_requirements(self): + '''Ensure the application requirements are all available and ready to be + packaged as well. + ''' + requirements = self.config.getlist('app', 'requirements', '') + target_available_packages = self.target.get_available_packages() + if target_available_packages is True: + # target handles all packages! 
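+            # (For instance, the android target returns True here, since
+            # python-for-android resolves every requirement with its own
+            # recipes, so nothing has to be installed into applibs.)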
+ return + + # remove all the requirements that the target can compile + onlyname = lambda x: x.split('==')[0] # noqa: E731 + requirements = [x for x in requirements if onlyname(x) not in + target_available_packages] + + if requirements and hasattr(sys, 'real_prefix'): + e = self.error + e('virtualenv is needed to install pure-Python modules, but') + e('virtualenv does not support nesting, and you are running') + e('buildozer in one. Please run buildozer outside of a') + e('virtualenv instead.') + exit(1) + + # did we already installed the libs ? + if ( + exists(self.applibs_dir) and + self.state.get('cache.applibs', '') == requirements + ): + self.debug('Application requirements already installed, pass') + return + + # recreate applibs + self.rmdir(self.applibs_dir) + self.mkdir(self.applibs_dir) + + # ok now check the availability of all requirements + for requirement in requirements: + self._install_application_requirement(requirement) + + # everything goes as expected, save this state! + self.state['cache.applibs'] = requirements + + def _install_application_requirement(self, module): + self._ensure_virtualenv() + self.debug('Install requirement {} in virtualenv'.format(module)) + self.cmd('pip install --target={} {}'.format(self.applibs_dir, module), + env=self.env_venv, + cwd=self.buildozer_dir) + + def check_garden_requirements(self): + garden_requirements = self.config.getlist('app', + 'garden_requirements', '') + if garden_requirements: + warnings.warn("`garden_requirements` settings is deprecated, use `requirements` instead", DeprecationWarning) + + def _ensure_virtualenv(self): + if hasattr(self, 'venv'): + return + self.venv = join(self.buildozer_dir, 'venv') + if not self.file_exists(self.venv): + self.cmd('python3 -m venv ./venv', + cwd=self.buildozer_dir) + + # read virtualenv output and parse it + output = self.cmd('bash -c "source venv/bin/activate && env"', + get_stdout=True, + cwd=self.buildozer_dir) + self.env_venv = copy(self.environ) + for line in output[0].splitlines(): + args = line.split('=', 1) + if len(args) != 2: + continue + key, value = args + if key in ('VIRTUAL_ENV', 'PATH'): + self.env_venv[key] = value + if 'PYTHONHOME' in self.env_venv: + del self.env_venv['PYTHONHOME'] + + # ensure any sort of compilation will fail + self.env_venv['CC'] = '/bin/false' + self.env_venv['CXX'] = '/bin/false' + + def mkdir(self, dn): + if exists(dn): + return + self.debug('Create directory {0}'.format(dn)) + makedirs(dn) + + def rmdir(self, dn): + if not exists(dn): + return + self.debug('Remove directory and subdirectory {}'.format(dn)) + rmtree(dn) + + def file_matches(self, patterns): + from glob import glob + result = [] + for pattern in patterns: + matches = glob(expanduser(pattern.strip())) + result.extend(matches) + return result + + def file_exists(self, *args): + return exists(join(*args)) + + def file_rename(self, source, target, cwd=None): + if cwd: + source = join(cwd, source) + target = join(cwd, target) + self.debug('Rename {0} to {1}'.format(source, target)) + if not os.path.isdir(os.path.dirname(target)): + self.error(('Rename {0} to {1} fails because {2} is not a ' + 'directory').format(source, target, target)) + move(source, target) + + def file_copy(self, source, target, cwd=None): + if cwd: + source = join(cwd, source) + target = join(cwd, target) + self.debug('Copy {0} to {1}'.format(source, target)) + copyfile(source, target) + + def file_extract(self, archive, cwd=None): + if archive.endswith('.tgz') or archive.endswith('.tar.gz'): + self.cmd('tar xzf 
{0}'.format(archive), cwd=cwd) + return + + if archive.endswith('.tbz2') or archive.endswith('.tar.bz2'): + # XXX same as before + self.cmd('tar xjf {0}'.format(archive), cwd=cwd) + return + + if archive.endswith('.bin'): + # To process the bin files for linux and darwin systems + self.cmd('chmod a+x {0}'.format(archive), cwd=cwd) + self.cmd('./{0}'.format(archive), cwd=cwd) + return + + if archive.endswith('.zip'): + self.cmd('unzip -q {}'.format(join(cwd, archive)), cwd=cwd) + return + + raise Exception('Unhandled extraction for type {0}'.format(archive)) + + def file_copytree(self, src, dest): + print('copy {} to {}'.format(src, dest)) + if os.path.isdir(src): + if not os.path.isdir(dest): + os.makedirs(dest) + files = os.listdir(src) + for f in files: + self.file_copytree( + os.path.join(src, f), + os.path.join(dest, f)) + else: + copyfile(src, dest) + + def clean_platform(self): + self.info('Clean the platform build directory') + if not exists(self.platform_dir): + return + rmtree(self.platform_dir) + + def download(self, url, filename, cwd=None): + def report_hook(index, blksize, size): + if size <= 0: + progression = '{0} bytes'.format(index * blksize) + else: + progression = '{0:.2f}%'.format( + index * blksize * 100. / float(size)) + if "CI" not in environ: + stdout.write('- Download {}\r'.format(progression)) + stdout.flush() + + url = url + filename + if cwd: + filename = join(cwd, filename) + if self.file_exists(filename): + unlink(filename) + + self.debug('Downloading {0}'.format(url)) + urlretrieve(url, filename, report_hook) + return filename + + def get_version(self): + c = self.config + has_version = c.has_option('app', 'version') + has_regex = c.has_option('app', 'version.regex') + has_filename = c.has_option('app', 'version.filename') + + # version number specified + if has_version: + if has_regex or has_filename: + raise Exception( + 'version.regex and version.filename conflict with version') + return c.get('app', 'version') + + # search by regex + if has_regex or has_filename: + if has_regex and not has_filename: + raise Exception('version.filename is missing') + if has_filename and not has_regex: + raise Exception('version.regex is missing') + + fn = c.get('app', 'version.filename') + with open(fn) as fd: + data = fd.read() + regex = c.get('app', 'version.regex') + match = search(regex, data) + if not match: + raise Exception( + 'Unable to find capture version in {0}\n' + ' (looking for `{1}`)'.format(fn, regex)) + version = match.groups()[0] + self.debug('Captured version: {0}'.format(version)) + return version + + raise Exception('Missing version or version.regex + version.filename') + + def build_application(self): + self._copy_application_sources() + self._copy_application_libs() + self._add_sitecustomize() + + def _copy_application_sources(self): + # XXX clean the inclusion/exclusion algo. 
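+        # Filter order, as implemented below: exclude_dirs prunes whole
+        # subtrees first; exclude_patterns can then drop individual paths,
+        # and a matching include_patterns entry re-includes them; finally
+        # include_exts/exclude_exts filter single files by (lowercased)
+        # extension.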
+ source_dir = realpath(expanduser(self.config.getdefault('app', 'source.dir', '.'))) + include_exts = self.config.getlist('app', 'source.include_exts', '') + exclude_exts = self.config.getlist('app', 'source.exclude_exts', '') + exclude_dirs = self.config.getlist('app', 'source.exclude_dirs', '') + exclude_patterns = self.config.getlist('app', 'source.exclude_patterns', '') + include_patterns = self.config.getlist('app', + 'source.include_patterns', + '') + app_dir = self.app_dir + + include_exts = [ext.lower() for ext in include_exts] + exclude_exts = [ext.lower() for ext in exclude_exts] + exclude_dirs = [dir.lower() for dir in exclude_dirs] + exclude_patterns = [pat.lower() for pat in exclude_patterns] + include_patterns = [pat.lower() for pat in include_patterns] + + self.debug('Copy application source from {}'.format(source_dir)) + + rmtree(self.app_dir) + + for root, dirs, files in walk(source_dir, followlinks=True): + # avoid hidden directory + if True in [x.startswith('.') for x in root.split(sep)]: + continue + + # need to have sort-of normalization. Let's say you want to exclude + # image directory but not images, the filtered_root must have a / at + # the end, same for the exclude_dir. And then we can safely compare + filtered_root = root[len(source_dir) + 1:].lower() + if filtered_root: + filtered_root += '/' + + # manual exclude_dirs approach + is_excluded = False + for exclude_dir in exclude_dirs: + if exclude_dir[-1] != '/': + exclude_dir += '/' + if filtered_root.startswith(exclude_dir): + is_excluded = True + break + + # pattern matching + if not is_excluded: + # match pattern if not ruled out by exclude_dirs + for pattern in exclude_patterns: + if fnmatch(filtered_root, pattern): + is_excluded = True + break + for pattern in include_patterns: + if fnmatch(filtered_root, pattern): + is_excluded = False + break + + if is_excluded: + continue + + for fn in files: + # avoid hidden files + if fn.startswith('.'): + continue + + # pattern matching + is_excluded = False + dfn = fn.lower() + if filtered_root: + dfn = join(filtered_root, fn) + for pattern in exclude_patterns: + if fnmatch(dfn, pattern): + is_excluded = True + break + for pattern in include_patterns: + if fnmatch(dfn, pattern): + is_excluded = False + break + if is_excluded: + continue + + # filter based on the extension + # TODO more filters + basename, ext = splitext(fn) + if ext: + ext = ext[1:].lower() + if include_exts and ext not in include_exts: + continue + if exclude_exts and ext in exclude_exts: + continue + + sfn = join(root, fn) + rfn = realpath(join(app_dir, root[len(source_dir) + 1:], fn)) + + # ensure the directory exists + dfn = dirname(rfn) + self.mkdir(dfn) + + # copy! 
+                self.debug('Copy {0}'.format(sfn))
+                copyfile(sfn, rfn)
+
+    def _copy_application_libs(self):
+        # copy also the libs
+        copytree(self.applibs_dir, join(self.app_dir, '_applibs'))
+
+    def _add_sitecustomize(self):
+        copyfile(join(dirname(__file__), 'sitecustomize.py'),
+                 join(self.app_dir, 'sitecustomize.py'))
+
+        main_py = join(self.app_dir, 'service', 'main.py')
+        if not self.file_exists(main_py):
+            return
+
+        header = (b'import sys, os; '
+                  b'sys.path = [os.path.join(os.getcwd(),'
+                  b'"..", "_applibs")] + sys.path\n')
+        with open(main_py, 'rb') as fd:
+            data = fd.read()
+        data = header + data
+        with open(main_py, 'wb') as fd:
+            fd.write(data)
+        self.info('Patched service/main.py to include applibs')
+
+    def namify(self, name):
+        '''Return a "valid" name from a name with a lot of invalid chars
+        (allowed characters: a-z, A-Z, 0-9, -, _)
+        '''
+        return re.sub(r'[^a-zA-Z0-9_\-]', '_', name)
+
+    @property
+    def root_dir(self):
+        return realpath(expanduser(dirname(self.specfilename)))
+
+    @property
+    def user_build_dir(self):
+        """The user-provided build dir, if any."""
+        # Check for a user-provided build dir
+        # Check the (deprecated) builddir token, for backwards compatibility
+        build_dir = self.config.getdefault('buildozer', 'builddir', None)
+        if build_dir is not None:
+            # for backwards compatibility, append .buildozer to builddir
+            build_dir = join(build_dir, '.buildozer')
+        build_dir = self.config.getdefault('buildozer', 'build_dir', build_dir)
+
+        if build_dir is not None:
+            build_dir = realpath(join(self.root_dir, expanduser(build_dir)))
+
+        return build_dir
+
+    @property
+    def buildozer_dir(self):
+        '''The directory in which to run the app build.'''
+        if self.user_build_dir is not None:
+            return self.user_build_dir
+        return join(self.root_dir, '.buildozer')
+
+    @property
+    def bin_dir(self):
+        if self.user_bin_dir:
+            return self.user_bin_dir
+        return join(self.root_dir, 'bin')
+
+    @property
+    def platform_dir(self):
+        return join(self.buildozer_dir, self.targetname, 'platform')
+
+    @property
+    def app_dir(self):
+        return join(self.buildozer_dir, self.targetname, 'app')
+
+    @property
+    def applibs_dir(self):
+        return join(self.buildozer_dir, 'applibs')
+
+    @property
+    def global_buildozer_dir(self):
+        return join(expanduser('~'), '.buildozer')
+
+    @property
+    def global_platform_dir(self):
+        return join(self.global_buildozer_dir, self.targetname, 'platform')
+
+    @property
+    def global_packages_dir(self):
+        return join(self.global_buildozer_dir, self.targetname, 'packages')
+
+    @property
+    def global_cache_dir(self):
+        return join(self.global_buildozer_dir, 'cache')
+
+    @property
+    def package_full_name(self):
+        package_name = self.config.getdefault('app', 'package.name', '')
+        package_domain = self.config.getdefault('app', 'package.domain', '')
+        if package_domain == '':
+            return package_name
+        return '{}.{}'.format(package_domain, package_name)
+
+    #
+    # command line invocation
+    #
+
+    def targets(self):
+        for fn in listdir(join(dirname(__file__), 'targets')):
+            if fn.startswith('.') or fn.startswith('__'):
+                continue
+            if not fn.endswith('.py'):
+                continue
+            target = fn[:-3]
+            try:
+                m = __import__('buildozer.targets.{0}'.format(target),
+                               fromlist=['buildozer'])
+                yield target, m
+            except NotImplementedError:
+                pass
+            except:
+                raise
+            pass
+
+    def usage(self):
+        print('Usage:')
+        print('    buildozer [--profile <profile>] [--verbose] [target] <command>...')
+        print('    buildozer --version')
+        print('')
+        print('Available targets:')
+        targets = list(self.targets())
+        for target, m in targets:
+            try:
+                doc = m.__doc__.strip().splitlines()[0].strip()
+            except Exception:
+                doc = ''
+            print('  {0:<18} {1}'.format(target, doc))
+
+        print('')
+        print('Global commands (without target):')
+        cmds = [x for x in dir(self) if x.startswith('cmd_')]
+        for cmd in cmds:
+            name = cmd[4:]
+            meth = getattr(self, cmd)
+
+            if not meth.__doc__:
+                continue
+            doc = list(meth.__doc__.strip().splitlines())[0].strip()
+            print('  {0:<18} {1}'.format(name, doc))
+
+        print('')
+        print('Target commands:')
+        print('  clean      Clean the target environment')
+        print('  update     Update the target dependencies')
+        print('  debug      Build the application in debug mode')
+        print('  release    Build the application in release mode')
+        print('  deploy     Deploy the application on the device')
+        print('  run        Run the application on the device')
+        print('  serve      Serve the bin directory via SimpleHTTPServer')
+
+        for target, m in targets:
+            mt = m.get_target(self)
+            commands = mt.get_custom_commands()
+            if not commands:
+                continue
+            print('')
+            print('Target "{0}" commands:'.format(target))
+            for command, doc in commands:
+                if not doc:
+                    continue
+                doc = textwrap.fill(textwrap.dedent(doc).strip(), 59,
+                                    subsequent_indent=' ' * 21)
+                print('  {0:<18} {1}'.format(command, doc))
+
+        print('')
+
+    def run_default(self):
+        self.check_build_layout()
+        if 'buildozer:defaultcommand' not in self.state:
+            print('No default command set.')
+            print('Use "buildozer setdefault <command args...>"')
+            print('Use "buildozer help" for a list of all commands"')
+            exit(1)
+        cmd = self.state['buildozer:defaultcommand']
+        self.run_command(cmd)
+
+    def run_command(self, args):
+        while args:
+            if not args[0].startswith('-'):
+                break
+            arg = args.pop(0)
+
+            if arg in ('-v', '--verbose'):
+                self.log_level = 2
+
+            elif arg in ('-h', '--help'):
+                self.usage()
+                exit(0)
+
+            elif arg in ('-p', '--profile'):
+                self.config_profile = args.pop(0)
+
+            elif arg == '--version':
+                print('Buildozer {0}'.format(__version__))
+                exit(0)
+
+        self._merge_config_profile()
+
+        self.check_root()
+
+        if not args:
+            self.run_default()
+            return
+
+        command, args = args[0], args[1:]
+        cmd = 'cmd_{0}'.format(command)
+
+        # internal commands ?
+        if hasattr(self, cmd):
+            getattr(self, cmd)(*args)
+            return
+
+        # maybe it's a target?
+        targets = [x[0] for x in self.targets()]
+        if command not in targets:
+            print('Unknown command/target {}'.format(command))
+            exit(1)
+
+        self.set_target(command)
+        self.target.run_commands(args)
+
+    def check_root(self):
+        '''If effective user id is 0, display a warning and require
+        user input to continue (or to cancel)'''
+
+        warn_on_root = self.config.getdefault('buildozer', 'warn_on_root', '1')
+        try:
+            euid = os.geteuid() == 0
+        except AttributeError:
+            if sys.platform == 'win32':
+                import ctypes
+                euid = ctypes.windll.shell32.IsUserAnAdmin() != 0
+        if warn_on_root == '1' and euid:
+            print('\033[91m\033[1mBuildozer is running as root!\033[0m')
+            print('\033[91mThis is \033[1mnot\033[0m \033[91mrecommended, and may lead to problems later.\033[0m')
+            cont = None
+            while cont not in ('y', 'n'):
+                cont = input('Are you sure you want to continue [y/n]? ')
+
+            if cont == 'n':
+                sys.exit()
+
+    def cmd_init(self, *args):
+        '''Create an initial buildozer.spec in the current directory
+        '''
+        if exists('buildozer.spec'):
+            print('ERROR: You already have a buildozer.spec file.')
+            exit(1)
+        copyfile(join(dirname(__file__), 'default.spec'), 'buildozer.spec')
+        print('File buildozer.spec created, ready to customize!')
+
+    def cmd_distclean(self, *args):
+        '''Clean the whole Buildozer environment.
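+
+        Prompts for confirmation, then removes the global ~/.buildozer
+        directory, including the downloaded SDK/NDK and all cached packages.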
+ ''' + print("Warning: Your ndk, sdk and all other cached packages will be" + " removed. Continue? (y/n)") + if sys.stdin.readline().lower()[0] == 'y': + self.info('Clean the global build directory') + if not exists(self.global_buildozer_dir): + return + rmtree(self.global_buildozer_dir) + + def cmd_appclean(self, *args): + '''Clean the .buildozer folder in the app directory. + + This command specifically refuses to delete files in a + user-specified build directory, to avoid accidentally deleting + more than the user intends. + ''' + if self.user_build_dir is not None: + self.error( + ('Failed: build_dir is specified as {} in the buildozer config. `appclean` will ' + 'not attempt to delete files in a user-specified build directory.').format(self.user_build_dir)) + elif exists(self.buildozer_dir): + self.info('Deleting {}'.format(self.buildozer_dir)) + rmtree(self.buildozer_dir) + else: + self.error('{} already deleted, skipping.'.format(self.buildozer_dir)) + + def cmd_help(self, *args): + '''Show the Buildozer help. + ''' + self.usage() + + def cmd_setdefault(self, *args): + '''Set the default command to run when no arguments are given + ''' + self.check_build_layout() + self.state['buildozer:defaultcommand'] = args + + def cmd_version(self, *args): + '''Show the Buildozer version + ''' + print('Buildozer {0}'.format(__version__)) + + def cmd_serve(self, *args): + '''Serve the bin directory via SimpleHTTPServer + ''' + try: + from http.server import SimpleHTTPRequestHandler + from socketserver import TCPServer + except ImportError: + from SimpleHTTPServer import SimpleHTTPRequestHandler + from SocketServer import TCPServer + + os.chdir(self.bin_dir) + handler = SimpleHTTPRequestHandler + httpd = TCPServer(("", SIMPLE_HTTP_SERVER_PORT), handler) + print("Serving via HTTP at port {}".format(SIMPLE_HTTP_SERVER_PORT)) + print("Press Ctrl+c to quit serving.") + httpd.serve_forever() + + # + # Private + # + + def _merge_config_profile(self): + profile = self.config_profile + if not profile: + return + for section in self.config.sections(): + + # extract the profile part from the section name + # example: [app@default,hd] + parts = section.split('@', 1) + if len(parts) < 2: + continue + + # create a list that contain all the profiles of the current section + # ['default', 'hd'] + section_base, section_profiles = parts + section_profiles = section_profiles.split(',') + if profile not in section_profiles: + continue + + # the current profile is one available in the section + # merge with the general section, or make it one. + if not self.config.has_section(section_base): + self.config.add_section(section_base) + for name, value in self.config.items(section): + print('merged ({}, {}) into {} (profile is {})'.format(name, + value, section_base, profile)) + self.config.set(section_base, name, value) + + def _get_config_list_values(self, *args, **kwargs): + kwargs['with_values'] = True + return self._get_config_list(*args, **kwargs) + + def _get_config_list(self, section, token, default=None, with_values=False): + # monkey-patch method for ConfigParser + # get a key as a list of string, separated from the comma + + # check if an env var exists that should replace the file config + set_config_token_from_env(section, token, self.config) + + # if a section:token is defined, let's use the content as a list. 
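+        # For example, a spec section such as
+        #   [app:source.exclude_dirs]
+        #   tests
+        #   bin
+        # yields the same list as `source.exclude_dirs = tests, bin`
+        # (and with with_values=True each entry comes back as 'key=value').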
+ l_section = '{}:{}'.format(section, token) + if self.config.has_section(l_section): + values = self.config.options(l_section) + if with_values: + return ['{}={}'.format(key, self.config.get(l_section, key)) for + key in values] + else: + return [x.strip() for x in values] + + values = self.config.getdefault(section, token, '') + if not values: + return default + values = values.split(',') + if not values: + return default + return [x.strip() for x in values] + + def _get_config_default(self, section, token, default=None): + # monkey-patch method for ConfigParser + # get an appropriate env var if it exists, else + # get a key in a section, or the default + + # check if an env var exists that should replace the file config + set_config_token_from_env(section, token, self.config) + + if not self.config.has_section(section): + return default + if not self.config.has_option(section, token): + return default + return self.config.get(section, token) + + def _get_config_bool(self, section, token, default=False): + # monkey-patch method for ConfigParser + # get a key in a section, or the default + + # check if an env var exists that should replace the file config + set_config_token_from_env(section, token, self.config) + + if not self.config.has_section(section): + return default + if not self.config.has_option(section, token): + return default + return self.config.getboolean(section, token) + + def _get_config_raw_default(self, section, token, default=None, section_sep="=", split_char=" "): + l_section = '{}:{}'.format(section, token) + if self.config.has_section(l_section): + return [section_sep.join(item) for item in self.config.items(l_section)] + if not self.config.has_option(section, token): + return default.split(split_char) + return self.config.get(section, token).split(split_char) + + +def set_config_from_envs(config): + '''Takes a ConfigParser, and checks every section/token for an + environment variable of the form SECTION_TOKEN, with any dots + replaced by underscores. If the variable exists, sets the config + variable to the env value. + ''' + for section in config.sections(): + for token in config.options(section): + set_config_token_from_env(section, token, config) + + +def set_config_token_from_env(section, token, config): + '''Given a config section and token, checks for an appropriate + environment variable. If the variable exists, sets the config entry to + its value. + + The environment variable checked is of the form SECTION_TOKEN, all + upper case, with any dots replaced by underscores. + + Returns True if the environment variable exists and was used, or + False otherwise. 
+ + ''' + env_var_name = ''.join([section.upper(), '_', + token.upper().replace('.', '_')]) + env_var = os.environ.get(env_var_name) + if env_var is None: + return False + config.set(section, token, env_var) + return True diff --git a/venv/lib/python3.8/site-packages/buildozer/__main__.py b/venv/lib/python3.8/site-packages/buildozer/__main__.py new file mode 100644 index 0000000..2252057 --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer/__main__.py @@ -0,0 +1,4 @@ +from buildozer.scripts.client import main + +if __name__ == '__main__': + main() diff --git a/venv/lib/python3.8/site-packages/buildozer/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..b164cab Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/buildozer/__pycache__/__main__.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000..8501cfe Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/__pycache__/__main__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/buildozer/__pycache__/jsonstore.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/__pycache__/jsonstore.cpython-38.pyc new file mode 100644 index 0000000..5886e03 Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/__pycache__/jsonstore.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/buildozer/__pycache__/sitecustomize.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/__pycache__/sitecustomize.cpython-38.pyc new file mode 100644 index 0000000..2321846 Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/__pycache__/sitecustomize.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/buildozer/__pycache__/target.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/__pycache__/target.cpython-38.pyc new file mode 100644 index 0000000..c000674 Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/__pycache__/target.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/buildozer/default.spec b/venv/lib/python3.8/site-packages/buildozer/default.spec new file mode 100644 index 0000000..3b9ce3d --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer/default.spec @@ -0,0 +1,430 @@ +[app] + +# (str) Title of your application +title = My Application + +# (str) Package name +package.name = myapp + +# (str) Package domain (needed for android/ios packaging) +package.domain = org.test + +# (str) Source code where the main.py live +source.dir = . 
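+
+# Note: any value in this file can also be overridden at run time with an
+# environment variable named SECTION_TOKEN (upper case, dots replaced by
+# underscores), e.g. APP_SOURCE_DIR=src; see set_config_token_from_env in
+# buildozer/__init__.py.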
+ +# (list) Source files to include (let empty to include all the files) +source.include_exts = py,png,jpg,kv,atlas + +# (list) List of inclusions using pattern matching +#source.include_patterns = assets/*,images/*.png + +# (list) Source files to exclude (let empty to not exclude anything) +#source.exclude_exts = spec + +# (list) List of directory to exclude (let empty to not exclude anything) +#source.exclude_dirs = tests, bin, venv + +# (list) List of exclusions using pattern matching +# Do not prefix with './' +#source.exclude_patterns = license,images/*/*.jpg + +# (str) Application versioning (method 1) +version = 0.1 + +# (str) Application versioning (method 2) +# version.regex = __version__ = ['"](.*)['"] +# version.filename = %(source.dir)s/main.py + +# (list) Application requirements +# comma separated e.g. requirements = sqlite3,kivy +requirements = python3,kivy + +# (str) Custom source folders for requirements +# Sets custom source for any requirements with recipes +# requirements.source.kivy = ../../kivy + +# (str) Presplash of the application +#presplash.filename = %(source.dir)s/data/presplash.png + +# (str) Icon of the application +#icon.filename = %(source.dir)s/data/icon.png + +# (str) Supported orientation (one of landscape, sensorLandscape, portrait or all) +orientation = portrait + +# (list) List of service to declare +#services = NAME:ENTRYPOINT_TO_PY,NAME2:ENTRYPOINT2_TO_PY + +# +# OSX Specific +# + +# +# author = © Copyright Info + +# change the major version of python used by the app +osx.python_version = 3 + +# Kivy version to use +osx.kivy_version = 1.9.1 + +# +# Android specific +# + +# (bool) Indicate if the application should be fullscreen or not +fullscreen = 0 + +# (string) Presplash background color (for android toolchain) +# Supported formats are: #RRGGBB #AARRGGBB or one of the following names: +# red, blue, green, black, white, gray, cyan, magenta, yellow, lightgray, +# darkgray, grey, lightgrey, darkgrey, aqua, fuchsia, lime, maroon, navy, +# olive, purple, silver, teal. +#android.presplash_color = #FFFFFF + +# (string) Presplash animation using Lottie format. +# see https://lottiefiles.com/ for examples and https://airbnb.design/lottie/ +# for general documentation. +# Lottie files can be created using various tools, like Adobe After Effect or Synfig. +#android.presplash_lottie = "path/to/lottie/file.json" + +# (str) Adaptive icon of the application (used if Android API level is 26+ at runtime) +#icon.adaptive_foreground.filename = %(source.dir)s/data/icon_fg.png +#icon.adaptive_background.filename = %(source.dir)s/data/icon_bg.png + +# (list) Permissions +#android.permissions = INTERNET + +# (list) features (adds uses-feature -tags to manifest) +#android.features = android.hardware.usb.host + +# (int) Target Android API, should be as high as possible. +#android.api = 27 + +# (int) Minimum API your APK / AAB will support. +#android.minapi = 21 + +# (int) Android SDK version to use +#android.sdk = 20 + +# (str) Android NDK version to use +#android.ndk = 23b + +# (int) Android NDK API to use. This is the minimum API your app will support, it should usually match android.minapi. +#android.ndk_api = 21 + +# (bool) Use --private data storage (True) or --dir public storage (False) +#android.private_storage = True + +# (str) Android NDK directory (if empty, it will be automatically downloaded.) +#android.ndk_path = + +# (str) Android SDK directory (if empty, it will be automatically downloaded.) 
+#android.sdk_path =
+
+# (str) ANT directory (if empty, it will be automatically downloaded.)
+#android.ant_path =
+
+# (bool) If True, then skip trying to update the Android sdk
+# This can be useful to avoid excess Internet downloads or save time
+# when an update is due and you just want to test/build your package
+# android.skip_update = False
+
+# (bool) If True, then automatically accept SDK license
+# agreements. This is intended for automation only. If set to False,
+# the default, you will be shown the license when first running
+# buildozer.
+# android.accept_sdk_license = False
+
+# (str) Android entry point, default is ok for Kivy-based app
+#android.entrypoint = org.kivy.android.PythonActivity
+
+# (str) Full name including package path of the Java class that implements Android Activity
+# use that parameter together with android.entrypoint to set custom Java class instead of PythonActivity
+#android.activity_class_name = org.kivy.android.PythonActivity
+
+# (str) Extra xml to write directly inside the <manifest> element of AndroidManifest.xml
+# use that parameter to provide a filename from where to load your custom XML code
+#android.extra_manifest_xml = ./src/android/extra_manifest.xml
+
+# (str) Extra xml to write directly inside the <manifest><application> tag of AndroidManifest.xml
+# use that parameter to provide a filename from where to load your custom XML arguments:
+#android.extra_manifest_application_arguments = ./src/android/extra_manifest_application_arguments.xml
+
+# (str) Full name including package path of the Java class that implements Python Service
+# use that parameter to set custom Java class instead of PythonService
+#android.service_class_name = org.kivy.android.PythonService
+
+# (str) Android app theme, default is ok for Kivy-based app
+# android.apptheme = "@android:style/Theme.NoTitleBar"
+
+# (list) Pattern to whitelist for the whole project
+#android.whitelist =
+
+# (str) Path to a custom whitelist file
+#android.whitelist_src =
+
+# (str) Path to a custom blacklist file
+#android.blacklist_src =
+
+# (list) List of Java .jar files to add to the libs so that pyjnius can access
+# their classes. Don't add jars that you do not need, since extra jars can slow
+# down the build process. Allows wildcards matching, for example:
+# OUYA-ODK/libs/*.jar
+#android.add_jars = foo.jar,bar.jar,path/to/more/*.jar
+
+# (list) List of Java files to add to the android project (can be java or a
+# directory containing the files)
+#android.add_src =
+
+# (list) Android AAR archives to add
+#android.add_aars =
+
+# (list) Put these files or directories in the apk assets directory.
+# Either form may be used, and assets need not be in 'source.include_exts'.
+# 1) android.add_assets = source_asset_relative_path
+# 2) android.add_assets = source_asset_path:destination_asset_relative_path
+#android.add_assets =
+
+# (list) Gradle dependencies to add
+#android.gradle_dependencies =
+
+# (bool) Enable AndroidX support. Enable when 'android.gradle_dependencies'
+# contains an 'androidx' package, or any package from Kotlin source.
+# android.enable_androidx requires android.api >= 28
+#android.enable_androidx = False
+
+# (list) add java compile options
+# this can for example be necessary when importing certain java libraries using the 'android.gradle_dependencies' option
+# see https://developer.android.com/studio/write/java8-support for further information
+# android.add_compile_options = "sourceCompatibility = 1.8", "targetCompatibility = 1.8"
+
+# (list) Gradle repositories to add {can be necessary for some android.gradle_dependencies}
+# please enclose in double quotes
+# e.g. android.gradle_repositories = "maven { url 'https://kotlin.bintray.com/ktor' }"
+#android.add_gradle_repositories =
+
+# (list) packaging options to add
+# see https://google.github.io/android-gradle-dsl/current/com.android.build.gradle.internal.dsl.PackagingOptions.html
+# can be necessary to solve conflicts in gradle_dependencies
+# please enclose in double quotes
+# e.g. android.add_packaging_options = "exclude 'META-INF/common.kotlin_module'", "exclude 'META-INF/*.kotlin_module'"
+#android.add_packaging_options =
+
+# (list) Java classes to add as activities to the manifest.
+#android.add_activities = com.example.ExampleActivity
+
+# (str) OUYA Console category. Should be one of GAME or APP
+# If you leave this blank, OUYA support will not be enabled
+#android.ouya.category = GAME
+
+# (str) Filename of OUYA Console icon. It must be a 732x412 png image.
+#android.ouya.icon.filename = %(source.dir)s/data/ouya_icon.png
+
+# (str) XML file to include as an intent filters in <activity> tag
+#android.manifest.intent_filters =
+
+# (str) launchMode to set for the main activity
+#android.manifest.launch_mode = standard
+
+# (list) Android additional libraries to copy into libs/armeabi
+#android.add_libs_armeabi = libs/android/*.so
+#android.add_libs_armeabi_v7a = libs/android-v7/*.so
+#android.add_libs_arm64_v8a = libs/android-v8/*.so
+#android.add_libs_x86 = libs/android-x86/*.so
+#android.add_libs_mips = libs/android-mips/*.so
+
+# (bool) Indicate whether the screen should stay on
+# Don't forget to add the WAKE_LOCK permission if you set this to True
+#android.wakelock = False
+
+# (list) Android application meta-data to set (key=value format)
+#android.meta_data =
+
+# (list) Android library project to add (will be added in the
+# project.properties automatically.)
+#android.library_references =
+
+# (list) Android shared libraries which will be added to AndroidManifest.xml using <uses-library> tag
+#android.uses_library =
+
+# (str) Android logcat filters to use
+#android.logcat_filters = *:S python:D
+
+# (bool) Android logcat only display log for activity's pid
+#android.logcat_pid_only = False
+
+# (str) Android additional adb arguments
+#android.adb_args = -H host.docker.internal
+
+# (bool) Copy library instead of making a libpymodules.so
+#android.copy_libs = 1
+
+# (list) The Android archs to build for, choices: armeabi-v7a, arm64-v8a, x86, x86_64
+# In the past this was `android.arch`, as builds for multiple archs at the same time were not supported.
+android.archs = arm64-v8a, armeabi-v7a + +# (int) overrides automatic versionCode computation (used in build.gradle) +# this is not the same as app version and should only be edited if you know what you're doing +# android.numeric_version = 1 + +# (bool) enables Android auto backup feature (Android API >=23) +android.allow_backup = True + +# (str) XML file for custom backup rules (see official auto backup documentation) +# android.backup_rules = + +# (str) If you need to insert variables into your AndroidManifest.xml file, +# you can do so with the manifestPlaceholders property. +# This property takes a map of key-value pairs. (via a string) +# Usage example : android.manifest_placeholders = [myCustomUrl:\"org.kivy.customurl\"] +# android.manifest_placeholders = [:] + +# (bool) disables the compilation of py to pyc/pyo files when packaging +# android.no-compile-pyo = True + +# (str) The format used to package the app for release mode (aab or apk or aar). +# android.release_artifact = aab + +# (str) The format used to package the app for debug mode (apk or aar). +# android.debug_artifact = apk + +# +# Python for android (p4a) specific +# + +# (str) python-for-android URL to use for checkout +#p4a.url = + +# (str) python-for-android fork to use in case if p4a.url is not specified, defaults to upstream (kivy) +#p4a.fork = kivy + +# (str) python-for-android branch to use, defaults to master +#p4a.branch = master + +# (str) python-for-android specific commit to use, defaults to HEAD, must be within p4a.branch +#p4a.commit = HEAD + +# (str) python-for-android git clone directory (if empty, it will be automatically cloned from github) +#p4a.source_dir = + +# (str) The directory in which python-for-android should look for your own build recipes (if any) +#p4a.local_recipes = + +# (str) Filename to the hook for p4a +#p4a.hook = + +# (str) Bootstrap to use for android builds +# p4a.bootstrap = sdl2 + +# (int) port number to specify an explicit --port= p4a argument (eg for bootstrap flask) +#p4a.port = + +# Control passing the --use-setup-py vs --ignore-setup-py to p4a +# "in the future" --use-setup-py is going to be the default behaviour in p4a, right now it is not +# Setting this to false will pass --ignore-setup-py, true will pass --use-setup-py +# NOTE: this is general setuptools integration, having pyproject.toml is enough, no need to generate +# setup.py if you're using Poetry, but you need to add "toml" to source.include_exts. 
+#p4a.setup_py = false
+
+# (str) extra command line arguments to pass when invoking pythonforandroid.toolchain
+#p4a.extra_args =
+
+
+#
+# iOS specific
+#
+
+# (str) Path to a custom kivy-ios folder
+#ios.kivy_ios_dir = ../kivy-ios
+# Alternately, specify the URL and branch of a git checkout:
+ios.kivy_ios_url = https://github.com/kivy/kivy-ios
+ios.kivy_ios_branch = master
+
+# Another platform dependency: ios-deploy
+# Uncomment to use a custom checkout
+#ios.ios_deploy_dir = ../ios_deploy
+# Or specify URL and branch
+ios.ios_deploy_url = https://github.com/phonegap/ios-deploy
+ios.ios_deploy_branch = 1.10.0
+
+# (bool) Whether or not to sign the code
+ios.codesign.allowed = false
+
+# (str) Name of the certificate to use for signing the debug version
+# Get a list of available identities: buildozer ios list_identities
+#ios.codesign.debug = "iPhone Developer: <lastname> <firstname> (<hexstring>)"
+
+# (str) The development team to use for signing the debug version
+#ios.codesign.development_team.debug = <hexstring>
+
+# (str) Name of the certificate to use for signing the release version
+#ios.codesign.release = %(ios.codesign.debug)s
+
+# (str) The development team to use for signing the release version
+#ios.codesign.development_team.release = <hexstring>
+
+# (str) URL pointing to .ipa file to be installed
+# This option should be defined along with `display_image_url` and `full_size_image_url` options.
+#ios.manifest.app_url =
+
+# (str) URL pointing to an icon (57x57px) to be displayed during download
+# This option should be defined along with `app_url` and `full_size_image_url` options.
+#ios.manifest.display_image_url =
+
+# (str) URL pointing to a large icon (512x512px) to be used by iTunes
+# This option should be defined along with `app_url` and `display_image_url` options.
+#ios.manifest.full_size_image_url =
+
+
+[buildozer]
+
+# (int) Log level (0 = error only, 1 = info, 2 = debug (with command output))
+log_level = 2
+
+# (int) Display warning if buildozer is run as root (0 = False, 1 = True)
+warn_on_root = 1
+
+# (str) Path to build artifact storage, absolute or relative to spec file
+# build_dir = ./.buildozer
+
+# (str) Path to build output (i.e. .apk, .aab, .ipa) storage
+# bin_dir = ./bin
+
+# -----------------------------------------------------------------------------
+# List as sections
+#
+# You can define all the "list" tokens as [section:key].
+# Each line will be considered as an option to the list.
+# Let's take [app] / source.exclude_patterns.
+# Instead of doing:
+#
+#[app]
+#source.exclude_patterns = license,data/audio/*.wav,data/images/original/*
+#
+# This can be translated into:
+#
+#[app:source.exclude_patterns]
+#license
+#data/audio/*.wav
+#data/images/original/*
+#
+
+
+# -----------------------------------------------------------------------------
+# Profiles
+#
+# You can extend a section / key with a profile.
+# For example, say you want to deploy a demo version of your application without
+# HD content. You could first change the title to add "(demo)" in the name
+# and extend the excluded directories to remove the HD content.
+# +#[app@demo] +#title = My Application (demo) +# +#[app:source.exclude_patterns@demo] +#images/hd/* +# +# Then, invoke the command line with the "demo" profile: +# +#buildozer --profile demo android debug diff --git a/venv/lib/python3.8/site-packages/buildozer/jsonstore.py b/venv/lib/python3.8/site-packages/buildozer/jsonstore.py new file mode 100644 index 0000000..62b20d7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer/jsonstore.py @@ -0,0 +1,47 @@ +""" +Replacement for shelve, using json. +This was needed to correctly support db between Python 2 and 3. +""" + +__all__ = ["JsonStore"] + +import io +from json import load, dump +from os.path import exists + + +class JsonStore: + + def __init__(self, filename): + self.filename = filename + self.data = {} + if exists(filename): + try: + with io.open(filename, encoding='utf-8') as fd: + self.data = load(fd) + except ValueError: + print("Unable to read the state.db, content will be replaced.") + + def __getitem__(self, key): + return self.data[key] + + def __setitem__(self, key, value): + self.data[key] = value + self.sync() + + def __delitem__(self, key): + del self.data[key] + self.sync() + + def __contains__(self, item): + return item in self.data + + def get(self, item, default=None): + return self.data.get(item, default) + + def keys(self): + return self.data.keys() + + def sync(self): + with open(self.filename, 'w') as fd: + dump(self.data, fd, ensure_ascii=False) diff --git a/venv/lib/python3.8/site-packages/buildozer/libs/__init__.py b/venv/lib/python3.8/site-packages/buildozer/libs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..ed5b279 Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/__init__.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/_structures.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/_structures.cpython-38.pyc new file mode 100644 index 0000000..868ffc5 Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/_structures.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/version.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000..d15d604 Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/libs/__pycache__/version.cpython-38.pyc differ diff --git a/venv/lib/python3.8/site-packages/buildozer/libs/_structures.py b/venv/lib/python3.8/site-packages/buildozer/libs/_structures.py new file mode 100644 index 0000000..8c88750 --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer/libs/_structures.py @@ -0,0 +1,78 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
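+
+# Note (added for clarity, not in the original source): the two
+# singletons below are sentinels that compare greater (Infinity) or
+# smaller (NegativeInfinity) than any other object, letting version.py's
+# _cmpkey() stand in for missing version segments when building sort keys.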
+ +class Infinity: + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + + +Infinity = Infinity() + + +class NegativeInfinity: + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + + +NegativeInfinity = NegativeInfinity() diff --git a/venv/lib/python3.8/site-packages/buildozer/libs/version.py b/venv/lib/python3.8/site-packages/buildozer/libs/version.py new file mode 100644 index 0000000..7ed99f1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/buildozer/libs/version.py @@ -0,0 +1,400 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import collections +import itertools +import re + +from ._structures import Infinity + + +__all__ = [ + "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" +] + + +_Version = collections.namedtuple( + "_Version", + ["epoch", "release", "dev", "pre", "post", "local"], +) + + +def parse(version): + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. 
+ """ + + +class _BaseVersion: + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, +) + +_legacy_version_replacement_map = { + "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + parts = tuple(parts) + + return epoch, parts + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
                                          # pre-release
+            [-_\.]?
+            (?P(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P[0-9]+)?
+        )?
+        (?P                                         # post release
+            (?:-(?P[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?Ppost|rev|r)
+                [-_\.]?
+                (?P[0-9]+)?
+            )
+        )?
+        (?P                                          # dev release
+            [-_\.]?
+            (?Pdev)
+            [-_\.]?
+            (?P[0-9]+)?
+        )?
+    )
+    (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
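+# A hedged illustration of the named groups above (added note, not part
+# of the original module): "1.2.3rc1.post2.dev3+local.4" yields
+# release="1.2.3", pre_l="rc", pre_n="1", post_l="post", post_n2="2",
+# dev_l="dev", dev_n="3", local="local.4".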
+class Version(_BaseVersion):
+
+    _regex = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    def __init__(self, version):
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(
+                match.group("pre_l"),
+                match.group("pre_n"),
+            ),
+            post=_parse_letter_version(
+                match.group("post_l"),
+                match.group("post_n1") or match.group("post_n2"),
+            ),
+            dev=_parse_letter_version(
+                match.group("dev_l"),
+                match.group("dev_n"),
+            ),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        return "".format(repr(str(self)))
+
+    def __str__(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        # Pre-release
+        if self._version.pre is not None:
+            parts.append("-" + "".join(str(x) for x in self._version.pre))
+
+        # Post-release
+        if self._version.post is not None:
+            parts.append(".post{0}".format(self._version.post[1]))
+
+        # Development release
+        if self._version.dev is not None:
+            parts.append(".dev{0}".format(self._version.dev[1]))
+
+        # Local version segment
+        if self._version.local is not None:
+            parts.append(
+                "+{0}".format(".".join(str(x) for x in self._version.local))
+            )
+
+        return "".join(parts)
+
+    @property
+    def public(self):
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        return "".join(parts)
+
+    @property
+    def local(self):
+        version_string = str(self)
+        if "+" in version_string:
+            return version_string.split("+", 1)[1]
+
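+    # Expected behaviour of the surrounding properties (an illustrative
+    # note, not from the original source):
+    #   Version("1.0+ubuntu.1").public == "1.0"
+    #   Version("1.0+ubuntu.1").local == "ubuntu.1"
+    #   Version("1!2.0.dev1").base_version == "1!2.0"
+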
+    @property
+    def is_prerelease(self):
+        return bool(self._version.dev or self._version.pre)
+
+    @property
+    def is_postrelease(self):
+        return bool(self._version.post)
+
+
+def _parse_letter_version(letter, number):
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
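+# Normalisation examples (an illustrative note, not from the original
+# source):
+#   _parse_letter_version("alpha", None) -> ("a", 0)
+#   _parse_letter_version("preview", "2") -> ("rc", 2)
+#   _parse_letter_version(None, "1") -> ("post", 1)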
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll use a reverse the list, drop all the now
+    # leading zeros until we come to something non zero, then take the rest
+    # re-reverse it back into the correct order and make it a tuple and use
+    # that for our sorting key.
+    release = tuple(
+        reversed(list(
+            itertools.dropwhile(
+                lambda x: x == 0,
+                reversed(release),
+            )
+        ))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        pre = -Infinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        pre = Infinity
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        post = -Infinity
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        dev = Infinity
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        local = -Infinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        local = tuple(
+            (i, "") if isinstance(i, int) else (-Infinity, i)
+            for i in local
+        )
+
+    return epoch, release, pre, post, dev, local
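+
+# Net effect of the sentinel substitutions above (an illustrative note,
+# not from the original source):
+#   parse("1.0.dev0") < parse("1.0a1") < parse("1.0") < parse("1.0.post1")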
diff --git a/venv/lib/python3.8/site-packages/buildozer/scripts/__init__.py b/venv/lib/python3.8/site-packages/buildozer/scripts/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..d9164b8
Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/client.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/client.cpython-38.pyc
new file mode 100644
index 0000000..a969465
Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/client.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/remote.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/remote.cpython-38.pyc
new file mode 100644
index 0000000..b448cad
Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/scripts/__pycache__/remote.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/buildozer/scripts/client.py b/venv/lib/python3.8/site-packages/buildozer/scripts/client.py
new file mode 100644
index 0000000..01200c8
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer/scripts/client.py
@@ -0,0 +1,24 @@
+'''
+Main Buildozer client
+=====================
+
+'''
+
+import sys
+from buildozer import Buildozer, BuildozerCommandException, BuildozerException
+
+
+def main():
+    try:
+        Buildozer().run_command(sys.argv[1:])
+    except BuildozerCommandException:
+        # don't show the exception in the command line; the log already
+        # shows that the command failed.
+        sys.exit(1)
+    except BuildozerException as error:
+        Buildozer().error('%s' % error)
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/venv/lib/python3.8/site-packages/buildozer/scripts/remote.py b/venv/lib/python3.8/site-packages/buildozer/scripts/remote.py
new file mode 100644
index 0000000..fd6eee2
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer/scripts/remote.py
@@ -0,0 +1,278 @@
+'''
+Buildozer remote
+================
+
+.. warning::
+
+    This is an experimental tool and not widely used. It might not fit your needs.
+
+Pack and send the source code to a remote SSH server, bundle buildozer with it,
+and start the build on the remote.
+You need paramiko to make it work.
+'''
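+
+# A hypothetical remote definition in buildozer.spec (these are the keys
+# read by run_command() below; names and values are illustrative):
+#
+#   [remote:myserver]
+#   host = build.example.com
+#   port = 22
+#   user = builder
+#   build_directory = remote_builds
+#   identity = ~/.ssh/id_rsa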
+
+__all__ = ["BuildozerRemote"]
+
+import socket
+import sys
+from buildozer import (
+    Buildozer, BuildozerCommandException, BuildozerException, __version__)
+from sys import stdout, stdin, exit
+from select import select
+from os.path import join, expanduser, realpath, exists, splitext
+from os import makedirs, walk, getcwd
+from configparser import ConfigParser
+try:
+    import termios
+    has_termios = True
+except ImportError:
+    has_termios = False
+try:
+    import paramiko
+except ImportError:
+    print('Paramiko missing: pip install paramiko')
+
+
+class BuildozerRemote(Buildozer):
+    def run_command(self, args):
+        while args:
+            if not args[0].startswith('-'):
+                break
+            arg = args.pop(0)
+
+            if arg in ('-v', '--verbose'):
+                self.log_level = 2
+
+            elif arg in ('-p', '--profile'):
+                self.config_profile = args.pop(0)
+
+            elif arg in ('-h', '--help'):
+                self.usage()
+                exit(0)
+
+            elif arg == '--version':
+                print('Buildozer (remote) {0}'.format(__version__))
+                exit(0)
+
+        self._merge_config_profile()
+
+        if len(args) < 2:
+            self.usage()
+            return
+
+        remote_name = args[0]
+        remote_section = 'remote:{}'.format(remote_name)
+        if not self.config.has_section(remote_section):
+            self.error('Unknown remote "{}", must be configured first.'.format(
+                remote_name))
+            return
+
+        self.remote_host = remote_host = self.config.get(
+                remote_section, 'host', '')
+        self.remote_port = self.config.get(
+                remote_section, 'port', '22')
+        self.remote_user = remote_user = self.config.get(
+                remote_section, 'user', '')
+        self.remote_build_dir = remote_build_dir = self.config.get(
+                remote_section, 'build_directory', '')
+        self.remote_identity = self.config.get(
+                remote_section, 'identity', '')
+        if not remote_host:
+            self.error('Missing "host = " for {}'.format(remote_section))
+            return
+        if not remote_user:
+            self.error('Missing "user = " for {}'.format(remote_section))
+            return
+        if not remote_build_dir:
+            self.error('Missing "build_directory = " for {}'.format(remote_section))
+            return
+
+        # fake the target
+        self.targetname = 'remote'
+        self.check_build_layout()
+
+        # prepare our source code
+        self.info('Prepare source code to sync')
+        self._copy_application_sources()
+        self._ssh_connect()
+        try:
+            self._ensure_buildozer()
+            self._sync_application_sources()
+            self._do_remote_commands(args[1:])
+            self._ssh_sync(getcwd(), mode='get')
+        finally:
+            self._ssh_close()
+
+    def _ssh_connect(self):
+        self.info('Connecting to {}'.format(self.remote_host))
+        self._ssh_client = client = paramiko.SSHClient()
+        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        client.load_system_host_keys()
+        kwargs = {}
+        if self.remote_identity:
+            kwargs['key_filename'] = expanduser(self.remote_identity)
+        client.connect(self.remote_host, username=self.remote_user,
+                port=int(self.remote_port), **kwargs)
+        self._sftp_client = client.open_sftp()
+
+    def _ssh_close(self):
+        self.debug('Closing remote connection')
+        self._sftp_client.close()
+        self._ssh_client.close()
+
+    def _ensure_buildozer(self):
+        s = self._sftp_client
+        root_dir = s.normalize('.')
+        self.remote_build_dir = join(root_dir, self.remote_build_dir,
+                self.package_full_name)
+        self.debug('Remote build directory: {}'.format(self.remote_build_dir))
+        self._ssh_mkdir(self.remote_build_dir)
+        self._ssh_sync(__path__[0])  # noqa: F821 undefined name
+
+    def _sync_application_sources(self):
+        self.info('Synchronize application sources')
+        self._ssh_sync(self.app_dir)
+
+        # create custom buildozer.spec
+        self.info('Create custom buildozer.spec')
+        config = ConfigParser()
+        config.read('buildozer.spec')
+        config.set('app', 'source.dir', 'app')
+
+        fn = join(self.remote_build_dir, 'buildozer.spec')
+        fd = self._sftp_client.open(fn, 'wb')
+        config.write(fd)
+        fd.close()
+
+    def _do_remote_commands(self, args):
+        self.info('Execute remote buildozer')
+        cmd = (
+            'source ~/.profile;'
+            'cd {0};'
+            'env PYTHONPATH={0}:$PYTHONPATH '
+            'python -c "import buildozer, sys;'
+            'buildozer.Buildozer().run_command(sys.argv[1:])" {1} {2} 2>&1').format(
+            self.remote_build_dir,
+            '--verbose' if self.log_level == 2 else '',
+            ' '.join(args),
+        )
+        self._ssh_command(cmd)
+
+    def _ssh_mkdir(self, *args):
+        directory = join(*args)
+        self.debug('Create remote directory {}'.format(directory))
+        try:
+            self._sftp_client.mkdir(directory)
+        except IOError:
+            # already created?
+            try:
+                self._sftp_client.stat(directory)
+            except IOError:
+                self.error('Unable to create remote directory {}'.format(directory))
+                raise
+
+    def _ssh_sync(self, directory, mode='put'):
+        self.debug('Syncing {} directory'.format(directory))
+        directory = realpath(expanduser(directory))
+        base_strip = directory.rfind('/')
+        if mode == 'get':
+            local_dir = join(directory, 'bin')
+            remote_dir = join(self.remote_build_dir, 'bin')
+            if not exists(local_dir):
+                makedirs(local_dir)
+            for _file in self._sftp_client.listdir(path=remote_dir):
+                self._sftp_client.get(join(remote_dir, _file),
+                                      join(local_dir, _file))
+            return
+        for root, dirs, files in walk(directory):
+            self._ssh_mkdir(self.remote_build_dir, root[base_strip + 1:])
+            for fn in files:
+                if splitext(fn)[1] in ('.pyo', '.pyc', '.swp'):
+                    continue
+                local_file = join(root, fn)
+                remote_file = join(self.remote_build_dir, root[base_strip + 1:], fn)
+                self.debug('Sync {} -> {}'.format(local_file, remote_file))
+                self._sftp_client.put(local_file, remote_file)
+
+    def _ssh_command(self, command):
+        self.debug('Execute remote command {}'.format(command))
+        transport = self._ssh_client.get_transport()
+        channel = transport.open_session()
+        try:
+            channel.exec_command(command)
+            self._interactive_shell(channel)
+        finally:
+            channel.close()
+
+    def _interactive_shell(self, chan):
+        if has_termios:
+            self._posix_shell(chan)
+        else:
+            self._windows_shell(chan)
+
+    def _posix_shell(self, chan):
+        oldtty = termios.tcgetattr(stdin)
+        try:
+            chan.settimeout(0.0)
+
+            while True:
+                r, w, e = select([chan, stdin], [], [])
+                if chan in r:
+                    try:
+                        x = chan.recv(128)
+                        if len(x) == 0:
+                            print('\r\n*** EOF\r\n',)
+                            break
+                        stdout.write(x)
+                        stdout.flush()
+                    except socket.timeout:
+                        pass
+                if stdin in r:
+                    x = stdin.read(1)
+                    if len(x) == 0:
+                        break
+                    chan.sendall(x)
+        finally:
+            termios.tcsetattr(stdin, termios.TCSADRAIN, oldtty)
+
+    # thanks to Mike Looijmans for this code
+    def _windows_shell(self, chan):
+        import threading
+
+        stdout.write("Line-buffered terminal emulation. Press F6 or ^Z to send EOF.\r\n\r\n")
+
+        def writeall(sock):
+            while True:
+                data = sock.recv(256)
+                if not data:
+                    stdout.write('\r\n*** EOF ***\r\n\r\n')
+                    stdout.flush()
+                    break
+                stdout.write(data)
+                stdout.flush()
+
+        writer = threading.Thread(target=writeall, args=(chan,))
+        writer.start()
+
+        try:
+            while True:
+                d = stdin.read(1)
+                if not d:
+                    break
+                chan.send(d)
+        except EOFError:
+            # user hit ^Z or F6
+            pass
+
+
+def main():
+    try:
+        BuildozerRemote().run_command(sys.argv[1:])
+    except BuildozerCommandException:
+        pass
+    except BuildozerException as error:
+        Buildozer().error('%s' % error)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/venv/lib/python3.8/site-packages/buildozer/sitecustomize.py b/venv/lib/python3.8/site-packages/buildozer/sitecustomize.py
new file mode 100644
index 0000000..6650864
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer/sitecustomize.py
@@ -0,0 +1,3 @@
+from os.path import join, dirname
+import sys
+sys.path.append(join(dirname(__file__), '_applibs'))
diff --git a/venv/lib/python3.8/site-packages/buildozer/target.py b/venv/lib/python3.8/site-packages/buildozer/target.py
new file mode 100644
index 0000000..a70b60f
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer/target.py
@@ -0,0 +1,266 @@
+from sys import exit
+import os
+from os.path import join
+
+
+def no_config(f):
+    f.__no_config = True
+    return f
+
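+# Illustrative use of the decorator above (added note, not from the
+# original source):
+#
+#   @no_config
+#   def cmd_mycommand(self, *args):
+#       ...  # runs without a check_configuration_tokens() pass
+#
+# run_commands() below looks for the `__no_config` attribute before
+# triggering a configuration check.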
+
+class Target:
+    def __init__(self, buildozer):
+        self.buildozer = buildozer
+        self.build_mode = 'debug'
+        self.artifact_format = 'apk'
+        self.platform_update = False
+
+    def check_requirements(self):
+        pass
+
+    def check_configuration_tokens(self, errors=None):
+        if errors:
+            self.buildozer.info('Check target configuration tokens')
+            self.buildozer.error(
+                '{0} error(s) found in the buildozer.spec'.format(
+                len(errors)))
+            for error in errors:
+                print(error)
+            exit(1)
+
+    def compile_platform(self):
+        pass
+
+    def install_platform(self):
+        pass
+
+    def get_custom_commands(self):
+        result = []
+        for x in dir(self):
+            if not x.startswith('cmd_'):
+                continue
+            if x[4:] in self.buildozer.standard_cmds:
+                continue
+            result.append((x[4:], getattr(self, x).__doc__))
+        return result
+
+    def get_available_packages(self):
+        return ['kivy']
+
+    def run_commands(self, args):
+        if not args:
+            self.buildozer.error('Missing target command')
+            self.buildozer.usage()
+            exit(1)
+
+        result = []
+        last_command = []
+        while args:
+            arg = args.pop(0)
+            if arg == '--':
+                if last_command:
+                    last_command += args
+                    break
+            elif not arg.startswith('--'):
+                if last_command:
+                    result.append(last_command)
+                    last_command = []
+                last_command.append(arg)
+            else:
+                if not last_command:
+                    self.buildozer.error('Argument passed without a command')
+                    self.buildozer.usage()
+                    exit(1)
+                last_command.append(arg)
+        if last_command:
+            result.append(last_command)
+
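+        # Illustrative trace (added note): arguments such as
+        #   ['debug', '--verbose', 'deploy']
+        # are grouped by the loop above into
+        #   [['debug', '--verbose'], ['deploy']]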
+        config_check = False
+
+        for item in result:
+            command, args = item[0], item[1:]
+            if not hasattr(self, 'cmd_{0}'.format(command)):
+                self.buildozer.error('Unknown command {0}'.format(command))
+                exit(1)
+
+            func = getattr(self, 'cmd_{0}'.format(command))
+
+            need_config_check = not hasattr(func, '__no_config')
+            if need_config_check and not config_check:
+                config_check = True
+                self.check_configuration_tokens()
+
+            func(args)
+
+    def cmd_clean(self, *args):
+        self.buildozer.clean_platform()
+
+    def cmd_update(self, *args):
+        self.platform_update = True
+        self.buildozer.prepare_for_build()
+
+    def cmd_debug(self, *args):
+        self.buildozer.prepare_for_build()
+        self.build_mode = 'debug'
+        self.artifact_format = self.buildozer.config.getdefault('app', 'android.debug_artifact', 'apk')
+        self.buildozer.build()
+
+    def cmd_release(self, *args):
+        error = self.buildozer.error
+        self.buildozer.prepare_for_build()
+        if self.buildozer.config.get("app", "package.domain") == "org.test":
+            error("")
+            error("ERROR: Trying to release a package that starts with org.test")
+            error("")
+            error("The package.domain org.test is, as the name intented, a test.")
+            error("Once you published an application with org.test,")
+            error("you cannot change it, it will be part of the identifier")
+            error("for Google Play / App Store / etc.")
+            error("")
+            error("So change package.domain to anything else.")
+            error("")
+            error("If you messed up before, set the environment variable to force the build:")
+            error("export BUILDOZER_ALLOW_ORG_TEST_DOMAIN=1")
+            error("")
+            if "BUILDOZER_ALLOW_ORG_TEST_DOMAIN" not in os.environ:
+                exit(1)
+
+        if self.buildozer.config.get("app", "package.domain") == "org.kivy":
+            error("")
+            error("ERROR: Trying to release a package that starts with org.kivy")
+            error("")
+            error("The package.domain org.kivy is reserved for the Kivy official")
+            error("applications. Please use your own domain.")
+            error("")
+            error("If you are a Kivy developer, add an export in your shell")
+            error("export BUILDOZER_ALLOW_KIVY_ORG_DOMAIN=1")
+            error("")
+            if "BUILDOZER_ALLOW_KIVY_ORG_DOMAIN" not in os.environ:
+                exit(1)
+
+        self.build_mode = 'release'
+        self.artifact_format = self.buildozer.config.getdefault('app', 'android.release_artifact', 'aab')
+        self.buildozer.build()
+
+    def cmd_deploy(self, *args):
+        self.buildozer.prepare_for_build()
+
+    def cmd_run(self, *args):
+        self.buildozer.prepare_for_build()
+
+    def cmd_serve(self, *args):
+        self.buildozer.cmd_serve()
+
+    def path_or_git_url(self, repo, owner='kivy', branch='master',
+                        url_format='https://github.com/{owner}/{repo}.git',
+                        platform=None,
+                        squash_hyphen=True):
+        """Get source location for a git checkout
+
+        This method will check the `buildozer.spec` for the keys:
+            {repo}_dir
+            {repo}_url
+            {repo}_branch
+
+        and use them to determine the source location for a git checkout.
+
+        If a `platform` is specified, {platform}.{repo} will be used
+        as the base for the buildozer key
+
+        `{repo}_dir` specifies a custom checkout location
+        (relative to `buildozer.root_dir`). If present, `path` will be
+        set to this value and `url`, `branch` will be set to None,
+        None. Otherwise, `{repo}_url` and `{repo}_branch` will be
+        examined.
+
+        If no keys are present, the kwargs will be used to create
+        a sensible default URL and branch.
+
+        :Parameters:
+            `repo`: str (required)
+                name of repository to fetch. Used both for buildozer
+                keys ({platform}.{repo}_dir|_url|_branch) and in building
+                default git URL
+            `branch`: str (default 'master')
+                Specific branch to retrieve if none specified in
+                buildozer.spec.
+            `owner`: str
+                owner of repo.
+            `platform`: str or None
+                platform prefix to use when retrieving `buildozer.spec`
+                keys. If specified, key names will be {platform}.{repo}
+                instead of just {repo}
+            `squash_hyphen`: boolean
+                if True, change '-' to '_' when looking for
+                keys in buildozer.spec. This lets us keep backwards
+                compatibility with old buildozer.spec files
+            `url_format`: format string
+                Used to construct default git URL.
+                can use {repo} {owner} and {branch} if needed.
+
+        :Returns:
+            A Tuple (path, url, branch) where
+                `path`
+                    Path to a custom git checkout. If specified,
+                    both `url` and `branch` will be None
+                `url`
+                    URL of git repository from where code should be
+                    checked-out
+                `branch`
+                    branch name (or tag) that should be used for the
+                    check-out.
+
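+        For instance (hypothetical values): with platform='ios' and
+        repo='kivy-ios', the keys consulted would be ios.kivy_ios_dir,
+        ios.kivy_ios_url and ios.kivy_ios_branch.
+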
+        """
+        if squash_hyphen:
+            key = repo.replace('-', '_')
+        else:
+            key = repo
+        if platform:
+            key = "{}.{}".format(platform, key)
+        config = self.buildozer.config
+        path = config.getdefault('app', '{}_dir'.format(key), None)
+
+        if path is not None:
+            path = join(self.buildozer.root_dir, path)
+            url = None
+            branch = None
+        else:
+            branch = config.getdefault('app', '{}_branch'.format(key), branch)
+            default_url = url_format.format(owner=owner, repo=repo, branch=branch)
+            url = config.getdefault('app', '{}_url'.format(key), default_url)
+            if branch != 'master':
+                url = "--branch {} {}".format(branch, url)
+        return path, url, branch
+
+    def install_or_update_repo(self, repo, **kwargs):
+        """Install or update a git repository into the platform directory.
+
+        This will clone the contents of a git repository to
+        `buildozer.platform_dir`. The location of this repo can be
+        specified via URL and branch name, or via a custom (local)
+        directory name.
+
+        :Parameters:
+            **kwargs:
+                Any valid arguments for :meth:`path_or_git_url`
+
+        :Returns:
+            fully qualified path to updated git repo
+        """
+        cmd = self.buildozer.cmd
+        install_dir = join(self.buildozer.platform_dir, repo)
+        custom_dir, clone_url, clone_branch = self.path_or_git_url(repo, **kwargs)
+        if not self.buildozer.file_exists(install_dir):
+            if custom_dir:
+                cmd('mkdir -p "{}"'.format(install_dir))
+                cmd('cp -a "{}"/* "{}"/'.format(custom_dir, install_dir))
+            else:
+                cmd('git clone {}'.format(clone_url),
+                        cwd=self.buildozer.platform_dir)
+        elif self.platform_update:
+            if custom_dir:
+                cmd('cp -a "{}"/* "{}"/'.format(custom_dir, install_dir))
+            else:
+                cmd('git clean -dxf', cwd=install_dir)
+                cmd('git pull origin {}'.format(clone_branch), cwd=install_dir)
+        return install_dir
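+
+# Hedged usage sketch (identifiers illustrative, not from the source):
+# a concrete target could call
+#   self.install_or_update_repo('kivy-ios', platform='ios')
+# to clone or refresh that checkout under buildozer.platform_dir.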
diff --git a/venv/lib/python3.8/site-packages/buildozer/targets/__init__.py b/venv/lib/python3.8/site-packages/buildozer/targets/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..19e6a38
Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/android.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/android.cpython-38.pyc
new file mode 100644
index 0000000..2441c18
Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/android.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/ios.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/ios.cpython-38.pyc
new file mode 100644
index 0000000..897ac7a
Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/ios.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/osx.cpython-38.pyc b/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/osx.cpython-38.pyc
new file mode 100644
index 0000000..8144c36
Binary files /dev/null and b/venv/lib/python3.8/site-packages/buildozer/targets/__pycache__/osx.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/buildozer/targets/android.py b/venv/lib/python3.8/site-packages/buildozer/targets/android.py
new file mode 100644
index 0000000..31d5ad4
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer/targets/android.py
@@ -0,0 +1,1517 @@
+'''
+Android target, based on python-for-android project
+'''
+
+import sys
+if sys.platform == 'win32':
+    raise NotImplementedError('Windows platform not yet working for Android')
+
+from platform import uname
+WSL = 'microsoft' in uname()[2].lower()
+
+ANDROID_API = '27'
+ANDROID_MINAPI = '21'
+APACHE_ANT_VERSION = '1.9.4'
+
+# This constant should *not* be updated; it is used only in the case
+# that python-for-android cannot provide a recommendation, which in
+# turn only happens if python-for-android is old and probably
+# doesn't support any newer NDK.
+DEFAULT_ANDROID_NDK_VERSION = '17c'
+
+import traceback
+import os
+import io
+import re
+import ast
+from pipes import quote
+from sys import platform, executable
+from buildozer import BuildozerException, USE_COLOR
+from buildozer.target import Target
+from os import environ
+from os.path import exists, join, realpath, expanduser, basename, relpath
+from platform import architecture
+from shutil import copyfile, rmtree
+from glob import glob
+from time import sleep
+
+from buildozer.libs.version import parse
+from distutils.version import LooseVersion
+
+# buildozer.spec tokens that used to exist but are now ignored
+DEPRECATED_TOKENS = (('app', 'android.sdk'), )
+
+# Default SDK tag to download. This is not a configurable option
+# because it doesn't seem to matter much, it is normally correct to
+# download once then update all the components as buildozer already
+# does.
+DEFAULT_SDK_TAG = '6514223'
+
+DEFAULT_ARCHS = ['arm64-v8a', 'armeabi-v7a']
+
+MSG_P4A_RECOMMENDED_NDK_ERROR = (
+    "WARNING: Unable to find recommended Android NDK for current "
+    "installation of python-for-android, defaulting to the default "
+    "version r{android_ndk}".format(android_ndk=DEFAULT_ANDROID_NDK_VERSION)
+)
+
+
+class TargetAndroid(Target):
+    targetname = 'android'
+    p4a_directory_name = "python-for-android"
+    p4a_fork = 'kivy'
+    p4a_branch = 'master'
+    p4a_commit = 'HEAD'
+    p4a_recommended_ndk_version = None
+    extra_p4a_args = ''
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if self.buildozer.config.has_option(
+            "app", "android.arch"
+        ) and not self.buildozer.config.has_option("app", "android.archs"):
+            self.buildozer.error("`android.archs` not detected, instead `android.arch` is present.")
+            self.buildozer.error("`android.arch` will be removed and ignored in future.")
+            self.buildozer.error("If you're seeing this error, please migrate to `android.archs`.")
+            self._archs = self.buildozer.config.getlist(
+                'app', 'android.arch', DEFAULT_ARCHS)
+        else:
+            self._archs = self.buildozer.config.getlist(
+                'app', 'android.archs', DEFAULT_ARCHS)
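+        # Migration sketch (illustrative): in buildozer.spec replace
+        #   android.arch = armeabi-v7a
+        # with
+        #   android.archs = arm64-v8a, armeabi-v7a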
+        self._build_dir = join(
+            self.buildozer.platform_dir, 'build-{}'.format(self.archs_snake))
+        executable = sys.executable or 'python'
+        self._p4a_cmd = '{} -m pythonforandroid.toolchain '.format(executable)
+        self._p4a_bootstrap = self.buildozer.config.getdefault(
+            'app', 'p4a.bootstrap', 'sdl2')
+        color = 'always' if USE_COLOR else 'never'
+        self.extra_p4a_args = ' --color={} --storage-dir="{}"'.format(
+            color, self._build_dir)
+
+        # minapi should match ndk-api, so we can use the same default if
+        # nothing is specified
+        ndk_api = self.buildozer.config.getdefault(
+            'app', 'android.ndk_api', self.android_minapi)
+        self.extra_p4a_args += ' --ndk-api={}'.format(ndk_api)
+
+        hook = self.buildozer.config.getdefault("app", "p4a.hook", None)
+        if hook is not None:
+            self.extra_p4a_args += ' --hook={}'.format(realpath(expanduser(hook)))
+        port = self.buildozer.config.getdefault('app', 'p4a.port', None)
+        if port is not None:
+            self.extra_p4a_args += ' --port={}'.format(port)
+
+        setup_py = self.buildozer.config.getdefault('app', 'p4a.setup_py', False)
+        if setup_py:
+            self.extra_p4a_args += ' --use-setup-py'
+        else:
+            self.extra_p4a_args += ' --ignore-setup-py'
+
+        activity_class_name = self.buildozer.config.getdefault(
+            'app', 'android.activity_class_name', 'org.kivy.android.PythonActivity')
+        if activity_class_name != 'org.kivy.android.PythonActivity':
+            self.extra_p4a_args += ' --activity-class-name={}'.format(activity_class_name)
+
+        if self.buildozer.log_level >= 2:
+            self.extra_p4a_args += ' --debug'
+
+        user_extra_p4a_args = self.buildozer.config.getdefault('app', 'p4a.extra_args',
+                                                               None)
+        if user_extra_p4a_args:
+            self.extra_p4a_args += ' ' + user_extra_p4a_args
+
+        self.warn_on_deprecated_tokens()
+
+    def warn_on_deprecated_tokens(self):
+        for section, token in DEPRECATED_TOKENS:
+            value = self.buildozer.config.getdefault(section, token, None)
+            if value is not None:
+                error = ('WARNING: Config token {} {} is deprecated and ignored, '
+                         'but you set value {}').format(section, token, value)
+                self.buildozer.error(error)
+
+    def _p4a(self, cmd, **kwargs):
+        kwargs.setdefault('cwd', self.p4a_dir)
+        return self.buildozer.cmd(self._p4a_cmd + cmd + self.extra_p4a_args, **kwargs)
+
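+    # For illustration, a call like self._p4a("create ...") runs a command
+    # of roughly this shape (paths and values illustrative):
+    #   python -m pythonforandroid.toolchain create ... --color=always \
+    #       --storage-dir="<platform_dir>/build-<archs_snake>" --ndk-api=<ndk_api>
+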
+    @property
+    def p4a_dir(self):
+        """The directory where python-for-android is/will be installed."""
+
+        # Default p4a dir
+        p4a_dir = join(self.buildozer.platform_dir, self.p4a_directory_name)
+
+        # Possibly overridden by user setting
+        system_p4a_dir = self.buildozer.config.getdefault('app', 'p4a.source_dir')
+        if system_p4a_dir:
+            p4a_dir = expanduser(system_p4a_dir)
+
+        return p4a_dir
+
+    @property
+    def p4a_recommended_android_ndk(self):
+        """
+        Return p4a's recommended Android NDK version for the p4a checkout
+        used by this buildozer build. If it cannot be determined, fall
+        back to buildozer's recommended one, defined by the global
+        variable `DEFAULT_ANDROID_NDK_VERSION`.
+        """
+        # make sure to read p4a version only the first time
+        if self.p4a_recommended_ndk_version is not None:
+            return self.p4a_recommended_ndk_version
+
+        # check p4a's recommendations file and, if it exists, read the
+        # recommended Android NDK version from it; otherwise fall back to
+        # buildozer's default
+        ndk_version = DEFAULT_ANDROID_NDK_VERSION
+        rec_file = join(self.p4a_dir, "pythonforandroid", "recommendations.py")
+        if not os.path.isfile(rec_file):
+            self.buildozer.error(MSG_P4A_RECOMMENDED_NDK_ERROR)
+            return ndk_version
+
+        for line in open(rec_file, "r"):
+            if line.startswith("RECOMMENDED_NDK_VERSION ="):
+                ndk_version = line.replace(
+                    "RECOMMENDED_NDK_VERSION =", "")
+                # clean version of unwanted characters
+                for i in {"'", '"', "\n", " "}:
+                    ndk_version = ndk_version.replace(i, "")
+                self.buildozer.info(
+                    "Recommended android's NDK version by p4a is: {}".format(
+                        ndk_version
+                    )
+                )
+                self.p4a_recommended_ndk_version = ndk_version
+                break
+        return ndk_version
+
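+    # The loop above expects pythonforandroid/recommendations.py to contain
+    # a line of this shape (version illustrative):
+    #   RECOMMENDED_NDK_VERSION = "23b"
+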
+    def _sdkmanager(self, *args, **kwargs):
+        """Call the sdkmanager in our Android SDK with the given arguments."""
+        # Use the android-sdk dir as cwd by default
+        android_sdk_dir = self.android_sdk_dir
+        kwargs['cwd'] = kwargs.get('cwd', android_sdk_dir)
+        sdkmanager_path = self.sdkmanager_path
+        sdk_root = f"--sdk_root={android_sdk_dir}"
+        command = f"{sdkmanager_path} {sdk_root} " + ' '.join(args)
+        return_child = kwargs.pop('return_child', False)
+        if return_child:
+            return self.buildozer.cmd_expect(command, **kwargs)
+        else:
+            kwargs['get_stdout'] = kwargs.get('get_stdout', True)
+            return self.buildozer.cmd(command, **kwargs)
+
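+    # e.g. self._sdkmanager('--list') executes roughly (paths illustrative):
+    #   <android_sdk_dir>/tools/bin/sdkmanager --sdk_root=<android_sdk_dir> --list
+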
+    @property
+    def android_ndk_version(self):
+        return self.buildozer.config.getdefault('app', 'android.ndk',
+                                                self.p4a_recommended_android_ndk)
+
+    @property
+    def android_api(self):
+        return self.buildozer.config.getdefault('app', 'android.api',
+                                                ANDROID_API)
+
+    @property
+    def android_minapi(self):
+        return self.buildozer.config.getdefault('app', 'android.minapi',
+                                                ANDROID_MINAPI)
+
+    @property
+    def android_sdk_dir(self):
+        directory = expanduser(self.buildozer.config.getdefault(
+            'app', 'android.sdk_path', ''))
+        if directory:
+            return realpath(directory)
+        return join(self.buildozer.global_platform_dir,
+                    'android-sdk')
+
+    @property
+    def android_ndk_dir(self):
+        directory = expanduser(self.buildozer.config.getdefault(
+            'app', 'android.ndk_path', ''))
+        if directory:
+            return realpath(directory)
+        version = self.buildozer.config.getdefault('app', 'android.ndk',
+                                                   self.android_ndk_version)
+        return join(self.buildozer.global_platform_dir,
+                    'android-ndk-r{0}'.format(version))
+
+    @property
+    def apache_ant_dir(self):
+        directory = expanduser(self.buildozer.config.getdefault(
+            'app', 'android.ant_path', ''))
+        if directory:
+            return realpath(directory)
+        version = self.buildozer.config.getdefault('app', 'android.ant',
+                                                   APACHE_ANT_VERSION)
+        return join(self.buildozer.global_platform_dir,
+                    'apache-ant-{0}'.format(version))
+
+    @property
+    def sdkmanager_path(self):
+        sdkmanager_path = join(
+            self.android_sdk_dir, 'tools', 'bin', 'sdkmanager')
+        if not os.path.isfile(sdkmanager_path):
+            raise BuildozerException(
+                ('sdkmanager path "{}" does not exist, sdkmanager is not'
+                 'installed'.format(sdkmanager_path)))
+        return sdkmanager_path
+
+    @property
+    def archs_snake(self):
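+        # e.g. ['arm64-v8a', 'armeabi-v7a'] -> 'arm64-v8a_armeabi-v7a'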
+        return "_".join(self._archs)
+
+    def check_requirements(self):
+        if platform in ('win32', 'cygwin'):
+            try:
+                self._set_win32_java_home()
+            except Exception:
+                traceback.print_exc()
+            self.adb_cmd = join(self.android_sdk_dir, 'platform-tools',
+                                'adb.exe')
+            self.javac_cmd = self._locate_java('javac.exe')
+            self.keytool_cmd = self._locate_java('keytool.exe')
+        # darwin, linux
+        else:
+            self.adb_cmd = join(self.android_sdk_dir, 'platform-tools', 'adb')
+            self.javac_cmd = self._locate_java('javac')
+            self.keytool_cmd = self._locate_java('keytool')
+
+            # Check for the C header <zlib.h> on Debian-based systems.
+            _, _, returncode_dpkg = self.buildozer.cmd('dpkg --version',
+                                                       break_on_error=False)
+            is_debian_like = (returncode_dpkg == 0)
+            if is_debian_like and \
+                    not self.buildozer.file_exists('/usr/include/zlib.h'):
+                raise BuildozerException(
+                    'zlib headers must be installed, '
+                    'run: sudo apt-get install zlib1g-dev')
+
+        # Adb arguments:
+        adb_args = self.buildozer.config.getdefault(
+            "app", "android.adb_args", None)
+        if adb_args is not None:
+            self.adb_cmd += ' ' + adb_args
+
+        # Need to add internally installed ant to path for external tools
+        # like adb to use
+        path = [join(self.apache_ant_dir, 'bin')]
+        if 'PATH' in self.buildozer.environ:
+            path.append(self.buildozer.environ['PATH'])
+        else:
+            path.append(os.environ['PATH'])
+        self.buildozer.environ['PATH'] = ':'.join(path)
+        checkbin = self.buildozer.checkbin
+        checkbin('Git (git)', 'git')
+        checkbin('Cython (cython)', 'cython')
+        checkbin('Java compiler (javac)', self.javac_cmd)
+        checkbin('Java keytool (keytool)', self.keytool_cmd)
+
+    def check_configuration_tokens(self):
+        errors = []
+
+        # check the permission
+        available_permissions = self._get_available_permissions()
+        if available_permissions:
+            permissions = self.buildozer.config.getlist(
+                'app', 'android.permissions', [])
+            for permission in permissions:
+                # fully-qualified permission names (containing a dot), e.g.
+                # com.google.android.providers.gsf.permission.READ_GSERVICES,
+                # are not checked
+                if '.' in permission:
+                    continue
+                permission = permission.upper()
+                if permission not in available_permissions:
+                    errors.append(
+                        '[app] "android.permission" contain an unknown'
+                        ' permission {0}'.format(permission))
+
+        super().check_configuration_tokens(errors)
+
+    def _p4a_have_aab_support(self):
+        returncode = self._p4a("aab -h", break_on_error=False, show_output=False)[2]
+        return returncode == 0
+
+    def _get_available_permissions(self):
+        key = 'android:available_permissions'
+        key_sdk = 'android:available_permissions_sdk'
+
+        current_platform_tools = self._android_get_installed_platform_tools_version()
+
+        refresh_permissions = False
+        sdk = self.buildozer.state.get(key_sdk, None)
+        if not sdk or sdk != current_platform_tools:
+            refresh_permissions = True
+        if key not in self.buildozer.state:
+            refresh_permissions = True
+        if not refresh_permissions:
+            return self.buildozer.state[key]
+
+        try:
+            self.buildozer.debug(
+                'Read available permissions from api-versions.xml')
+            import xml.etree.ElementTree as ET
+            fn = join(self.android_sdk_dir, 'platform-tools', 'api',
+                      'api-versions.xml')
+            with io.open(fn, encoding='utf-8') as fd:
+                doc = ET.fromstring(fd.read())
+            fields = doc.findall(
+                './/class[@name="android/Manifest$permission"]/field[@name]')
+            available_permissions = [x.attrib['name'] for x in fields]
+
+            self.buildozer.state[key] = available_permissions
+            self.buildozer.state[key_sdk] = current_platform_tools
+            return available_permissions
+        except Exception:
+            return None
+
+    def _set_win32_java_home(self):
+        if 'JAVA_HOME' in self.buildozer.environ:
+            return
+        import _winreg
+        with _winreg.OpenKey(
+                _winreg.HKEY_LOCAL_MACHINE,
+                r"SOFTWARE\JavaSoft\Java Development Kit") as jdk:  # @UndefinedVariable
+            current_version, _type = _winreg.QueryValueEx(
+                jdk, "CurrentVersion")  # @UndefinedVariable
+            with _winreg.OpenKey(jdk, current_version) as cv:  # @UndefinedVariable
+                java_home, _type = _winreg.QueryValueEx(
+                    cv, "JavaHome")  # @UndefinedVariable
+            self.buildozer.environ['JAVA_HOME'] = java_home
+
+    def _locate_java(self, s):
+        '''If JAVA_HOME is in the environ, return $JAVA_HOME/bin/s. Otherwise,
+        return s.
+        '''
+        if 'JAVA_HOME' in self.buildozer.environ:
+            return join(self.buildozer.environ['JAVA_HOME'], 'bin', s)
+        else:
+            return s
+
+    def _install_apache_ant(self):
+        ant_dir = self.apache_ant_dir
+        if self.buildozer.file_exists(ant_dir):
+            self.buildozer.info('Apache ANT found at {0}'.format(ant_dir))
+            return ant_dir
+
+        if not os.path.exists(ant_dir):
+            os.makedirs(ant_dir)
+
+        self.buildozer.info('Apache ANT is missing, downloading')
+        archive = 'apache-ant-{0}-bin.tar.gz'.format(APACHE_ANT_VERSION)
+        url = 'https://archive.apache.org/dist/ant/binaries/'
+        self.buildozer.download(url,
+                                archive,
+                                cwd=ant_dir)
+        self.buildozer.file_extract(archive,
+                                    cwd=ant_dir)
+        self.buildozer.info('Apache ANT installation done.')
+        return ant_dir
+
+    def _install_android_sdk(self):
+        sdk_dir = self.android_sdk_dir
+        if self.buildozer.file_exists(sdk_dir):
+            self.buildozer.info('Android SDK found at {0}'.format(sdk_dir))
+            return sdk_dir
+
+        self.buildozer.info('Android SDK is missing, downloading')
+        if platform in ('win32', 'cygwin'):
+            archive = 'commandlinetools-win-{}_latest.zip'.format(DEFAULT_SDK_TAG)
+        elif platform in ('darwin', ):
+            archive = 'commandlinetools-mac-{}_latest.zip'.format(DEFAULT_SDK_TAG)
+        elif platform.startswith('linux'):
+            archive = 'commandlinetools-linux-{}_latest.zip'.format(DEFAULT_SDK_TAG)
+        else:
+            raise SystemError('Unsupported platform: {0}'.format(platform))
+
+        if not os.path.exists(sdk_dir):
+            os.makedirs(sdk_dir)
+
+        url = 'https://dl.google.com/android/repository/'
+        self.buildozer.download(url,
+                                archive,
+                                cwd=sdk_dir)
+
+        self.buildozer.info('Unpacking Android SDK')
+        self.buildozer.file_extract(archive,
+                                    cwd=sdk_dir)
+
+        self.buildozer.info('Android SDK tools base installation done.')
+
+        return sdk_dir
+
+    def _install_android_ndk(self):
+        ndk_dir = self.android_ndk_dir
+        if self.buildozer.file_exists(ndk_dir):
+            self.buildozer.info('Android NDK found at {0}'.format(ndk_dir))
+            return ndk_dir
+
+        import re
+        _version = int(re.search(r'(\d+)', self.android_ndk_version).group(1))
+
+        self.buildozer.info('Android NDK is missing, downloading')
+        # Welcome to the NDK URL hell!
+        # a list of all NDK URLs up to level 14 can be found here:
+        #  https://gist.github.com/roscopecoltran/43861414fbf341adac3b6fa05e7fad08
+        # it seems that from release 11 on the naming scheme is consistent
+        # from 10e on the URLs can be looked up at
+        # https://developer.android.com/ndk/downloads/older_releases
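+        # e.g. a modern (r23+) download URL looks like this (illustrative):
+        #   https://dl.google.com/android/repository/android-ndk-r25b-linux.zip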
+
+        is_darwin = platform == 'darwin'
+        is_linux = platform.startswith('linux')
+
+        if platform in ('win32', 'cygwin'):
+            # Check for 32/64-bit Windows, per https://stackoverflow.com/a/1405971/798575
+            import struct
+            archive = 'android-ndk-r{0}-windows-{1}.zip'
+            is_64 = (8 * struct.calcsize("P") == 64)
+        elif is_darwin or is_linux:
+            _platform = 'linux' if is_linux else 'darwin'
+            if self.android_ndk_version in ['10c', '10d', '10e']:
+                ext = 'bin'
+            elif _version <= 10:
+                ext = 'tar.bz2'
+            else:
+                ext = 'zip'
+            archive = 'android-ndk-r{0}-' + _platform + '{1}.' + ext
+            is_64 = (os.uname()[4] == 'x86_64')
+        else:
+            raise SystemError('Unsupported platform: {}'.format(platform))
+
+        architecture = 'x86_64' if is_64 else 'x86'
+        architecture = '' if _version >= 23 else f'-{architecture}'
+        unpacked = 'android-ndk-r{0}'
+        archive = archive.format(self.android_ndk_version, architecture)
+        unpacked = unpacked.format(self.android_ndk_version)
+
+        if _version >= 11:
+            url = 'https://dl.google.com/android/repository/'
+        else:
+            url = 'https://dl.google.com/android/ndk/'
+
+        self.buildozer.download(url,
+                                archive,
+                                cwd=self.buildozer.global_platform_dir)
+
+        self.buildozer.info('Unpacking Android NDK')
+        self.buildozer.file_extract(archive,
+                                    cwd=self.buildozer.global_platform_dir)
+        self.buildozer.file_rename(unpacked,
+                                   ndk_dir,
+                                   cwd=self.buildozer.global_platform_dir)
+        self.buildozer.info('Android NDK installation done.')
+        return ndk_dir
+
+    def _android_list_build_tools_versions(self):
+        available_packages = self._sdkmanager('--list')
+
+        lines = available_packages[0].split('\n')
+
+        build_tools_versions = []
+
+        for line in lines:
+            if not line.strip().startswith('build-tools;'):
+                continue
+            package_name = line.strip().split(' ')[0]
+            assert package_name.count(';') == 1, (
+                'could not parse package "{}"'.format(package_name))
+            version = package_name.split(';')[1]
+
+            build_tools_versions.append(parse(version))
+
+        return build_tools_versions
+
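+    # The parser above consumes `sdkmanager --list` lines shaped like this
+    # (version illustrative):
+    #   build-tools;33.0.2 | 33.0.2 | Android SDK Build-Tools 33.0.2
+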
+    def _android_get_installed_platform_tools_version(self):
+        """
+        Crudely parse out the installed platform-tools version
+        """
+
+        platform_tools_dir = os.path.join(
+            self.android_sdk_dir,
+            'platform-tools')
+
+        if not os.path.exists(platform_tools_dir):
+            return None
+
+        data_file = os.path.join(platform_tools_dir, 'source.properties')
+        if not os.path.exists(data_file):
+            return None
+
+        with open(data_file, 'r') as fileh:
+            lines = fileh.readlines()
+
+        for line in lines:
+            if line.startswith('Pkg.Revision='):
+                break
+        else:
+            self.buildozer.error('Read {} but found no Pkg.Revision'.format(data_file))
+            # Don't actually exit, in case the build env is
+            # okay. Something else will fault if it's important.
+            return None
+
+        revision = line.split('=')[1].strip()
+
+        return revision
+
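+    # platform-tools/source.properties typically contains a line like this
+    # (value illustrative):
+    #   Pkg.Revision=34.0.4
+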
+    def _android_update_sdk(self, *sdkmanager_commands):
+        """Update the tools and package-tools if possible"""
+        auto_accept_license = self.buildozer.config.getbooldefault(
+            'app', 'android.accept_sdk_license', False)
+
+        kwargs = {}
+        if auto_accept_license:
+            # `SIGPIPE` is not being reported somehow, but `EPIPE` is.
+            # This leads to a stderr "Broken pipe" message which is harmless,
+            # but doesn't look good on terminal, hence redirecting to /dev/null
+            yes_command = 'yes 2>/dev/null'
+            android_sdk_dir = self.android_sdk_dir
+            sdkmanager_path = self.sdkmanager_path
+            sdk_root = f"--sdk_root={android_sdk_dir}"
+            command = f"{yes_command} | {sdkmanager_path} {sdk_root} --licenses"
+            self.buildozer.cmd(command, cwd=self.android_sdk_dir)
+        else:
+            kwargs['show_output'] = True
+
+        self._sdkmanager(*sdkmanager_commands, **kwargs)
+
+    def _read_version_subdir(self, *args):
+        versions = []
+        if not os.path.exists(join(*args)):
+            self.buildozer.debug('build-tools folder not found {}'.format(join(
+                *args)))
+            return parse("0")
+        for v in os.listdir(join(*args)):
+            try:
+                versions.append(parse(v))
+            except Exception:
+                pass
+        if not versions:
+            self.buildozer.error(
+                'Unable to find the latest version for {}'.format(join(*args)))
+            return parse("0")
+        return max(versions)
+
+    def _find_latest_package(self, packages, key):
+        package_versions = []
+        for p in packages:
+            if not p.startswith(key):
+                continue
+            version_string = p.split(key)[-1]
+            version = parse(version_string)
+            package_versions.append(version)
+        if not package_versions:
+            return
+        return max(package_versions)
+
+    def _install_android_packages(self):
+
+        # if any of these values changes in buildozer.spec, redo the
+        # update
+        cache_key = 'android:sdk_installation'
+        cache_value = [
+            self.android_api, self.android_minapi, self.android_ndk_version,
+            self.android_sdk_dir, self.android_ndk_dir
+        ]
+        if self.buildozer.state.get(cache_key, None) == cache_value:
+            return True
+
+        # 1. update the platform-tools package if needed
+
+        skip_upd = self.buildozer.config.getbooldefault(
+            'app', 'android.skip_update', False)
+
+        if not skip_upd:
+            self.buildozer.info('Installing/updating SDK platform tools if necessary')
+
+            # just calling sdkmanager with the items will install them if necessary
+            self._android_update_sdk('platform-tools')
+            self._android_update_sdk('--update')
+        else:
+            self.buildozer.info('Skipping Android SDK update due to spec file setting')
+            self.buildozer.info('Note: this also prevents installing missing '
+                                'SDK components')
+
+        # 2. install the latest build tools
+        self.buildozer.info('Updating SDK build tools if necessary')
+        installed_v_build_tools = self._read_version_subdir(
+            self.android_sdk_dir, 'build-tools')
+        available_v_build_tools = self._android_list_build_tools_versions()
+        if not available_v_build_tools:
+            self.buildozer.error('Did not find any build tools available to download')
+            raise BuildozerException()
+
+        latest_v_build_tools = sorted(available_v_build_tools)[-1]
+        if latest_v_build_tools > installed_v_build_tools:
+            if not skip_upd:
+                self._android_update_sdk(
+                    '"build-tools;{}"'.format(latest_v_build_tools))
+                installed_v_build_tools = latest_v_build_tools
+            else:
+                self.buildozer.info(
+                    'Skipping update to build tools {} due to spec setting'.format(
+                        latest_v_build_tools))
+
+        # 3. check that aidl can be executed
+        self._check_aidl(installed_v_build_tools)
+
+        # 4. finally, install the Android platform for the target API
+        self.buildozer.info('Downloading platform api target if necessary')
+        android_platform = join(self.android_sdk_dir, 'platforms', 'android-{}'.format(self.android_api))
+        if not self.buildozer.file_exists(android_platform):
+            if not skip_upd:
+                self._sdkmanager('"platforms;android-{}"'.format(self.android_api))
+            else:
+                self.buildozer.info(
+                    'Skipping installation of the android-{} platform due to spec setting'.format(
+                        self.android_api))
+
+        self.buildozer.info('Android packages installation done.')
+
+        self.buildozer.state[cache_key] = cache_value
+        self.buildozer.state.sync()
+
+    def _check_aidl(self, v_build_tools):
+        self.buildozer.debug('Check that aidl can be executed')
+        # re-read the installed version in case build-tools were just
+        # installed or updated above
+        v_build_tools = self._read_version_subdir(self.android_sdk_dir,
+                                                  'build-tools')
+        aidl_cmd = join(self.android_sdk_dir, 'build-tools',
+                        str(v_build_tools), 'aidl')
+        self.buildozer.checkbin('Aidl', aidl_cmd)
+        _, _, returncode = self.buildozer.cmd(aidl_cmd,
+                                              break_on_error=False,
+                                              show_output=False)
+        # aidl with no arguments prints its usage and exits with code 1;
+        # any other return code means it could not be executed at all
+        if returncode != 1:
+            self.buildozer.error('Aidl cannot be executed')
+            if architecture()[0] == '64bit':
+                self.buildozer.error('')
+                self.buildozer.error(
+                    'You may have missed installing the 32-bit libraries')
+                self.buildozer.error(
+                    'Check https://buildozer.readthedocs.org/en/latest/installation.html')
+                self.buildozer.error('')
+            else:
+                self.buildozer.error('')
+                self.buildozer.error(
+                    'In case of a bug report, please add a full log with log_level = 2')
+                self.buildozer.error('')
+            raise BuildozerException()
+
+    def install_platform(self):
+        self._install_p4a()
+        self._install_apache_ant()
+        self._install_android_sdk()
+        self._install_android_ndk()
+        self._install_android_packages()
+
+        # final configuration check: some configuration tokens cannot be
+        # checked until the platform is installed.
+        self.check_configuration_tokens()
+        if not self._p4a_have_aab_support():
+            self.buildozer.error(
+                "This buildozer version requires a python-for-android version with AAB (Android App Bundle) support. "
+                "Please update your pinned version accordingly."
+            )
+            raise BuildozerException()
+
+        self.buildozer.environ.update({
+            'PACKAGES_PATH': self.buildozer.global_packages_dir,
+            'ANDROIDSDK': self.android_sdk_dir,
+            'ANDROIDNDK': self.android_ndk_dir,
+            'ANDROIDAPI': self.android_api,
+            'ANDROIDMINAPI': self.android_minapi,
+        })
+
+    def _install_p4a(self):
+        cmd = self.buildozer.cmd
+        p4a_fork = self.buildozer.config.getdefault(
+            'app', 'p4a.fork', self.p4a_fork
+        )
+        p4a_url = self.buildozer.config.getdefault(
+            'app', 'p4a.url', f'https://github.com/{p4a_fork}/python-for-android.git'
+        )
+        p4a_branch = self.buildozer.config.getdefault(
+            'app', 'p4a.branch', self.p4a_branch
+        )
+        p4a_commit = self.buildozer.config.getdefault(
+            'app', 'p4a.commit', self.p4a_commit
+        )
+
+        p4a_dir = self.p4a_dir
+        system_p4a_dir = self.buildozer.config.getdefault('app',
+                                                          'p4a.source_dir')
+        if system_p4a_dir:
+            # Don't install anything, just check that the dir does exist
+            if not self.buildozer.file_exists(p4a_dir):
+                self.buildozer.error(
+                    'Path for p4a.source_dir does not exist')
+                self.buildozer.error('')
+                raise BuildozerException()
+        else:
+            # check that url/branch has not been changed
+            if self.buildozer.file_exists(p4a_dir):
+                cur_url = cmd(
+                    'git config --get remote.origin.url',
+                    get_stdout=True,
+                    cwd=p4a_dir,
+                )[0].strip()
+                cur_branch = cmd(
+                    'git branch -vv', get_stdout=True, cwd=p4a_dir
+                )[0].split()[1]
+                if any([cur_url != p4a_url, cur_branch != p4a_branch]):
+                    self.buildozer.info(
+                        f"Detected old url/branch ({cur_url}/{cur_branch}), deleting..."
+                    )
+                    rmtree(p4a_dir)
+
+            if not self.buildozer.file_exists(p4a_dir):
+                cmd(
+                    (
+                        'git clone -b {p4a_branch} --single-branch '
+                        '{p4a_url} {p4a_dir}'
+                    ).format(
+                        p4a_branch=p4a_branch,
+                        p4a_url=p4a_url,
+                        p4a_dir=self.p4a_directory_name,
+                    ),
+                    cwd=self.buildozer.platform_dir,
+                )
+            elif self.platform_update:
+                cmd('git clean -dxf', cwd=p4a_dir)
+                current_branch = cmd('git rev-parse --abbrev-ref HEAD',
+                                     get_stdout=True, cwd=p4a_dir)[0].strip()
+                if current_branch == p4a_branch:
+                    cmd('git pull', cwd=p4a_dir)
+                else:
+                    cmd('git fetch --tags origin {0}:{0}'.format(p4a_branch),
+                        cwd=p4a_dir)
+                    cmd('git checkout {}'.format(p4a_branch), cwd=p4a_dir)
+            if p4a_commit != 'HEAD':
+                cmd('git reset --hard {}'.format(p4a_commit), cwd=p4a_dir)
+
+        # also install p4a's dependencies; currently only its setup.py
+        # knows about them, so extract them from there.
+        try:
+            with open(join(self.p4a_dir, "setup.py")) as fd:
+                setup = fd.read()
+                deps = re.findall(r"^\s*install_reqs = (\[[^\]]*\])", setup, re.DOTALL | re.MULTILINE)[0]
+                deps = ast.literal_eval(deps)
+        except IOError:
+            self.buildozer.error('Failed to read python-for-android setup.py at {}'.format(
+                join(self.p4a_dir, 'setup.py')))
+            sys.exit(1)
+        pip_deps = []
+        for dep in deps:
+            pip_deps.append("'{}'".format(dep))
+
+        # pip's --user flag is invalid inside a virtualenv or conda env
+        options = "--user"
+        if "VIRTUAL_ENV" in os.environ or "CONDA_PREFIX" in os.environ:
+            options = ""
+        cmd('{} -m pip install -q {} {}'.format(executable, options, " ".join(pip_deps)))
+
+    def compile_platform(self):
+        app_requirements = self.buildozer.config.getlist(
+            'app', 'requirements', '')
+        dist_name = self.buildozer.config.get('app', 'package.name')
+        local_recipes = self.get_local_recipes_dir()
+        requirements = ','.join(app_requirements)
+        options = []
+
+        source_dirs = {
+            'P4A_{}_DIR'.format(name[20:]): realpath(expanduser(value))
+            for name, value in self.buildozer.config.items('app')
+            if name.startswith('requirements.source.')
+        }
+        if source_dirs:
+            self.buildozer.environ.update(source_dirs)
+            self.buildozer.info('Using custom source dirs:\n    {}'.format(
+                '\n    '.join(['{} = {}'.format(k, v)
+                               for k, v in source_dirs.items()])))
+
+        if self.buildozer.config.getbooldefault('app', 'android.copy_libs', True):
+            options.append("--copy-libs")
+        # support for recipes in a local directory within the project
+        if local_recipes:
+            options.append('--local-recipes')
+            options.append(local_recipes)
+
+        p4a_create = "create --dist_name={} --bootstrap={} --requirements={} ".format(dist_name, self._p4a_bootstrap, requirements)
+
+        for arch in self._archs:
+            p4a_create += "--arch {} ".format(arch)
+
+        p4a_create += " ".join(options)
+
+        self._p4a(p4a_create, get_stdout=True)
+
+    def get_available_packages(self):
+        return True
+
+    def get_dist_dir(self, dist_name):
+        """Find the dist dir with the given name if one
+        already exists, otherwise return a new dist_dir name.
+        """
+
+        # If the expected dist name does exist, simply use that
+        expected_dist_dir = join(self._build_dir, 'dists', dist_name)
+        if exists(expected_dist_dir):
+            return expected_dist_dir
+
+        # If no directory has been found yet, our dist probably
+        # doesn't exist yet, so use the expected name
+        return expected_dist_dir
+
+    def get_local_recipes_dir(self):
+        local_recipes = self.buildozer.config.getdefault('app', 'p4a.local_recipes')
+        return realpath(expanduser(local_recipes)) if local_recipes else None
+
+    def execute_build_package(self, build_cmd):
+        # translate the old_toolchain-style build options into the new
+        # p4a toolchain command line
+        dist_name = self.buildozer.config.get('app', 'package.name')
+        local_recipes = self.get_local_recipes_dir()
+        cmd = [self.artifact_format, "--bootstrap", self._p4a_bootstrap, "--dist_name", dist_name]
+        for args in build_cmd:
+            option, values = args[0], args[1:]
+            if option == "debug":
+                continue
+            elif option == "release":
+                cmd.append("--release")
+                if self.check_p4a_sign_env(True):
+                    cmd.append("--sign")
+                continue
+            if option == "--window":
+                cmd.append("--window")
+            elif option == "--sdk":
+                cmd.append("--android_api")
+                cmd.extend(values)
+            else:
+                cmd.extend(args)
+
+        # support for presplash background color
+        presplash_color = self.buildozer.config.getdefault('app', 'android.presplash_color', None)
+        if presplash_color:
+            cmd.append('--presplash-color')
+            cmd.append("'{}'".format(presplash_color))
+
+        # support for services
+        services = self.buildozer.config.getlist('app', 'services', [])
+        for service in services:
+            cmd.append("--service")
+            cmd.append(service)
+
+        # support for copy-libs
+        if self.buildozer.config.getbooldefault('app', 'android.copy_libs', True):
+            cmd.append("--copy-libs")
+
+        # support for recipes in a local directory within the project
+        if local_recipes:
+            cmd.append('--local-recipes')
+            cmd.append(local_recipes)
+
+        # support for blacklist/whitelist filename
+        whitelist_src = self.buildozer.config.getdefault('app', 'android.whitelist_src', None)
+        blacklist_src = self.buildozer.config.getdefault('app', 'android.blacklist_src', None)
+        if whitelist_src:
+            cmd.append('--whitelist')
+            cmd.append(realpath(expanduser(whitelist_src)))
+        if blacklist_src:
+            cmd.append('--blacklist')
+            cmd.append(realpath(expanduser(blacklist_src)))
+
+        # support for java directory
+        javadirs = self.buildozer.config.getlist('app', 'android.add_src', [])
+        for javadir in javadirs:
+            cmd.append('--add-source')
+            cmd.append(realpath(expanduser(javadir)))
+
+        # support for aars
+        aars = self.buildozer.config.getlist('app', 'android.add_aars', [])
+        for aar in aars:
+            cmd.append('--add-aar')
+            cmd.append(realpath(expanduser(aar)))
+
+        # support for assets folder
+        assets = self.buildozer.config.getlist('app', 'android.add_assets', [])
+        for asset in assets:
+            cmd.append('--add-asset')
+            if ':' in asset:
+                asset_src, asset_dest = asset.split(":")
+            else:
+                asset_src = asset
+                asset_dest = asset
+            cmd.append(realpath(expanduser(asset_src)) + ':' + asset_dest)
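+        # e.g. android.add_assets = data/db.sqlite:db/db.sqlite bundles the
+        # local file under the given in-app asset path (values hypothetical)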
+
+        # support for uses-lib
+        uses_library = self.buildozer.config.getlist(
+            'app', 'android.uses_library', '')
+        for lib in uses_library:
+            cmd.append('--uses-library={}'.format(lib))
+
+        # support for activity-class-name
+        activity_class_name = self.buildozer.config.getdefault(
+            'app', 'android.activity_class_name', 'org.kivy.android.PythonActivity')
+        if activity_class_name != 'org.kivy.android.PythonActivity':
+            cmd.append('--activity-class-name={}'.format(activity_class_name))
+
+        # support for service-class-name
+        service_class_name = self.buildozer.config.getdefault(
+            'app', 'android.service_class_name', 'org.kivy.android.PythonService')
+        if service_class_name != 'org.kivy.android.PythonService':
+            cmd.append('--service-class-name={}'.format(service_class_name))
+
+        # support for extra-manifest-xml
+        extra_manifest_xml = self.buildozer.config.getdefault(
+            'app', 'android.extra_manifest_xml', '')
+        if extra_manifest_xml:
+            cmd.append('--extra-manifest-xml="{}"'.format(open(extra_manifest_xml, 'rt').read()))
+
+        # support for extra-manifest-application-arguments
+        extra_manifest_application_arguments = self.buildozer.config.getdefault(
+            'app', 'android.extra_manifest_application_arguments', '')
+        if extra_manifest_application_arguments:
+            args_body = open(extra_manifest_application_arguments, 'rt').read().replace('"', '\\"').replace('\n', ' ').replace('\t', ' ')
+            cmd.append('--extra-manifest-application-arguments="{}"'.format(args_body))
+
+        # support for gradle dependencies
+        gradle_dependencies = self.buildozer.config.getlist('app', 'android.gradle_dependencies', [])
+        for gradle_dependency in gradle_dependencies:
+            cmd.append('--depend')
+            cmd.append(gradle_dependency)
+
+        # support for manifestPlaceholders
+        manifest_placeholders = self.buildozer.config.getdefault('app', 'android.manifest_placeholders', None)
+        if manifest_placeholders:
+            cmd.append('--manifest-placeholders')
+            cmd.append("{}".format(manifest_placeholders))
+
+        # support disabling of compilation
+        compile_py = self.buildozer.config.getdefault('app', 'android.no-compile-pyo', None)
+        if compile_py:
+            cmd.append('--no-compile-pyo')
+
+        for arch in self._archs:
+            cmd.append('--arch')
+            cmd.append(arch)
+
+        cmd = " ".join(cmd)
+        self._p4a(cmd)
+
+    def get_release_mode(self):
+        # aab and aar artifacts are named *-release even when unsigned
+        if self.check_p4a_sign_env() or self.artifact_format in ["aab", "aar"]:
+            return "release"
+        return "release-unsigned"
+
+    def check_p4a_sign_env(self, error=False):
+        keys = ["KEYALIAS", "KEYSTORE_PASSWD", "KEYSTORE", "KEYALIAS_PASSWD"]
+        check = True
+        for key in keys:
+            key = "P4A_RELEASE_{}".format(key)
+            if key not in os.environ:
+                if error:
+                    self.buildozer.error(
+                        ("Asking for release but {} is missing"
+                         "--sign will not be passed").format(key))
+                check = False
+        return check
+
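+    # Example release-signing environment consumed by check_p4a_sign_env
+    # (values hypothetical):
+    #   export P4A_RELEASE_KEYSTORE=~/keystores/release.keystore
+    #   export P4A_RELEASE_KEYSTORE_PASSWD=...
+    #   export P4A_RELEASE_KEYALIAS=myalias
+    #   export P4A_RELEASE_KEYALIAS_PASSWD=...
+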
+    def cmd_run(self, *args):
+        entrypoint = self.buildozer.config.getdefault(
+            'app', 'android.entrypoint')
+        if not entrypoint:
+            self.buildozer.config.set('app', 'android.entrypoint', 'org.kivy.android.PythonActivity')
+
+        super().cmd_run(*args)
+
+        entrypoint = self.buildozer.config.getdefault(
+            'app', 'android.entrypoint', 'org.kivy.android.PythonActivity')
+
+        package = self._get_package()
+
+        # push on the device
+        for serial in self.serials:
+            self.buildozer.environ['ANDROID_SERIAL'] = serial
+            self.buildozer.info('Run on {}'.format(serial))
+            self.buildozer.cmd(
+                '{adb} shell am start -n {package}/{entry} -a {entry}'.format(
+                    adb=self.adb_cmd,
+                    package=package,
+                    entry=entrypoint),
+                cwd=self.buildozer.global_platform_dir)
+        self.buildozer.environ.pop('ANDROID_SERIAL', None)
+
+        while True:
+            if self._get_pid():
+                break
+            sleep(.1)
+            self.buildozer.info('Waiting for application to start.')
+
+        self.buildozer.info('Application started.')
+
+    def cmd_p4a(self, *args):
+        '''
+        Run p4a commands. Args must come after --, or
+        use --alias to make an alias
+        '''
+        self.check_requirements()
+        self.install_platform()
+        args = args[0]
+        if args and args[0] == '--alias':
+            print('To set up p4a in this shell session, execute:')
+            print('    alias p4a=$(buildozer {} p4a --alias 2>&1 >/dev/null)'
+                  .format(self.targetname))
+            sys.stderr.write('PYTHONPATH={} {}\n'.format(self.p4a_dir, self._p4a_cmd))
+        else:
+            self._p4a(' '.join(args) if args else '')
+
+    def cmd_clean(self, *args):
+        '''
+        Clean the build and distribution
+        '''
+        self._p4a("clean_builds")
+        self._p4a("clean_dists")
+
+    def _get_package(self):
+        config = self.buildozer.config
+        package_domain = config.getdefault('app', 'package.domain', '')
+        package = config.get('app', 'package.name')
+        if package_domain:
+            package = package_domain + '.' + package
+        return package.lower()
+
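+    # e.g. package.domain = org.example and package.name = MyApp yield the
+    # package id 'org.example.myapp' (values hypothetical)
+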
+    def _generate_whitelist(self, dist_dir):
+        p4a_whitelist = self.buildozer.config.getlist(
+            'app', 'android.whitelist') or []
+        whitelist_fn = join(dist_dir, 'whitelist.txt')
+        with open(whitelist_fn, 'w') as fd:
+            for wl in p4a_whitelist:
+                fd.write(wl + '\n')
+
+    def build_package(self):
+        dist_name = self.buildozer.config.get('app', 'package.name')
+        dist_dir = self.get_dist_dir(dist_name)
+        config = self.buildozer.config
+        package = self._get_package()
+        version = self.buildozer.get_version()
+
+        # add extra libs/armeabi files in dist/default/libs/armeabi
+        # (same for armeabi-v7a, arm64-v8a, x86, mips)
+        for config_key, lib_dir in (
+                ('android.add_libs_armeabi', 'armeabi'),
+                ('android.add_libs_armeabi_v7a', 'armeabi-v7a'),
+                ('android.add_libs_arm64_v8a', 'arm64-v8a'),
+                ('android.add_libs_x86', 'x86'),
+                ('android.add_libs_mips', 'mips')):
+
+            patterns = config.getlist('app', config_key, [])
+            if not patterns:
+                continue
+            if lib_dir not in self._archs:
+                continue
+
+            self.buildozer.debug('Search and copy libs for {}'.format(lib_dir))
+            for fn in self.buildozer.file_matches(patterns):
+                self.buildozer.file_copy(
+                    join(self.buildozer.root_dir, fn),
+                    join(dist_dir, 'libs', lib_dir, basename(fn)))
+
+        # update the project.properties libraries references
+        self._update_libraries_references(dist_dir)
+
+        # generate the whitelist if needed
+        self._generate_whitelist(dist_dir)
+
+        # build the app
+        build_cmd = [
+            ("--name", quote(config.get('app', 'title'))),
+            ("--version", version),
+            ("--package", package),
+            ("--minsdk", config.getdefault('app', 'android.minapi',
+                                           self.android_minapi)),
+            ("--ndk-api", config.getdefault('app', 'android.minapi',
+                                            self.android_minapi)),
+        ]
+        is_private_storage = config.getbooldefault(
+            'app', 'android.private_storage', True)
+        if is_private_storage:
+            build_cmd += [("--private", self.buildozer.app_dir)]
+        else:
+            build_cmd += [("--dir", self.buildozer.app_dir)]
+
+        # add permissions
+        permissions = config.getlist('app', 'android.permissions', [])
+        for permission in permissions:
+            # force the last component of the permission to be uppercase
+            permission = permission.split('.')
+            permission[-1] = permission[-1].upper()
+            permission = '.'.join(permission)
+            build_cmd += [("--permission", permission)]
+
+        # add features
+        features = config.getlist('app', 'android.features', [])
+        for feature in features:
+            build_cmd += [("--feature", feature)]
+
+        # android.entrypoint
+        entrypoint = config.getdefault('app', 'android.entrypoint', 'org.kivy.android.PythonActivity')
+        build_cmd += [('--android-entrypoint', entrypoint)]
+
+        # android.apptheme
+        apptheme = config.getdefault('app', 'android.apptheme', '@android:style/Theme.NoTitleBar')
+        build_cmd += [('--android-apptheme', apptheme)]
+
+        # android.compile_options
+        compile_options = config.getlist('app', 'android.add_compile_options', [])
+        for option in compile_options:
+            build_cmd += [('--add-compile-option', option)]
+
+        # android.add_gradle_repositories
+        repos = config.getlist('app', 'android.add_gradle_repositories', [])
+        for repo in repos:
+            build_cmd += [('--add-gradle-repository', repo)]
+
+        # android packaging options
+        pkgoptions = config.getlist('app', 'android.add_packaging_options', [])
+        for pkgoption in pkgoptions:
+            build_cmd += [('--add-packaging-option', pkgoption)]
+
+        # meta-data
+        meta_datas = config.getlistvalues('app', 'android.meta_data', [])
+        for meta in meta_datas:
+            key, value = meta.split('=', 1)
+            meta = '{}={}'.format(key.strip(), value.strip())
+            build_cmd += [("--meta-data", meta)]
+
+        # add extra Java jar files
+        add_jars = config.getlist('app', 'android.add_jars', [])
+        for pattern in add_jars:
+            pattern = join(self.buildozer.root_dir, pattern)
+            matches = glob(expanduser(pattern.strip()))
+            if matches:
+                for jar in matches:
+                    build_cmd += [("--add-jar", jar)]
+            else:
+                raise SystemError('Failed to find jar file: {}'.format(
+                    pattern))
+
+        # add Java activity
+        add_activities = config.getlist('app', 'android.add_activities', [])
+        for activity in add_activities:
+            build_cmd += [("--add-activity", activity)]
+
+        # add presplash, lottie animation or static
+        presplash = config.getdefault('app', 'android.presplash_lottie', '')
+        if presplash:
+            build_cmd += [("--presplash-lottie", join(self.buildozer.root_dir,
+                                                      presplash))]
+        else:
+            presplash = config.getdefault('app', 'presplash.filename', '')
+            if presplash:
+                build_cmd += [("--presplash", join(self.buildozer.root_dir,
+                                                   presplash))]
+
+        # add icon
+        icon = config.getdefault('app', 'icon.filename', '')
+        if icon:
+            build_cmd += [("--icon", join(self.buildozer.root_dir, icon))]
+        icon_fg = config.getdefault('app', 'icon.adaptive_foreground.filename', '')
+        icon_bg = config.getdefault('app', 'icon.adaptive_background.filename', '')
+        if icon_fg and icon_bg:
+            build_cmd += [("--icon-fg", join(self.buildozer.root_dir, icon_fg))]
+            build_cmd += [("--icon-bg", join(self.buildozer.root_dir, icon_bg))]
+
+        # OUYA Console support
+        ouya_category = config.getdefault('app', 'android.ouya.category',
+                                          '').upper()
+        if ouya_category:
+            if ouya_category not in ('GAME', 'APP'):
+                raise SystemError(
+                    'Invalid android.ouya.category: "{}" must be one of GAME or APP'.format(
+                        ouya_category))
+            # add icon
+            ouya_icon = config.getdefault('app', 'android.ouya.icon.filename',
+                                          '')
+            build_cmd += [("--ouya-category", ouya_category)]
+            build_cmd += [("--ouya-icon", join(self.buildozer.root_dir,
+                                               ouya_icon))]
+
+        if config.getdefault('app', 'p4a.bootstrap', 'sdl2') != 'service_only':
+            # add orientation
+            orientation = config.getdefault('app', 'orientation', 'landscape')
+            if orientation == 'all':
+                orientation = 'sensor'
+            build_cmd += [("--orientation", orientation)]
+
+            # fullscreen ?
+            fullscreen = config.getbooldefault('app', 'fullscreen', True)
+            if not fullscreen:
+                build_cmd += [("--window", )]
+
+        # wakelock ?
+        wakelock = config.getbooldefault('app', 'android.wakelock', False)
+        if wakelock:
+            build_cmd += [("--wakelock", )]
+
+        # AndroidX ?
+        enable_androidx = config.getbooldefault('app',
+                                                'android.enable_androidx',
+                                                False)
+        if enable_androidx:
+            build_cmd += [("--enable-androidx", )]
+
+        # intent filters
+        intent_filters = config.getdefault(
+            'app', 'android.manifest.intent_filters', '')
+        if intent_filters:
+            build_cmd += [("--intent-filters", join(self.buildozer.root_dir,
+                                                    intent_filters))]
+
+        # activity launch mode
+        launch_mode = config.getdefault(
+            'app', 'android.manifest.launch_mode', '')
+        if launch_mode:
+            build_cmd += [("--activity-launch-mode", launch_mode)]
+
+        # numeric version
+        numeric_version = config.getdefault('app', 'android.numeric_version')
+        if numeric_version:
+            build_cmd += [("--numeric-version", numeric_version)]
+
+        # android.allow_backup
+        allow_backup = config.getbooldefault('app', 'android.allow_backup', True)
+        if not allow_backup:
+            build_cmd += [('--allow-backup', 'false')]
+
+        # android.backup_rules
+        backup_rules = config.getdefault('app', 'android.backup_rules', '')
+        if backup_rules:
+            build_cmd += [("--backup-rules", join(self.buildozer.root_dir,
+                                                  backup_rules))]
+
+        # select the build mode: debug or release.
+        if self.build_mode == 'debug':
+            build_cmd += [("debug", )]
+            mode = 'debug'
+            mode_sign = mode
+        else:
+            build_cmd += [("release", )]
+            mode_sign = "release"
+            mode = self.get_release_mode()
+
+        try:
+            self.buildozer.hook("android_pre_build_apk")
+            self.execute_build_package(build_cmd)
+            self.buildozer.hook("android_post_build_apk")
+        except Exception:
+            # the hooks may fail because the artifact has not been
+            # produced yet
+            pass
+
+        build_tools_versions = os.listdir(join(self.android_sdk_dir, "build-tools"))
+        build_tools_versions = sorted(build_tools_versions, key=LooseVersion)
+        build_tools_version = build_tools_versions[-1]
+        gradle_files = ["build.gradle", "gradle", "gradlew"]
+        is_gradle_build = build_tools_version >= "25.0" and any(
+            (exists(join(dist_dir, x)) for x in gradle_files))
+        packagename = config.get('app', 'package.name')
+
+        if is_gradle_build:
+            # with gradle builds, the artifact uses the package name and
+            # carries no version in its filename
+            packagename_src = basename(dist_dir)  # gradle specifically uses the folder name
+            artifact = u'{packagename}-{mode}.{artifact_format}'.format(
+                packagename=packagename_src, mode=mode, artifact_format=self.artifact_format)
+            if self.artifact_format == "apk":
+                artifact_dir = join(dist_dir, "build", "outputs", "apk", mode_sign)
+            elif self.artifact_format == "aab":
+                artifact_dir = join(dist_dir, "build", "outputs", "bundle", mode_sign)
+            elif self.artifact_format == "aar":
+                artifact_dir = join(dist_dir, "build", "outputs", "aar")
+
+        else:
+            # with ant builds, the apk uses the app title and includes the version
+            bl = u'\'" ,'
+            apptitle = config.get('app', 'title')
+            if hasattr(apptitle, 'decode'):
+                apptitle = apptitle.decode('utf-8')
+            apktitle = ''.join([x for x in apptitle if x not in bl])
+            artifact = u'{title}-{version}-{mode}.apk'.format(
+                title=apktitle,
+                version=version,
+                mode=mode)
+            artifact_dir = join(dist_dir, "bin")
+
+        artifact_dest = u'{packagename}-{version}-{arch}-{mode}.{artifact_format}'.format(
+            packagename=packagename, mode=mode, version=version,
+            arch=self.archs_snake, artifact_format=self.artifact_format)
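+        # e.g. myapp-1.0-arm64-v8a_armeabi-v7a-debug.apk (name illustrative)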
+
+        # copy to our place
+        copyfile(join(artifact_dir, artifact), join(self.buildozer.bin_dir, artifact_dest))
+
+        self.buildozer.info('Android packaging done!')
+        self.buildozer.info(
+            u'Artifact {0} available in the bin directory'.format(artifact_dest))
+        self.buildozer.state['android:latestapk'] = artifact_dest
+        self.buildozer.state['android:latestmode'] = self.build_mode
+
+    def _update_libraries_references(self, dist_dir):
+        # ensure the project.properties exist
+        project_fn = join(dist_dir, 'project.properties')
+
+        if not self.buildozer.file_exists(project_fn):
+            content = [
+                'target=android-{}\n'.format(self.android_api),
+                'APP_PLATFORM={}\n'.format(self.android_minapi)]
+        else:
+            with io.open(project_fn, encoding='utf-8') as fd:
+                content = fd.readlines()
+
+        # strip out existing library references; they are regenerated below
+        references = []
+        for line in content[:]:
+            if not line.startswith('android.library.reference.'):
+                continue
+            content.remove(line)
+
+        # convert our references to relative path
+        app_references = self.buildozer.config.getlist(
+            'app', 'android.library_references', [])
+        source_dir = realpath(expanduser(self.buildozer.config.getdefault(
+            'app', 'source.dir', '.')))
+        for cref in app_references:
+            # get the full path of the current reference
+            ref = realpath(join(source_dir, cref))
+            if not self.buildozer.file_exists(ref):
+                self.buildozer.error(
+                    'Invalid library reference (path not found): {}'.format(
+                        cref))
+                sys.exit(1)
+            # get a relative path from the project file
+            ref = relpath(ref, realpath(expanduser(dist_dir)))
+            # collect the relative reference
+            references.append(ref)
+
+        # recreate the project.properties
+        with io.open(project_fn, 'w', encoding='utf-8') as fd:
+
+            # content is already text under Python 3, so write it directly
+            fd.writelines(content)
+            if content and not content[-1].endswith(u'\n'):
+                fd.write(u'\n')
+            for index, ref in enumerate(references):
+                fd.write(u'android.library.reference.{}={}\n'.format(index + 1, ref))
+
+        self.buildozer.debug('project.properties updated')
+
+    @property
+    def serials(self):
+        if hasattr(self, '_serials'):
+            return self._serials
+        serial = environ.get('ANDROID_SERIAL')
+        if serial:
+            return serial.split(',')
+        lines = self.buildozer.cmd('{} devices'.format(self.adb_cmd),
+                                   get_stdout=True)[0].splitlines()
+        serials = []
+        for serial in lines:
+            if not serial:
+                continue
+            if serial.startswith('*') or serial.startswith('List '):
+                continue
+            serials.append(serial.split()[0])
+        self._serials = serials
+        return serials
+
+    def cmd_adb(self, *args):
+        '''
+        Run adb from the Android SDK.
+        Args must come after --, or use
+        --alias to make an alias
+        '''
+        self.check_requirements()
+        self.install_platform()
+        args = args[0]
+        if args and args[0] == '--alias':
+            print('To set up ADB in this shell session, execute:')
+            print('    alias adb=$(buildozer {} adb --alias 2>&1 >/dev/null)'
+                  .format(self.targetname))
+            sys.stderr.write(self.adb_cmd + '\n')
+        else:
+            self.buildozer.cmd(' '.join([self.adb_cmd] + args))
+
+    def cmd_deploy(self, *args):
+        super().cmd_deploy(*args)
+        state = self.buildozer.state
+        if 'android:latestapk' not in state:
+            self.buildozer.error('No APK built yet. Run "debug" first.')
+
+        if state.get('android:latestmode', '') != 'debug':
+            self.buildozer.error('Only debug APKs are supported for deploy')
+            return
+
+        # search the APK in the bin dir
+        apk = state['android:latestapk']
+        full_apk = join(self.buildozer.bin_dir, apk)
+        if not self.buildozer.file_exists(full_apk):
+            self.buildozer.error(
+                'Unable to find the latest APK. Please run "debug" again.')
+
+        # push on the device
+        for serial in self.serials:
+            self.buildozer.environ['ANDROID_SERIAL'] = serial
+            self.buildozer.info('Deploy on {}'.format(serial))
+            self.buildozer.cmd('{0} install -r "{1}"'.format(
+                               self.adb_cmd, full_apk),
+                               cwd=self.buildozer.global_platform_dir)
+        self.buildozer.environ.pop('ANDROID_SERIAL', None)
+
+        self.buildozer.info('Application pushed.')
+
+    def _get_pid(self):
+        pid, *_ = self.buildozer.cmd(
+            f'{self.adb_cmd} shell pidof {self._get_package()}',
+            get_stdout=True,
+            show_output=False,
+            break_on_error=False,
+            quiet=True,
+        )
+        if pid:
+            return pid.strip()
+        return False
+
+    def cmd_logcat(self, *args):
+        '''Show the log from the device
+        '''
+        self.check_requirements()
+        serials = self.serials
+        if not serials:
+            return
+        filters = self.buildozer.config.getrawdefault(
+            "app", "android.logcat_filters", "", section_sep=":", split_char=" ")
+        filters = " ".join(filters)
+        # logcat is only attached to the first device found
+        self.buildozer.environ['ANDROID_SERIAL'] = serials[0]
+        extra_args = []
+        pid = None
+        if self.buildozer.config.getdefault('app', 'android.logcat_pid_only'):
+            pid = self._get_pid()
+            if pid:
+                extra_args.extend(('--pid', pid))
+
+        self.buildozer.cmd(
+            f"{self.adb_cmd} logcat {filters} {' '.join(extra_args)}",
+            cwd=self.buildozer.global_platform_dir,
+            show_output=True,
+            run_condition=self._get_pid if pid else None,
+            break_on_error=False,
+        )
+
+        self.buildozer.info(f"{self._get_package()} terminated")
+
+        self.buildozer.environ.pop('ANDROID_SERIAL', None)
+
+
+def get_target(buildozer):
+    buildozer.targetname = "android"
+    return TargetAndroid(buildozer)
diff --git a/venv/lib/python3.8/site-packages/buildozer/targets/ios.py b/venv/lib/python3.8/site-packages/buildozer/targets/ios.py
new file mode 100644
index 0000000..e2d0b73
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer/targets/ios.py
@@ -0,0 +1,463 @@
+'''
+iOS target, based on kivy-ios project
+'''
+
+import sys
+import plistlib
+from buildozer import BuildozerCommandException
+from buildozer.target import Target, no_config
+from os.path import join, basename, expanduser, realpath
+from getpass import getpass
+
+
+PHP_TEMPLATE = '''
+<!doctype html>
+<html>
+<head>
+<title>Install {appname}</title>
+</head>
+<body>
+<h1>Install {appname}</h1>
+</body>
+</html>
+'''
+
+
+class TargetIos(Target):
+    targetname = "ios"
+
+    def __init__(self, buildozer):
+        super().__init__(buildozer)
+        executable = sys.executable or 'python'
+        self._toolchain_cmd = f"{executable} toolchain.py "
+        self._xcodebuild_cmd = "xcodebuild "
+        # set via install_platform()
+        self.ios_dir = None
+        self.ios_deploy_dir = None
+
+    def check_requirements(self):
+        if sys.platform != "darwin":
+            raise NotImplementedError("Only macOS is supported for iOS target")
+        checkbin = self.buildozer.checkbin
+        cmd = self.buildozer.cmd
+
+        checkbin('Xcode xcodebuild', 'xcodebuild')
+        checkbin('Xcode xcode-select', 'xcode-select')
+        checkbin('Git git', 'git')
+        checkbin('Cython cython', 'cython')
+        checkbin('pkg-config', 'pkg-config')
+        checkbin('autoconf', 'autoconf')
+        checkbin('automake', 'automake')
+        checkbin('libtool', 'libtool')
+
+        self.buildozer.debug('Check availability of an iPhone SDK')
+        sdk = cmd('xcodebuild -showsdks | fgrep "iphoneos" |'
+                'tail -n 1 | awk \'{print $2}\'',
+                get_stdout=True)[0]
+        if not sdk:
+            raise Exception(
+                'No iPhone SDK found. Please install at least one iOS SDK.')
+        else:
+            self.buildozer.debug(' -> found %r' % sdk)
+
+        self.buildozer.debug('Check Xcode path')
+        xcode = cmd('xcode-select -print-path', get_stdout=True)[0]
+        if not xcode:
+            raise Exception('Unable to get xcode path')
+        self.buildozer.debug(' -> found {0}'.format(xcode))
+
+    def install_platform(self):
+        """
+        Clones `kivy/kivy-ios` and `phonegap/ios-deploy` then sets `ios_dir`
+        and `ios_deploy_dir` accordingly.
+        """
+        self.ios_dir = self.install_or_update_repo('kivy-ios', platform='ios')
+        self.ios_deploy_dir = self.install_or_update_repo('ios-deploy',
+                                                          platform='ios',
+                                                          branch='1.7.0',
+                                                          owner='phonegap')
+
+    def toolchain(self, cmd, **kwargs):
+        kwargs.setdefault('cwd', self.ios_dir)
+        return self.buildozer.cmd(self._toolchain_cmd + cmd, **kwargs)
+
+    def xcodebuild(self, *args, **kwargs):
+        return self.buildozer.cmd(self._xcodebuild_cmd + ' '.join(arg for arg in args if arg is not None), **kwargs)
+
+    @property
+    def code_signing_allowed(self):
+        allowed = self.buildozer.config.getboolean("app", "ios.codesign.allowed")
+        allowed = "YES" if allowed else "NO"
+        return f"CODE_SIGNING_ALLOWED={allowed}"
+
+    @property
+    def code_signing_development_team(self):
+        team = self.buildozer.config.getdefault("app", f"ios.codesign.development_team.{self.build_mode}", None)
+        return f"DEVELOPMENT_TEAM={team}" if team else None
+
+    def get_available_packages(self):
+        available_modules = self.toolchain("recipes --compact", get_stdout=True)[0]
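+        # `recipes --compact` prints all known recipe names on a single line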
+        return available_modules.splitlines()[0].split()
+
+    def load_plist_from_file(self, plist_rfn):
+        with open(plist_rfn, 'rb') as f:
+            return plistlib.load(f)
+
+    def dump_plist_to_file(self, plist, plist_rfn):
+        with open(plist_rfn, 'wb') as f:
+            plistlib.dump(plist, f)
+
+    def compile_platform(self):
+        # for ios, the compilation depends really on the app requirements.
+        # compile the distribution only if the requirements changed.
+        last_requirements = self.buildozer.state.get('ios.requirements', '')
+        app_requirements = self.buildozer.config.getlist('app', 'requirements',
+                '')
+
+        # we need to extract the requirements that kivy-ios knows about
+        available_modules = self.get_available_packages()
+        onlyname = lambda x: x.split('==')[0]  # noqa: E731 do not assign a lambda expression, use a def
+        ios_requirements = [x for x in app_requirements if onlyname(x) in
+                            available_modules]
+
+        need_compile = 0
+        if last_requirements != ios_requirements:
+            need_compile = 1
+
+        # len('requirements.source.') == 20, so use name[20:]
+        source_dirs = {'{}_DIR'.format(name[20:].upper()):
+                            realpath(expanduser(value))
+                       for name, value in self.buildozer.config.items('app')
+                       if name.startswith('requirements.source.')}
+        if source_dirs:
+            need_compile = 1
+            self.buildozer.environ.update(source_dirs)
+            self.buildozer.info('Using custom source dirs:\n    {}'.format(
+                '\n    '.join(['{} = {}'.format(k, v)
+                               for k, v in source_dirs.items()])))
+
+        if not need_compile:
+            self.buildozer.info('Distribution already compiled, pass.')
+            return
+
+        modules_str = ' '.join(ios_requirements)
+        self.toolchain(f"build {modules_str}")
+
+        if not self.buildozer.file_exists(self.ios_deploy_dir, 'ios-deploy'):
+            self.xcodebuild(cwd=self.ios_deploy_dir)
+
+        self.buildozer.state['ios.requirements'] = ios_requirements
+        self.buildozer.state.sync()
+
+    def _get_package(self):
+        config = self.buildozer.config
+        package_domain = config.getdefault('app', 'package.domain', '')
+        package = config.get('app', 'package.name')
+        if package_domain:
+            package = package_domain + '.' + package
+        return package.lower()
+
+    def build_package(self):
+        self._unlock_keychain()
+
+        # create the project
+        app_name = self.buildozer.namify(self.buildozer.config.get('app',
+            'package.name'))
+
+        ios_frameworks = self.buildozer.config.getlist('app', 'ios.frameworks', '')
+        frameworks_cmd = ''
+        for framework in ios_frameworks:
+            frameworks_cmd += '--add-framework={} '.format(framework)
+
+        self.app_project_dir = join(self.ios_dir, '{0}-ios'.format(app_name.lower()))
+        if not self.buildozer.file_exists(self.app_project_dir):
+            cmd = f"create {frameworks_cmd}{app_name} {self.buildozer.app_dir}"
+        else:
+            cmd = f"update {frameworks_cmd}{app_name}-ios"
+        self.toolchain(cmd)
+
+        # fix the plist
+        plist_fn = '{}-Info.plist'.format(app_name.lower())
+        plist_rfn = join(self.app_project_dir, plist_fn)
+        version = self.buildozer.get_version()
+        self.buildozer.info('Update Plist {}'.format(plist_fn))
+        plist = self.load_plist_from_file(plist_rfn)
+        plist['CFBundleIdentifier'] = self._get_package()
+        plist['CFBundleShortVersionString'] = version
+        plist['CFBundleVersion'] = '{}.{}'.format(version,
+                self.buildozer.build_id)
+
+        # add icons
+        self._create_icons()
+
+        # Generate OTA distribution manifest if `app_url`, `display_image_url` and `full_size_image_url` are defined.
+        app_url = self.buildozer.config.getdefault("app", "ios.manifest.app_url", None)
+        display_image_url = self.buildozer.config.getdefault("app", "ios.manifest.display_image_url", None)
+        full_size_image_url = self.buildozer.config.getdefault("app", "ios.manifest.full_size_image_url", None)
+
+        if any((app_url, display_image_url, full_size_image_url)):
+
+            if not all((app_url, display_image_url, full_size_image_url)):
+                self.buildozer.error("Options ios.manifest.app_url, ios.manifest.display_image_url"
+                                     " and ios.manifest.full_size_image_url should be defined all together")
+                return
+
+            plist['manifest'] = {
+                'appURL': app_url,
+                'displayImageURL': display_image_url,
+                'fullSizeImageURL': full_size_image_url,
+            }
+
+        # ok, write the modified plist.
+        self.dump_plist_to_file(plist, plist_rfn)
+
+        mode = self.build_mode.capitalize()
+        self.xcodebuild(
+            f'-configuration {mode}',
+            '-allowProvisioningUpdates',
+            'ENABLE_BITCODE=NO',
+            self.code_signing_allowed,
+            self.code_signing_development_team,
+            'clean build',
+            cwd=self.app_project_dir)
+        ios_app_dir = '{app_lower}-ios/build/{mode}-iphoneos/{app_lower}.app'.format(
+                app_lower=app_name.lower(), mode=mode)
+        self.buildozer.state['ios:latestappdir'] = ios_app_dir
+
+        intermediate_dir = join(self.ios_dir, '{}-{}.intermediates'.format(app_name, version))
+        xcarchive = join(intermediate_dir, '{}-{}.xcarchive'.format(
+            app_name, version))
+        ipa_name = '{}-{}.ipa'.format(app_name, version)
+        ipa_tmp = join(intermediate_dir, ipa_name)
+        ipa = join(self.buildozer.bin_dir, ipa_name)
+        build_dir = join(self.ios_dir, '{}-ios'.format(app_name.lower()))
+
+        self.buildozer.rmdir(intermediate_dir)
+
+        self.buildozer.info('Creating archive...')
+        self.xcodebuild(
+            '-alltargets',
+            f'-configuration {mode}',
+            f'-scheme {app_name.lower()}',
+            f'-archivePath "{xcarchive}"',
+            '-destination \'generic/platform=iOS\'',
+            'archive',
+            'ENABLE_BITCODE=NO',
+            self.code_signing_allowed,
+            self.code_signing_development_team,
+            cwd=build_dir)
+
+        key = 'ios.codesign.{}'.format(self.build_mode)
+        ioscodesign = self.buildozer.config.getdefault('app', key, '')
+        if not ioscodesign:
+            self.buildozer.error('Cannot create the IPA package without a'
+                ' signature. You must fill in the "{}" token.'.format(key))
+            return
+        elif ioscodesign[0] not in ('"', "'"):
+            ioscodesign = '"{}"'.format(ioscodesign)
+
+        self.buildozer.info('Creating IPA...')
+        self.xcodebuild(
+            '-exportArchive',
+            f'-archivePath "{xcarchive}"',
+            f'-exportOptionsPlist "{plist_rfn}"',
+            f'-exportPath "{ipa_tmp}"',
+            f'CODE_SIGN_IDENTITY={ioscodesign}',
+            'ENABLE_BITCODE=NO',
+            cwd=build_dir)
+
+        self.buildozer.info('Moving IPA to bin...')
+        self.buildozer.file_rename(ipa_tmp, ipa)
+
+        self.buildozer.info('iOS packaging done!')
+        self.buildozer.info('IPA {0} available in the bin directory'.format(
+            basename(ipa)))
+        self.buildozer.state['ios:latestipa'] = ipa
+        self.buildozer.state['ios:latestmode'] = self.build_mode
+
+    def cmd_deploy(self, *args):
+        super().cmd_deploy(*args)
+        self._run_ios_deploy(lldb=False)
+
+    def cmd_run(self, *args):
+        super().cmd_run(*args)
+        self._run_ios_deploy(lldb=True)
+
+    def cmd_xcode(self, *args):
+        '''Open the xcode project.
+        '''
+        app_name = self.buildozer.namify(self.buildozer.config.get('app',
+            'package.name'))
+        app_name = app_name.lower()
+
+        ios_dir = join(self.buildozer.platform_dir, 'kivy-ios')
+        self.buildozer.cmd('open {}.xcodeproj'.format(
+            app_name), cwd=join(ios_dir, '{}-ios'.format(app_name)))
+
+    def _run_ios_deploy(self, lldb=False):
+        state = self.buildozer.state
+        if 'ios:latestappdir' not in state:
+            self.buildozer.error(
+                'App not built yet. Run "debug" or "release" first.')
+            return
+        ios_app_dir = state.get('ios:latestappdir')
+
+        if lldb:
+            debug_mode = '-d'
+            self.buildozer.info('Deploy and start the application')
+        else:
+            debug_mode = ''
+            self.buildozer.info('Deploy the application')
+
+        self.buildozer.cmd('{iosdeploy} {debug_mode} -b {app_dir}'.format(
+            iosdeploy=join(self.ios_deploy_dir, 'ios-deploy'),
+            debug_mode=debug_mode, app_dir=ios_app_dir),
+            cwd=self.ios_dir, show_output=True)
+
+    def _create_icons(self):
+        icon = self.buildozer.config.getdefault('app', 'icon.filename', '')
+        if not icon:
+            return
+        icon_fn = join(self.buildozer.app_dir, icon)
+        if not self.buildozer.file_exists(icon_fn):
+            self.buildozer.error('Icon {} does not exist'.format(icon_fn))
+            return
+
+        self.toolchain(f"icon {self.app_project_dir} {icon_fn}")
+
+    def check_configuration_tokens(self):
+        errors = []
+        config = self.buildozer.config
+        if not config.getboolean('app', 'ios.codesign.allowed'):
+            return
+        identity_debug = config.getdefault('app', 'ios.codesign.debug', '')
+        identity_release = config.getdefault('app', 'ios.codesign.release',
+                identity_debug)
+        available_identities = self._get_available_identities()
+
+        if not identity_debug:
+            errors.append('[app] "ios.codesign.debug" key missing, '
+                    'you must give a certificate name to use.')
+        elif identity_debug not in available_identities:
+            errors.append('[app] identity "{}" not found. '
+                    'Check with list_identities'.format(identity_debug))
+
+        if not identity_release:
+            errors.append('[app] "ios.codesign.release" key missing, '
+                    'you must give a certificate name to use.')
+        elif identity_release not in available_identities:
+            errors.append('[app] identity "{}" not found. '
+                    'Check with list_identities'.format(identity_release))
+        super().check_configuration_tokens(errors)
+
+    @no_config
+    def cmd_list_identities(self, *args):
+        '''List the available identities to use for signing.
+        '''
+        identities = self._get_available_identities()
+        print('Available identities:')
+        for x in identities:
+            print('  - {}'.format(x))
+
+    def _get_available_identities(self):
+        output = self.buildozer.cmd('security find-identity -v -p codesigning',
+                get_stdout=True)[0]
+
+        lines = output.splitlines()[:-1]
+        lines = [u'"{}"'.format(x.split('"')[1]) for x in lines]
+        return lines
+
+    def _unlock_keychain(self):
+        password_file = join(self.buildozer.buildozer_dir, '.ioscodesign')
+        password = None
+        if self.buildozer.file_exists(password_file):
+            with open(password_file) as fd:
+                password = fd.read()
+
+        if not password:
+            # no password available, try to unlock anyway...
+            error = self.buildozer.cmd('security unlock-keychain -u',
+                    break_on_error=False)[2]
+            if not error:
+                return
+        else:
+            # password available, try to unlock
+            error = self.buildozer.cmd('security unlock-keychain -p {}'.format(
+                password), break_on_error=False, sensible=True)[2]
+            if not error:
+                return
+
+        # we need the password to unlock.
+        correct = False
+        attempt = 3
+        while attempt:
+            attempt -= 1
+            password = getpass('Password to unlock the default keychain:')
+            error = self.buildozer.cmd('security unlock-keychain -p "{}"'.format(
+                password), break_on_error=False, sensible=True)[2]
+            if not error:
+                correct = True
+                break
+            self.buildozer.error('Invalid keychain password')
+
+        if not correct:
+            self.buildozer.error('Unable to unlock the keychain, exiting.')
+            raise BuildozerCommandException()
+
+        # maybe the user wants to save it for later reuse?
+        print(
+            'The keychain password can be saved in the build directory.\n'
+            'As soon as the build directory is cleaned, '
+            'the password will be erased.')
+
+        save = None
+        while save is None:
+            q = input('Do you want to save the password (Y/n): ')
+            if q in ('', 'Y'):
+                save = True
+            elif q == 'n':
+                save = False
+            else:
+                print('Invalid answer!')
+
+        if save:
+            with open(password_file, 'wb') as fd:
+                fd.write(password.encode())
+
+
+def get_target(buildozer):
+    return TargetIos(buildozer)
diff --git a/venv/lib/python3.8/site-packages/buildozer/targets/osx.py b/venv/lib/python3.8/site-packages/buildozer/targets/osx.py
new file mode 100644
index 0000000..6fd5d22
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/buildozer/targets/osx.py
@@ -0,0 +1,246 @@
+'''
+OSX target, based on kivy-sdk-packager
+'''
+
+import sys
+if sys.platform != 'darwin':
+    raise NotImplementedError('This will only work on osx')
+
+from buildozer.target import Target
+from os.path import exists, join, abspath, dirname
+from subprocess import check_call, check_output
+
+
+class TargetOSX(Target):
+    targetname = "osx"
+
+    def ensure_sdk(self):
+        self.buildozer.info('Check if kivy-sdk-packager exists')
+        if exists(
+                join(self.buildozer.platform_dir, 'kivy-sdk-packager-master')):
+            self.buildozer.info(
+                'kivy-sdk-packager found at {}'.format(
+                    self.buildozer.platform_dir))
+            return
+
+        self.buildozer.info('kivy-sdk-packager does not exist, clone it')
+        platdir = self.buildozer.platform_dir
+        check_call(
+            ('curl', '-O', '-L',
+            'https://github.com/kivy/kivy-sdk-packager/archive/master.zip'),
+            cwd=platdir)
+        check_call(('unzip', 'master.zip'), cwd=platdir)
+        check_call(('rm', 'master.zip'), cwd=platdir)
+
+    def download_kivy(self, cwd, py_branch=2):
+        current_kivy_vers = self.buildozer.config.get('app', 'osx.kivy_version')
+
+        if exists('/Applications/Kivy{}.app'.format(py_branch)):
+            self.buildozer.info('Kivy found in Applications dir...')
+            check_call(
+                ('cp', '-a', '/Applications/Kivy{}.app'.format(py_branch),
+                'Kivy.app'), cwd=cwd)
+
+        else:
+            if not exists(join(cwd, 'Kivy{}.dmg'.format(py_branch))):
+                self.buildozer.info('Downloading kivy...')
+                # check_output returns bytes; decode before comparing below
+                status_code = check_output(
+                    ('curl', '-L', '--write-out', '%{http_code}', '-o', 'Kivy{}.dmg'.format(py_branch),
+                    'https://kivy.org/downloads/{}/Kivy-{}-osx-python{}.dmg'
+                    .format(current_kivy_vers, current_kivy_vers, py_branch)),
+                    cwd=cwd).decode('utf-8').strip()
+
+                if status_code == "404":
+                    self.buildozer.error(
+                        "Unable to download the Kivy App. Check osx.kivy_version in your buildozer.spec, and verify "
+                        "Kivy servers are accessible. https://kivy.org/downloads/")
+                    check_call(("rm", "Kivy{}.dmg".format(py_branch)), cwd=cwd)
+                    sys.exit(1)
+
+            self.buildozer.info('Extracting and installing Kivy...')
+            check_call(('hdiutil', 'attach', cwd + '/Kivy{}.dmg'.format(py_branch)))
+            check_call(('cp', '-a', '/Volumes/Kivy/Kivy.app', './Kivy.app'), cwd=cwd)
+
+    def ensure_kivyapp(self):
+        self.buildozer.info('check if Kivy.app exists in local dir')
+        kivy_app_dir = join(self.buildozer.platform_dir, 'kivy-sdk-packager-master', 'osx')
+
+        py_branch = self.buildozer.config.get('app', 'osx.python_version')
+
+        if int(py_branch) not in (2, 3):
+            self.buildozer.error('incompatible python version... aborting')
+            sys.exit(1)
+
+        if exists(join(kivy_app_dir, 'Kivy.app')):
+            self.buildozer.info('Kivy.app found at ' + kivy_app_dir)
+        else:
+            self.download_kivy(kivy_app_dir, py_branch)
+
+    def check_requirements(self):
+        self.ensure_sdk()
+        self.ensure_kivyapp()
+
+    def check_configuration_tokens(self, errors=None):
+        if errors:
+            self.buildozer.info('Check target configuration tokens')
+            self.buildozer.error(
+                '{0} error(s) found in the buildozer.spec'.format(
+                len(errors)))
+            for error in errors:
+                print(error)
+            sys.exit(1)
+        # check
+
+    def build_package(self):
+        self.buildozer.info('Building package')
+
+        bc = self.buildozer.config
+        bcg = bc.get
+        package_name = bcg('app', 'package.name')
+        domain = bcg('app', 'package.domain')
+        title = bcg('app', 'title')
+        with open('requirements.txt') as fd:
+            app_deps = fd.read()
+        icon = bc.getdefault('app', 'icon.filename', '')
+        version = self.buildozer.get_version()
+        author = bc.getdefault('app', 'author', '')
+
+        self.buildozer.info('Create {}.app'.format(package_name))
+        cwd = join(self.buildozer.platform_dir, 'kivy-sdk-packager-master', 'osx')
+        # remove kivy from app_deps
+        app_deps = [a for a in app_deps.split('\n') if not a.startswith('#') and a not in ['kivy', '']]
+
+        cmd = [
+            'Kivy.app/Contents/Resources/script',
+            '-m', 'pip', 'install',
+        ]
+        cmd.extend(app_deps)
+        check_output(cmd, cwd=cwd)
+
+        cmd = [
+            'python', 'package_app.py', self.buildozer.app_dir,
+            '--appname={}'.format(package_name),
+            '--bundlename={}'.format(title),
+            '--bundleid={}'.format(domain),
+            '--bundleversion={}'.format(version),
+            '--displayname={}'.format(title),
+        ]
+        if icon:
+            cmd.append('--icon={}'.format(icon))
+        if author:
+            cmd.append('--author={}'.format(author))
+
+        check_output(cmd, cwd=cwd)
+
+        self.buildozer.info('{}.app created.'.format(package_name))
+        self.buildozer.info('Creating {}.dmg'.format(package_name))
+        check_output(
+            ('sh', '-x', 'create-osx-dmg.sh', package_name + '.app'),
+            cwd=cwd)
+        self.buildozer.info('{}.dmg created'.format(package_name))
+        self.buildozer.info('moving {}.dmg to bin.'.format(package_name))
+        binpath = join(
+            self.buildozer.user_build_dir or
+            dirname(abspath(self.buildozer.specfilename)), 'bin')
+        check_output(
+            ('cp', '-a', package_name + '.dmg', binpath),
+            cwd=cwd)
+        self.buildozer.info('All Done!')
+
+    def compile_platform(self):
+        pass
+
+    def install_platform(self):
+        # final configuration check: some settings cannot be checked
+        # until the platform is available.
+        self.check_configuration_tokens()
+        self.buildozer.environ.update({
+            'PACKAGES_PATH': self.buildozer.global_packages_dir,
+        })
+
+    def get_custom_commands(self):
+        result = []
+        for x in dir(self):
+            if not x.startswith('cmd_'):
+                continue
+            if x[4:] in self.buildozer.standard_cmds:
+                continue
+            result.append((x[4:], getattr(self, x).__doc__))
+        return result
+
+    def get_available_packages(self):
+        return ['kivy']
+
+    def run_commands(self, args):
+        if not args:
+            self.buildozer.error('Missing target command')
+            self.buildozer.usage()
+            sys.exit(1)
+
+        result = []
+        last_command = []
+        for arg in args:
+            if not arg.startswith('--'):
+                if last_command:
+                    result.append(last_command)
+                    last_command = []
+                last_command.append(arg)
+            else:
+                if not last_command:
+                    self.buildozer.error('Argument passed without a command')
+                    self.buildozer.usage()
+                    sys.exit(1)
+                last_command.append(arg)
+        if last_command:
+            result.append(last_command)
+
+        config_check = False
+
+        for item in result:
+            command, args = item[0], item[1:]
+            if not hasattr(self, 'cmd_{0}'.format(command)):
+                self.buildozer.error('Unknown command {0}'.format(command))
+                sys.exit(1)
+
+            func = getattr(self, 'cmd_{0}'.format(command))
+
+            need_config_check = not hasattr(func, '__no_config')
+            if need_config_check and not config_check:
+                config_check = True
+                self.check_configuration_tokens()
+
+            func(args)
+
+    def check_build_prepared(self):
+        self._build_prepared = False
+
+    def cmd_clean(self, *args):
+        self.buildozer.clean_platform()
+
+    def cmd_update(self, *args):
+        self.platform_update = True
+        self.buildozer.prepare_for_build()
+
+    def cmd_debug(self, *args):
+        self.buildozer.prepare_for_build()
+        self.build_mode = 'debug'
+        self.check_build_prepared()
+        self.buildozer.build()
+
+    def cmd_release(self, *args):
+        self.buildozer.prepare_for_build()
+        self.build_mode = 'release'
+        self.buildozer.build()
+
+    def cmd_deploy(self, *args):
+        self.buildozer.prepare_for_build()
+
+    def cmd_run(self, *args):
+        self.buildozer.prepare_for_build()
+
+    def cmd_serve(self, *args):
+        self.buildozer.cmd_serve()
+
+
+def get_target(buildozer):
+    return TargetOSX(buildozer)
diff --git a/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/METADATA b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/METADATA
new file mode 100644
index 0000000..a1b5c57
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/METADATA
@@ -0,0 +1,441 @@
+Metadata-Version: 2.1
+Name: colorama
+Version: 0.4.6
+Summary: Cross-platform colored terminal text.
+Project-URL: Homepage, https://github.com/tartley/colorama
+Author-email: Jonathan Hartley <tartley@tartley.com>
+License-File: LICENSE.txt
+Keywords: ansi,color,colour,crossplatform,terminal,text,windows,xplatform
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Terminals
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7
+Description-Content-Type: text/x-rst
+
+.. image:: https://img.shields.io/pypi/v/colorama.svg
+    :target: https://pypi.org/project/colorama/
+    :alt: Latest Version
+
+.. image:: https://img.shields.io/pypi/pyversions/colorama.svg
+    :target: https://pypi.org/project/colorama/
+    :alt: Supported Python versions
+
+.. image:: https://github.com/tartley/colorama/actions/workflows/test.yml/badge.svg
+    :target: https://github.com/tartley/colorama/actions/workflows/test.yml
+    :alt: Build Status
+
+Colorama
+========
+
+Makes ANSI escape character sequences (for producing colored terminal text and
+cursor positioning) work under MS Windows.
+
+.. |donate| image:: https://www.paypalobjects.com/en_US/i/btn/btn_donate_SM.gif
+  :target: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=2MZ9D2GMLYCUJ&item_name=Colorama&currency_code=USD
+  :alt: Donate with Paypal
+
+`PyPI for releases <https://pypi.org/project/colorama/>`_ |
+`Github for source <https://github.com/tartley/colorama>`_ |
+`Colorama for enterprise on Tidelift <https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme>`_
+
+If you find Colorama useful, please |donate| to the authors. Thank you!
+
+Installation
+------------
+
+Tested on CPython 2.7, 3.7, 3.8, 3.9 and 3.10 and Pypy 2.7 and 3.8.
+
+No requirements other than the standard library.
+
+.. code-block:: bash
+
+    pip install colorama
+    # or
+    conda install -c anaconda colorama
+
+Description
+-----------
+
+ANSI escape character sequences have long been used to produce colored terminal
+text and cursor positioning on Unix and Macs. Colorama makes this work on
+Windows, too, by wrapping ``stdout``, stripping ANSI sequences it finds (which
+would appear as gobbledygook in the output), and converting them into the
+appropriate win32 calls to modify the state of the terminal. On other platforms,
+Colorama does nothing.
+
+This has the upshot of providing a simple cross-platform API for printing
+colored terminal text from Python, and has the happy side-effect that existing
+applications or libraries which use ANSI sequences to produce colored output on
+Linux or Macs can now also work on Windows, simply by calling
+``colorama.just_fix_windows_console()`` (since v0.4.6) or ``colorama.init()``
+(all versions, but may have other side-effects – see below).
+
+An alternative approach is to install ``ansi.sys`` on Windows machines, which
+provides the same behaviour for all applications running in terminals. Colorama
+is intended for situations where that isn't easy (e.g., maybe your app doesn't
+have an installer.)
+
+Demo scripts in the source code repository print some colored text using
+ANSI sequences. Compare their output under Gnome-terminal's built-in ANSI
+handling, versus on Windows Command-Prompt using Colorama:
+
+.. image:: https://github.com/tartley/colorama/raw/master/screenshots/ubuntu-demo.png
+    :width: 661
+    :height: 357
+    :alt: ANSI sequences on Ubuntu under gnome-terminal.
+
+.. image:: https://github.com/tartley/colorama/raw/master/screenshots/windows-demo.png
+    :width: 668
+    :height: 325
+    :alt: Same ANSI sequences on Windows, using Colorama.
+
+These screenshots show that, on Windows, Colorama does not support ANSI 'dim
+text'; it looks the same as 'normal text'.
+
+Usage
+-----
+
+Initialisation
+..............
+
+If the only thing you want from Colorama is to get ANSI escapes to work on
+Windows, then run:
+
+.. code-block:: python
+
+    from colorama import just_fix_windows_console
+    just_fix_windows_console()
+
+If you're on a recent version of Windows 10 or better, and your stdout/stderr
+are pointing to a Windows console, then this will flip the magic configuration
+switch to enable Windows' built-in ANSI support.
+
+If you're on an older version of Windows, and your stdout/stderr are pointing to
+a Windows console, then this will wrap ``sys.stdout`` and/or ``sys.stderr`` in a
+magic file object that intercepts ANSI escape sequences and issues the
+appropriate Win32 calls to emulate them.
+
+In all other circumstances, it does nothing whatsoever. Basically the idea is
+that this makes Windows act like Unix with respect to ANSI escape handling.
+
+It's safe to call this function multiple times. It's safe to call this function
+on non-Windows platforms, but it won't do anything. It's safe to call this
+function when one or both of your stdout/stderr are redirected to a file – it
+won't do anything to those streams.
+
+Alternatively, you can use the older interface with more features (but also more
+potential footguns):
+
+.. code-block:: python
+
+    from colorama import init
+    init()
+
+This does the same thing as ``just_fix_windows_console``, except for the
+following differences:
+
+- It's not safe to call ``init`` multiple times; you can end up with multiple
+  layers of wrapping and broken ANSI support.
+
+- Colorama will apply a heuristic to guess whether stdout/stderr support ANSI,
+  and if it thinks they don't, then it will wrap ``sys.stdout`` and
+  ``sys.stderr`` in a magic file object that strips out ANSI escape sequences
+  before printing them. This happens on all platforms, and can be convenient if
+  you want to write your code to emit ANSI escape sequences unconditionally, and
+  let Colorama decide whether they should actually be output. But note that
+  Colorama's heuristic is not particularly clever.
+
+- ``init`` also accepts explicit keyword args to enable/disable various
+  functionality – see below.
+
+To stop using Colorama before your program exits, simply call ``deinit()``.
+This will restore ``stdout`` and ``stderr`` to their original values, so that
+Colorama is disabled. To resume using Colorama again, call ``reinit()``; it is
+cheaper than calling ``init()`` again (but does the same thing).
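+
+For example, a brief sketch of suspending and resuming the wrapping around a
+block of plain output, using only the ``init``, ``deinit`` and ``reinit``
+calls described above:
+
+.. code-block:: python
+
+    from colorama import init, deinit, reinit
+
+    init()    # wrap stdout/stderr once at startup
+    print('\033[31m' + 'red while Colorama is active')
+    deinit()  # restore the original streams
+    print('\033[39m' + 'raw ANSI passes through while disabled')
+    reinit()  # cheap re-enable; no new wrapping layer
+    print('wrapping is active again')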
+
+Most users should depend on ``colorama >= 0.4.6``, and use
+``just_fix_windows_console``. The old ``init`` interface will be supported
+indefinitely for backwards compatibility, but we don't plan to fix any issues
+with it, also for backwards compatibility.
+
+Colored Output
+..............
+
+Cross-platform printing of colored text can then be done using Colorama's
+constant shorthand for ANSI escape sequences. These are deliberately
+rudimentary, see below.
+
+.. code-block:: python
+
+    from colorama import Fore, Back, Style
+    print(Fore.RED + 'some red text')
+    print(Back.GREEN + 'and with a green background')
+    print(Style.DIM + 'and in dim text')
+    print(Style.RESET_ALL)
+    print('back to normal now')
+
+...or simply by manually printing ANSI sequences from your own code:
+
+.. code-block:: python
+
+    print('\033[31m' + 'some red text')
+    print('\033[39m') # and reset to default color
+
+...or, Colorama can be used in conjunction with existing ANSI libraries
+such as the venerable `Termcolor <https://pypi.org/project/termcolor/>`_,
+the fabulous `Blessings <https://pypi.org/project/blessings/>`_,
+or the incredible `Rich <https://pypi.org/project/rich/>`_.
+
+If you wish Colorama's Fore, Back and Style constants were more capable,
+then consider using one of the above highly capable libraries to generate
+colors, etc, and use Colorama just for its primary purpose: to convert
+those ANSI sequences to also work on Windows:
+
+SIMILARLY, do not send PRs adding the generation of new ANSI types to Colorama.
+We are only interested in converting ANSI codes to win32 API calls, not
+shortcuts like the above to generate ANSI characters.
+
+.. code-block:: python
+
+    from colorama import just_fix_windows_console
+    from termcolor import colored
+
+    # use Colorama to make Termcolor work on Windows too
+    just_fix_windows_console()
+
+    # then use Termcolor for all colored text output
+    print(colored('Hello, World!', 'green', 'on_red'))
+
+Available formatting constants are::
+
+    Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
+    Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
+    Style: DIM, NORMAL, BRIGHT, RESET_ALL
+
+``Style.RESET_ALL`` resets foreground, background, and brightness. Colorama will
+perform this reset automatically on program exit.
+
+These are fairly well supported, but not part of the standard::
+
+    Fore: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX
+    Back: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX
+
+Cursor Positioning
+..................
+
+ANSI codes to reposition the cursor are supported. See ``demos/demo06.py`` for
+an example of how to generate them.
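+
+As a quick sketch using the ``Cursor`` helpers the package exports (``POS``,
+``UP``, ``DOWN``, ``FORWARD`` and ``BACK``):
+
+.. code-block:: python
+
+    from colorama import init, Cursor
+
+    init()
+    # jump to column 1, row 1, then move relative to that position
+    print(Cursor.POS(1, 1) + 'top-left')
+    print(Cursor.DOWN(2) + Cursor.FORWARD(4) + 'two rows down, four over')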
+
+Init Keyword Args
+.................
+
+``init()`` accepts some ``**kwargs`` to override default behaviour.
+
+init(autoreset=False):
+    If you find yourself repeatedly sending reset sequences to turn off color
+    changes at the end of every print, then ``init(autoreset=True)`` will
+    automate that:
+
+    .. code-block:: python
+
+        from colorama import init
+        init(autoreset=True)
+        print(Fore.RED + 'some red text')
+        print('automatically back to default color again')
+
+init(strip=None):
+    Pass ``True`` or ``False`` to override whether ANSI codes should be
+    stripped from the output. The default behaviour is to strip if on Windows
+    or if output is redirected (not a tty).
+
+init(convert=None):
+    Pass ``True`` or ``False`` to override whether to convert ANSI codes in the
+    output into win32 calls. The default behaviour is to convert if on Windows
+    and output is to a tty (terminal).
+
+init(wrap=True):
+    On Windows, Colorama works by replacing ``sys.stdout`` and ``sys.stderr``
+    with proxy objects, which override the ``.write()`` method to do their work.
+    If this wrapping causes you problems, then this can be disabled by passing
+    ``init(wrap=False)``. The default behaviour is to wrap if ``autoreset`` or
+    ``strip`` or ``convert`` are True.
+
+    When wrapping is disabled, colored printing on non-Windows platforms will
+    continue to work as normal. To do cross-platform colored output, you can
+    use Colorama's ``AnsiToWin32`` proxy directly:
+
+    .. code-block:: python
+
+        import sys
+        from colorama import init, AnsiToWin32
+        init(wrap=False)
+        stream = AnsiToWin32(sys.stderr).stream
+
+        # Python 2
+        print >>stream, Fore.BLUE + 'blue text on stderr'
+
+        # Python 3
+        print(Fore.BLUE + 'blue text on stderr', file=stream)
+
+Recognised ANSI Sequences
+.........................
+
+ANSI sequences generally take the form::
+
+    ESC [ <param> ; <param> ... <command>
+
+Where ``<param>`` is an integer, and ``<command>`` is a single letter. Zero or
+more params are passed to a ``<command>``. If no params are passed, it is
+generally synonymous with passing a single zero. No spaces exist in the
+sequence; they have been inserted here simply to read more easily.
+
+The only ANSI sequences that Colorama converts into win32 calls are::
+
+    ESC [ 0 m       # reset all (colors and brightness)
+    ESC [ 1 m       # bright
+    ESC [ 2 m       # dim (looks same as normal brightness)
+    ESC [ 22 m      # normal brightness
+
+    # FOREGROUND:
+    ESC [ 30 m      # black
+    ESC [ 31 m      # red
+    ESC [ 32 m      # green
+    ESC [ 33 m      # yellow
+    ESC [ 34 m      # blue
+    ESC [ 35 m      # magenta
+    ESC [ 36 m      # cyan
+    ESC [ 37 m      # white
+    ESC [ 39 m      # reset
+
+    # BACKGROUND
+    ESC [ 40 m      # black
+    ESC [ 41 m      # red
+    ESC [ 42 m      # green
+    ESC [ 43 m      # yellow
+    ESC [ 44 m      # blue
+    ESC [ 45 m      # magenta
+    ESC [ 46 m      # cyan
+    ESC [ 47 m      # white
+    ESC [ 49 m      # reset
+
+    # cursor positioning
+    ESC [ y;x H     # position cursor at x across, y down
+    ESC [ y;x f     # position cursor at x across, y down
+    ESC [ n A       # move cursor n lines up
+    ESC [ n B       # move cursor n lines down
+    ESC [ n C       # move cursor n characters forward
+    ESC [ n D       # move cursor n characters backward
+
+    # clear the screen
+    ESC [ mode J    # clear the screen
+
+    # clear the line
+    ESC [ mode K    # clear the line
+
+Multiple numeric params to the ``'m'`` command can be combined into a single
+sequence::
+
+    ESC [ 36 ; 45 ; 1 m     # bright cyan text on magenta background
+
+All other ANSI sequences of the form ``ESC [ <param> ; <param> ... <command>``
+are silently stripped from the output on Windows.
+
+Any other form of ANSI sequence, such as single-character codes or alternative
+initial characters, are not recognised or stripped. It would be cool to add
+them though. Let me know if it would be useful for you, via the Issues on
+GitHub.
+
+Status & Known Problems
+-----------------------
+
+I've personally only tested it on Windows XP (CMD, Console2), Ubuntu
+(gnome-terminal, xterm), and OS X.
+
+Some valid ANSI sequences aren't recognised.
+
+If you're hacking on the code, see `README-hacking.md`_. ESPECIALLY, see the
+explanation there of why we do not want PRs that allow Colorama to generate new
+types of ANSI codes.
+
+See outstanding issues and wish-list:
+https://github.com/tartley/colorama/issues
+
+If anything doesn't work for you, or doesn't do what you expected or hoped for,
+I'd love to hear about it on that issues list, would be delighted by patches,
+and would be happy to grant commit access to anyone who submits a working patch
+or two.
+
+.. _README-hacking.md: README-hacking.md
+
+License
+-------
+
+Copyright Jonathan Hartley & Arnon Yaari, 2013-2020. BSD 3-Clause license; see
+LICENSE file.
+
+Professional support
+--------------------
+
+.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png
+   :alt: Tidelift
+   :target: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme
+
+.. list-table::
+   :widths: 10 100
+
+   * - |tideliftlogo|
+     - Professional support for colorama is available as part of the
+       `Tidelift Subscription`_.
+       Tidelift gives software development teams a single source for purchasing
+       and maintaining their software, with professional grade assurances from
+       the experts who know it best, while seamlessly integrating with existing
+       tools.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme
+
+Thanks
+------
+
+See the CHANGELOG for more thanks!
+
+* Marc Schlaich (schlamar) for a ``setup.py`` fix for Python2.5.
+* Marc Abramowitz, reported & fixed a crash on exit with closed ``stdout``,
+  providing a solution to issue #7's setuptools/distutils debate,
+  and other fixes.
+* User 'eryksun', for guidance on correctly instantiating ``ctypes.windll``.
+* Matthew McCormick for politely pointing out a longstanding crash on non-Win.
+* Ben Hoyt, for a magnificent fix under 64-bit Windows.
+* Jesse at Empty Square for submitting a fix for examples in the README.
+* User 'jamessp', an observant documentation fix for cursor positioning.
+* User 'vaal1239', Dave Mckee & Lackner Kristof for a tiny but much-needed Win7
+  fix.
+* Julien Stuyck, for wisely suggesting Python3 compatible updates to README.
+* Daniel Griffith for multiple fabulous patches.
+* Oscar Lesta for a valuable fix to stop ANSI chars being sent to non-tty
+  output.
+* Roger Binns, for many suggestions, valuable feedback, & bug reports.
+* Tim Golden for thought and much appreciated feedback on the initial idea.
+* User 'Zearin' for updates to the README file.
+* John Szakmeister for adding support for light colors
+* Charles Merriam for adding documentation to demos
+* Jurko for a fix on 64-bit Windows CPython2.5 w/o ctypes
+* Florian Bruhin for a fix when stdout or stderr are None
+* Thomas Weininger for fixing ValueError on Windows
+* Remi Rampin for better Github integration and fixes to the README file
+* Simeon Visser for closing a file handle using 'with' and updating classifiers
+  to include Python 3.3 and 3.4
+* Andy Neff for fixing RESET of LIGHT_EX colors.
+* Jonathan Hartley for the initial idea and implementation.
diff --git a/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/RECORD b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/RECORD
new file mode 100644
index 0000000..2a41455
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/RECORD
@@ -0,0 +1,32 @@
+colorama-0.4.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+colorama-0.4.6.dist-info/METADATA,sha256=e67SnrUMOym9sz_4TjF3vxvAV4T3aF7NyqRHHH3YEMw,17158
+colorama-0.4.6.dist-info/RECORD,,
+colorama-0.4.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+colorama-0.4.6.dist-info/WHEEL,sha256=cdcF4Fbd0FPtw2EMIOwH-3rSOTUdTCeOSXRMD1iLUb8,105
+colorama-0.4.6.dist-info/licenses/LICENSE.txt,sha256=ysNcAmhuXQSlpxQL-zs25zrtSWZW6JEQLkKIhteTAxg,1491
+colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266
+colorama/__pycache__/__init__.cpython-38.pyc,,
+colorama/__pycache__/ansi.cpython-38.pyc,,
+colorama/__pycache__/ansitowin32.cpython-38.pyc,,
+colorama/__pycache__/initialise.cpython-38.pyc,,
+colorama/__pycache__/win32.cpython-38.pyc,,
+colorama/__pycache__/winterm.cpython-38.pyc,,
+colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522
+colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128
+colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325
+colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75
+colorama/tests/__pycache__/__init__.cpython-38.pyc,,
+colorama/tests/__pycache__/ansi_test.cpython-38.pyc,,
+colorama/tests/__pycache__/ansitowin32_test.cpython-38.pyc,,
+colorama/tests/__pycache__/initialise_test.cpython-38.pyc,,
+colorama/tests/__pycache__/isatty_test.cpython-38.pyc,,
+colorama/tests/__pycache__/utils.cpython-38.pyc,,
+colorama/tests/__pycache__/winterm_test.cpython-38.pyc,,
+colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839
+colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678
+colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741
+colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866
+colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079
+colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709
+colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181
+colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134
diff --git a/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/REQUESTED b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/WHEEL b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/WHEEL
new file mode 100644
index 0000000..d79189f
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.11.1
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
diff --git a/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt
new file mode 100644
index 0000000..3105888
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt
@@ -0,0 +1,27 @@
+Copyright (c) 2010 Jonathan Hartley
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holders, nor those of its contributors
+  may be used to endorse or promote products derived from this software without
+  specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/venv/lib/python3.8/site-packages/colorama/__init__.py b/venv/lib/python3.8/site-packages/colorama/__init__.py
new file mode 100644
index 0000000..383101c
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/__init__.py
@@ -0,0 +1,7 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console
+from .ansi import Fore, Back, Style, Cursor
+from .ansitowin32 import AnsiToWin32
+
+__version__ = '0.4.6'
+
diff --git a/venv/lib/python3.8/site-packages/colorama/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..bccac6b
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/__pycache__/ansi.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/__pycache__/ansi.cpython-38.pyc
new file mode 100644
index 0000000..8d68ae3
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/__pycache__/ansi.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/__pycache__/ansitowin32.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/__pycache__/ansitowin32.cpython-38.pyc
new file mode 100644
index 0000000..13ae4a5
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/__pycache__/ansitowin32.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/__pycache__/initialise.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/__pycache__/initialise.cpython-38.pyc
new file mode 100644
index 0000000..1df189d
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/__pycache__/initialise.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/__pycache__/win32.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/__pycache__/win32.cpython-38.pyc
new file mode 100644
index 0000000..acbda3d
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/__pycache__/win32.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/__pycache__/winterm.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/__pycache__/winterm.cpython-38.pyc
new file mode 100644
index 0000000..efe14c7
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/__pycache__/winterm.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/ansi.py b/venv/lib/python3.8/site-packages/colorama/ansi.py
new file mode 100644
index 0000000..11ec695
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/ansi.py
@@ -0,0 +1,102 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+'''
+This module generates ANSI character codes for printing colors to terminals.
+See: http://en.wikipedia.org/wiki/ANSI_escape_code
+'''
+
+CSI = '\033['
+OSC = '\033]'
+BEL = '\a'
+
+
+def code_to_chars(code):
+    return CSI + str(code) + 'm'
+
+def set_title(title):
+    return OSC + '2;' + title + BEL
+
+def clear_screen(mode=2):
+    return CSI + str(mode) + 'J'
+
+def clear_line(mode=2):
+    return CSI + str(mode) + 'K'
+
+
+class AnsiCodes(object):
+    def __init__(self):
+        # the subclasses declare class attributes which are numbers.
+        # Upon instantiation we define instance attributes, which are the same
+        # as the class attributes but wrapped with the ANSI escape sequence
+        for name in dir(self):
+            if not name.startswith('_'):
+                value = getattr(self, name)
+                setattr(self, name, code_to_chars(value))
+
+
+class AnsiCursor(object):
+    def UP(self, n=1):
+        return CSI + str(n) + 'A'
+    def DOWN(self, n=1):
+        return CSI + str(n) + 'B'
+    def FORWARD(self, n=1):
+        return CSI + str(n) + 'C'
+    def BACK(self, n=1):
+        return CSI + str(n) + 'D'
+    def POS(self, x=1, y=1):
+        return CSI + str(y) + ';' + str(x) + 'H'
+
+
+class AnsiFore(AnsiCodes):
+    BLACK           = 30
+    RED             = 31
+    GREEN           = 32
+    YELLOW          = 33
+    BLUE            = 34
+    MAGENTA         = 35
+    CYAN            = 36
+    WHITE           = 37
+    RESET           = 39
+
+    # These are fairly well supported, but not part of the standard.
+    LIGHTBLACK_EX   = 90
+    LIGHTRED_EX     = 91
+    LIGHTGREEN_EX   = 92
+    LIGHTYELLOW_EX  = 93
+    LIGHTBLUE_EX    = 94
+    LIGHTMAGENTA_EX = 95
+    LIGHTCYAN_EX    = 96
+    LIGHTWHITE_EX   = 97
+
+
+class AnsiBack(AnsiCodes):
+    BLACK           = 40
+    RED             = 41
+    GREEN           = 42
+    YELLOW          = 43
+    BLUE            = 44
+    MAGENTA         = 45
+    CYAN            = 46
+    WHITE           = 47
+    RESET           = 49
+
+    # These are fairly well supported, but not part of the standard.
+    LIGHTBLACK_EX   = 100
+    LIGHTRED_EX     = 101
+    LIGHTGREEN_EX   = 102
+    LIGHTYELLOW_EX  = 103
+    LIGHTBLUE_EX    = 104
+    LIGHTMAGENTA_EX = 105
+    LIGHTCYAN_EX    = 106
+    LIGHTWHITE_EX   = 107
+
+
+class AnsiStyle(AnsiCodes):
+    BRIGHT    = 1
+    DIM       = 2
+    NORMAL    = 22
+    RESET_ALL = 0
+
+Fore   = AnsiFore()
+Back   = AnsiBack()
+Style  = AnsiStyle()
+Cursor = AnsiCursor()
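+
+# Illustrative usage (these names are re-exported by the colorama package):
+#
+#     from colorama import Fore, Back, Style
+#     print(Fore.RED + Back.YELLOW + 'warning' + Style.RESET_ALL)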
diff --git a/venv/lib/python3.8/site-packages/colorama/ansitowin32.py b/venv/lib/python3.8/site-packages/colorama/ansitowin32.py
new file mode 100644
index 0000000..abf209e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/ansitowin32.py
@@ -0,0 +1,277 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import re
+import sys
+import os
+
+from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL
+from .winterm import enable_vt_processing, WinTerm, WinColor, WinStyle
+from .win32 import windll, winapi_test
+
+
+winterm = None
+if windll is not None:
+    winterm = WinTerm()
+
+
+class StreamWrapper(object):
+    '''
+    Wraps a stream (such as stdout), acting as a transparent proxy for all
+    attribute access apart from method 'write()', which is delegated to our
+    Converter instance.
+    '''
+    def __init__(self, wrapped, converter):
+        # double-underscore everything to prevent clashes with names of
+        # attributes on the wrapped stream object.
+        self.__wrapped = wrapped
+        self.__convertor = converter
+
+    def __getattr__(self, name):
+        return getattr(self.__wrapped, name)
+
+    def __enter__(self, *args, **kwargs):
+        # special method lookup bypasses __getattr__/__getattribute__, see
+        # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
+        # thus, contextlib magic methods are not proxied via __getattr__
+        return self.__wrapped.__enter__(*args, **kwargs)
+
+    def __exit__(self, *args, **kwargs):
+        return self.__wrapped.__exit__(*args, **kwargs)
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+
+    def __getstate__(self):
+        return self.__dict__
+
+    def write(self, text):
+        self.__convertor.write(text)
+
+    def isatty(self):
+        stream = self.__wrapped
+        if 'PYCHARM_HOSTED' in os.environ:
+            if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
+                return True
+        try:
+            stream_isatty = stream.isatty
+        except AttributeError:
+            return False
+        else:
+            return stream_isatty()
+
+    @property
+    def closed(self):
+        stream = self.__wrapped
+        try:
+            return stream.closed
+        # AttributeError in the case that the stream doesn't support being closed
+        # ValueError for the case that the stream has already been detached when atexit runs
+        except (AttributeError, ValueError):
+            return True
+
+
+class AnsiToWin32(object):
+    '''
+    Implements a 'write()' method which, on Windows, will strip ANSI character
+    sequences from the text, and if outputting to a tty, will convert them into
+    win32 function calls.
+    '''
+    ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?')   # Control Sequence Introducer
+    ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?')             # Operating System Command
+
+    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
+        # The wrapped stream (normally sys.stdout or sys.stderr)
+        self.wrapped = wrapped
+
+        # should we reset colors to defaults after every .write()?
+        self.autoreset = autoreset
+
+        # create the proxy wrapping our output stream
+        self.stream = StreamWrapper(wrapped, self)
+
+        on_windows = os.name == 'nt'
+        # We test if the WinAPI works, because even if we are on Windows
+        # we may be using a terminal that doesn't support the WinAPI
+        # (e.g. Cygwin Terminal). In this case it's up to the terminal
+        # to support the ANSI codes.
+        conversion_supported = on_windows and winapi_test()
+        try:
+            fd = wrapped.fileno()
+        except Exception:
+            fd = -1
+        system_has_native_ansi = not on_windows or enable_vt_processing(fd)
+        have_tty = not self.stream.closed and self.stream.isatty()
+        need_conversion = conversion_supported and not system_has_native_ansi
+
+        # should we strip ANSI sequences from our output?
+        if strip is None:
+            strip = need_conversion or not have_tty
+        self.strip = strip
+
+        # should we convert ANSI sequences into win32 calls?
+        if convert is None:
+            convert = need_conversion and have_tty
+        self.convert = convert
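+        # Net effect of the defaults above (illustrative summary):
+        #   tty with native ANSI (or any non-Windows tty) -> strip=False, convert=False
+        #   tty on a legacy Windows console               -> strip=True,  convert=True
+        #   not a tty (e.g. output piped to a file)       -> strip=True,  convert=False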
+
+        # dict of ansi codes to win32 functions and parameters
+        self.win32_calls = self.get_win32_calls()
+
+        # are we wrapping stderr?
+        self.on_stderr = self.wrapped is sys.stderr
+
+    def should_wrap(self):
+        '''
+        True if this class is actually needed. If false, then the output
+        stream will not be affected, nor will win32 calls be issued, so
+        wrapping stdout is not actually required. This will generally be
+        False on non-Windows platforms, unless optional functionality like
+        autoreset has been requested using kwargs to init()
+        '''
+        return self.convert or self.strip or self.autoreset
+
+    def get_win32_calls(self):
+        if self.convert and winterm:
+            return {
+                AnsiStyle.RESET_ALL: (winterm.reset_all, ),
+                AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
+                AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
+                AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
+                AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
+                AnsiFore.RED: (winterm.fore, WinColor.RED),
+                AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
+                AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
+                AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
+                AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
+                AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
+                AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
+                AnsiFore.RESET: (winterm.fore, ),
+                AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
+                AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
+                AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
+                AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
+                AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
+                AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
+                AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
+                AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
+                AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
+                AnsiBack.RED: (winterm.back, WinColor.RED),
+                AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
+                AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
+                AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
+                AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
+                AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
+                AnsiBack.WHITE: (winterm.back, WinColor.GREY),
+                AnsiBack.RESET: (winterm.back, ),
+                AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
+                AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
+                AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
+                AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
+                AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
+                AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
+                AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
+                AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
+            }
+        return dict()
+
+    def write(self, text):
+        if self.strip or self.convert:
+            self.write_and_convert(text)
+        else:
+            self.wrapped.write(text)
+            self.wrapped.flush()
+        if self.autoreset:
+            self.reset_all()
+
+
+    def reset_all(self):
+        if self.convert:
+            self.call_win32('m', (0,))
+        elif not self.strip and not self.stream.closed:
+            self.wrapped.write(Style.RESET_ALL)
+
+
+    def write_and_convert(self, text):
+        '''
+        Write the given text to our wrapped stream, stripping any ANSI
+        sequences from the text, and optionally converting them into win32
+        calls.
+        '''
+        cursor = 0
+        text = self.convert_osc(text)
+        for match in self.ANSI_CSI_RE.finditer(text):
+            start, end = match.span()
+            self.write_plain_text(text, cursor, start)
+            self.convert_ansi(*match.groups())
+            cursor = end
+        self.write_plain_text(text, cursor, len(text))
+
+
+    def write_plain_text(self, text, start, end):
+        if start < end:
+            self.wrapped.write(text[start:end])
+            self.wrapped.flush()
+
+
+    def convert_ansi(self, paramstring, command):
+        if self.convert:
+            params = self.extract_params(command, paramstring)
+            self.call_win32(command, params)
+
+
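+    # Illustrative results of the parsing below:
+    #   extract_params('m', '')     -> (0,)    # 'm' defaults to a single 0
+    #   extract_params('m', '1;31') -> (1, 31)
+    #   extract_params('H', '')     -> (1, 1)  # 'H'/'f' pad to two 1s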
+    def extract_params(self, command, paramstring):
+        if command in 'Hf':
+            params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
+            while len(params) < 2:
+                # defaults:
+                params = params + (1,)
+        else:
+            params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
+            if len(params) == 0:
+                # defaults:
+                if command in 'JKm':
+                    params = (0,)
+                elif command in 'ABCD':
+                    params = (1,)
+
+        return params
+
+
+    def call_win32(self, command, params):
+        if command == 'm':
+            for param in params:
+                if param in self.win32_calls:
+                    func_args = self.win32_calls[param]
+                    func = func_args[0]
+                    args = func_args[1:]
+                    kwargs = dict(on_stderr=self.on_stderr)
+                    func(*args, **kwargs)
+        elif command in 'J':
+            winterm.erase_screen(params[0], on_stderr=self.on_stderr)
+        elif command in 'K':
+            winterm.erase_line(params[0], on_stderr=self.on_stderr)
+        elif command in 'Hf':     # cursor position - absolute
+            winterm.set_cursor_position(params, on_stderr=self.on_stderr)
+        elif command in 'ABCD':   # cursor position - relative
+            n = params[0]
+            # A - up, B - down, C - forward, D - back
+            x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
+            winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
+
+
+    def convert_osc(self, text):
+        for match in self.ANSI_OSC_RE.finditer(text):
+            start, end = match.span()
+            text = text[:start] + text[end:]
+            paramstring, command = match.groups()
+            if command == BEL:
+                if paramstring.count(";") == 1:
+                    params = paramstring.split(";")
+                    # 0 - change title and icon (we will only change title)
+                    # 1 - change icon (we don't support this)
+                    # 2 - change title
+                    if params[0] in '02':
+                        winterm.set_title(params[1])
+        return text
+
+
+    def flush(self):
+        self.wrapped.flush()
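+
+# Illustrative use of the wrapper on its own (a documented colorama pattern):
+#
+#     import sys
+#     from colorama.ansitowin32 import AnsiToWin32
+#     stream = AnsiToWin32(sys.stderr).stream
+#     print('\033[31mred text\033[0m', file=stream)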
diff --git a/venv/lib/python3.8/site-packages/colorama/initialise.py b/venv/lib/python3.8/site-packages/colorama/initialise.py
new file mode 100644
index 0000000..d5fd4b7
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/initialise.py
@@ -0,0 +1,121 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import atexit
+import contextlib
+import sys
+
+from .ansitowin32 import AnsiToWin32
+
+
+def _wipe_internal_state_for_tests():
+    global orig_stdout, orig_stderr
+    orig_stdout = None
+    orig_stderr = None
+
+    global wrapped_stdout, wrapped_stderr
+    wrapped_stdout = None
+    wrapped_stderr = None
+
+    global atexit_done
+    atexit_done = False
+
+    global fixed_windows_console
+    fixed_windows_console = False
+
+    try:
+        # no-op if it wasn't registered
+        atexit.unregister(reset_all)
+    except AttributeError:
+        # python 2: no atexit.unregister. Oh well, we did our best.
+        pass
+
+
+def reset_all():
+    if AnsiToWin32 is not None:    # Issue #74: objects might become None at exit
+        AnsiToWin32(orig_stdout).reset_all()
+
+
+def init(autoreset=False, convert=None, strip=None, wrap=True):
+
+    if not wrap and any([autoreset, convert, strip]):
+        raise ValueError('wrap=False conflicts with any other arg=True')
+
+    global wrapped_stdout, wrapped_stderr
+    global orig_stdout, orig_stderr
+
+    orig_stdout = sys.stdout
+    orig_stderr = sys.stderr
+
+    if sys.stdout is None:
+        wrapped_stdout = None
+    else:
+        sys.stdout = wrapped_stdout = \
+            wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
+    if sys.stderr is None:
+        wrapped_stderr = None
+    else:
+        sys.stderr = wrapped_stderr = \
+            wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
+
+    global atexit_done
+    if not atexit_done:
+        atexit.register(reset_all)
+        atexit_done = True
+
+
+def deinit():
+    if orig_stdout is not None:
+        sys.stdout = orig_stdout
+    if orig_stderr is not None:
+        sys.stderr = orig_stderr
+
+
+def just_fix_windows_console():
+    global fixed_windows_console
+
+    if sys.platform != "win32":
+        return
+    if fixed_windows_console:
+        return
+    if wrapped_stdout is not None or wrapped_stderr is not None:
+        # Someone already ran init() and it did stuff, so we won't second-guess them
+        return
+
+    # On newer versions of Windows, AnsiToWin32.__init__ will implicitly enable the
+    # native ANSI support in the console as a side-effect. We only need to actually
+    # replace sys.stdout/stderr if we're in the old-style conversion mode.
+    new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False)
+    if new_stdout.convert:
+        sys.stdout = new_stdout
+    new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False)
+    if new_stderr.convert:
+        sys.stderr = new_stderr
+
+    fixed_windows_console = True
+
+@contextlib.contextmanager
+def colorama_text(*args, **kwargs):
+    init(*args, **kwargs)
+    try:
+        yield
+    finally:
+        deinit()
+
+
+def reinit():
+    if wrapped_stdout is not None:
+        sys.stdout = wrapped_stdout
+    if wrapped_stderr is not None:
+        sys.stderr = wrapped_stderr
+
+
+def wrap_stream(stream, convert, strip, autoreset, wrap):
+    if wrap:
+        wrapper = AnsiToWin32(stream,
+            convert=convert, strip=strip, autoreset=autoreset)
+        if wrapper.should_wrap():
+            stream = wrapper.stream
+    return stream
+
+
+# Use this for initial setup as well, to reduce code duplication
+_wipe_internal_state_for_tests()
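+
+# Typical entry points (illustrative sketch):
+#
+#     import colorama
+#     colorama.init(autoreset=True)   # wrap stdout/stderr where needed
+#     print('\033[32mok\033[0m')      # renders on legacy Windows consoles too
+#
+# or, for programs that emit ANSI sequences themselves:
+#
+#     colorama.just_fix_windows_console()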
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/__init__.py b/venv/lib/python3.8/site-packages/colorama/tests/__init__.py
new file mode 100644
index 0000000..8c5661e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/tests/__init__.py
@@ -0,0 +1 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..502d46d
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/ansi_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/ansi_test.cpython-38.pyc
new file mode 100644
index 0000000..97b35e1
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/ansi_test.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/ansitowin32_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/ansitowin32_test.cpython-38.pyc
new file mode 100644
index 0000000..2cc8478
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/ansitowin32_test.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/initialise_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/initialise_test.cpython-38.pyc
new file mode 100644
index 0000000..7c9eb17
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/initialise_test.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/isatty_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/isatty_test.cpython-38.pyc
new file mode 100644
index 0000000..d412410
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/isatty_test.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000..f858f5d
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/utils.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/winterm_test.cpython-38.pyc b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/winterm_test.cpython-38.pyc
new file mode 100644
index 0000000..00056b6
Binary files /dev/null and b/venv/lib/python3.8/site-packages/colorama/tests/__pycache__/winterm_test.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/ansi_test.py b/venv/lib/python3.8/site-packages/colorama/tests/ansi_test.py
new file mode 100644
index 0000000..0a20c80
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/tests/ansi_test.py
@@ -0,0 +1,76 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main
+
+from ..ansi import Back, Fore, Style
+from ..ansitowin32 import AnsiToWin32
+
+stdout_orig = sys.stdout
+stderr_orig = sys.stderr
+
+
+class AnsiTest(TestCase):
+
+    def setUp(self):
+        # sanity check: stdout should be a file or StringIO object.
+        # It will only be AnsiToWin32 if init() has previously wrapped it
+        self.assertNotEqual(type(sys.stdout), AnsiToWin32)
+        self.assertNotEqual(type(sys.stderr), AnsiToWin32)
+
+    def tearDown(self):
+        sys.stdout = stdout_orig
+        sys.stderr = stderr_orig
+
+
+    def testForeAttributes(self):
+        self.assertEqual(Fore.BLACK, '\033[30m')
+        self.assertEqual(Fore.RED, '\033[31m')
+        self.assertEqual(Fore.GREEN, '\033[32m')
+        self.assertEqual(Fore.YELLOW, '\033[33m')
+        self.assertEqual(Fore.BLUE, '\033[34m')
+        self.assertEqual(Fore.MAGENTA, '\033[35m')
+        self.assertEqual(Fore.CYAN, '\033[36m')
+        self.assertEqual(Fore.WHITE, '\033[37m')
+        self.assertEqual(Fore.RESET, '\033[39m')
+
+        # Check the light, extended versions.
+        self.assertEqual(Fore.LIGHTBLACK_EX, '\033[90m')
+        self.assertEqual(Fore.LIGHTRED_EX, '\033[91m')
+        self.assertEqual(Fore.LIGHTGREEN_EX, '\033[92m')
+        self.assertEqual(Fore.LIGHTYELLOW_EX, '\033[93m')
+        self.assertEqual(Fore.LIGHTBLUE_EX, '\033[94m')
+        self.assertEqual(Fore.LIGHTMAGENTA_EX, '\033[95m')
+        self.assertEqual(Fore.LIGHTCYAN_EX, '\033[96m')
+        self.assertEqual(Fore.LIGHTWHITE_EX, '\033[97m')
+
+
+    def testBackAttributes(self):
+        self.assertEqual(Back.BLACK, '\033[40m')
+        self.assertEqual(Back.RED, '\033[41m')
+        self.assertEqual(Back.GREEN, '\033[42m')
+        self.assertEqual(Back.YELLOW, '\033[43m')
+        self.assertEqual(Back.BLUE, '\033[44m')
+        self.assertEqual(Back.MAGENTA, '\033[45m')
+        self.assertEqual(Back.CYAN, '\033[46m')
+        self.assertEqual(Back.WHITE, '\033[47m')
+        self.assertEqual(Back.RESET, '\033[49m')
+
+        # Check the light, extended versions.
+        self.assertEqual(Back.LIGHTBLACK_EX, '\033[100m')
+        self.assertEqual(Back.LIGHTRED_EX, '\033[101m')
+        self.assertEqual(Back.LIGHTGREEN_EX, '\033[102m')
+        self.assertEqual(Back.LIGHTYELLOW_EX, '\033[103m')
+        self.assertEqual(Back.LIGHTBLUE_EX, '\033[104m')
+        self.assertEqual(Back.LIGHTMAGENTA_EX, '\033[105m')
+        self.assertEqual(Back.LIGHTCYAN_EX, '\033[106m')
+        self.assertEqual(Back.LIGHTWHITE_EX, '\033[107m')
+
+
+    def testStyleAttributes(self):
+        self.assertEqual(Style.DIM, '\033[2m')
+        self.assertEqual(Style.NORMAL, '\033[22m')
+        self.assertEqual(Style.BRIGHT, '\033[1m')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/ansitowin32_test.py b/venv/lib/python3.8/site-packages/colorama/tests/ansitowin32_test.py
new file mode 100644
index 0000000..91ca551
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/tests/ansitowin32_test.py
@@ -0,0 +1,294 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from io import StringIO, TextIOWrapper
+from unittest import TestCase, main
+try:
+    from contextlib import ExitStack
+except ImportError:
+    # python 2
+    from contextlib2 import ExitStack
+
+try:
+    from unittest.mock import MagicMock, Mock, patch
+except ImportError:
+    from mock import MagicMock, Mock, patch
+
+from ..ansitowin32 import AnsiToWin32, StreamWrapper
+from ..win32 import ENABLE_VIRTUAL_TERMINAL_PROCESSING
+from .utils import osname
+
+
+class StreamWrapperTest(TestCase):
+
+    def testIsAProxy(self):
+        mockStream = Mock()
+        wrapper = StreamWrapper(mockStream, None)
+        self.assertTrue( wrapper.random_attr is mockStream.random_attr )
+
+    def testDelegatesWrite(self):
+        mockStream = Mock()
+        mockConverter = Mock()
+        wrapper = StreamWrapper(mockStream, mockConverter)
+        wrapper.write('hello')
+        self.assertTrue(mockConverter.write.call_args, (('hello',), {}))
+
+    def testDelegatesContext(self):
+        mockConverter = Mock()
+        s = StringIO()
+        with StreamWrapper(s, mockConverter) as fp:
+            fp.write(u'hello')
+        self.assertTrue(s.closed)
+
+    def testProxyNoContextManager(self):
+        mockStream = MagicMock()
+        mockStream.__enter__.side_effect = AttributeError()
+        mockConverter = Mock()
+        with self.assertRaises(AttributeError) as excinfo:
+            with StreamWrapper(mockStream, mockConverter) as wrapper:
+                wrapper.write('hello')
+
+    def test_closed_shouldnt_raise_on_closed_stream(self):
+        stream = StringIO()
+        stream.close()
+        wrapper = StreamWrapper(stream, None)
+        self.assertEqual(wrapper.closed, True)
+
+    def test_closed_shouldnt_raise_on_detached_stream(self):
+        stream = TextIOWrapper(StringIO())
+        stream.detach()
+        wrapper = StreamWrapper(stream, None)
+        self.assertEqual(wrapper.closed, True)
+
+class AnsiToWin32Test(TestCase):
+
+    def testInit(self):
+        mockStdout = Mock()
+        auto = Mock()
+        stream = AnsiToWin32(mockStdout, autoreset=auto)
+        self.assertEqual(stream.wrapped, mockStdout)
+        self.assertEqual(stream.autoreset, auto)
+
+    @patch('colorama.ansitowin32.winterm', None)
+    @patch('colorama.ansitowin32.winapi_test', lambda *_: True)
+    def testStripIsTrueOnWindows(self):
+        with osname('nt'):
+            mockStdout = Mock()
+            stream = AnsiToWin32(mockStdout)
+            self.assertTrue(stream.strip)
+
+    def testStripIsFalseOffWindows(self):
+        with osname('posix'):
+            mockStdout = Mock(closed=False)
+            stream = AnsiToWin32(mockStdout)
+            self.assertFalse(stream.strip)
+
+    def testWriteStripsAnsi(self):
+        mockStdout = Mock()
+        stream = AnsiToWin32(mockStdout)
+        stream.wrapped = Mock()
+        stream.write_and_convert = Mock()
+        stream.strip = True
+
+        stream.write('abc')
+
+        self.assertFalse(stream.wrapped.write.called)
+        self.assertEqual(stream.write_and_convert.call_args, (('abc',), {}))
+
+    def testWriteDoesNotStripAnsi(self):
+        mockStdout = Mock()
+        stream = AnsiToWin32(mockStdout)
+        stream.wrapped = Mock()
+        stream.write_and_convert = Mock()
+        stream.strip = False
+        stream.convert = False
+
+        stream.write('abc')
+
+        self.assertFalse(stream.write_and_convert.called)
+        self.assertEqual(stream.wrapped.write.call_args, (('abc',), {}))
+
+    def assert_autoresets(self, convert, autoreset=True):
+        stream = AnsiToWin32(Mock())
+        stream.convert = convert
+        stream.reset_all = Mock()
+        stream.autoreset = autoreset
+        stream.winterm = Mock()
+
+        stream.write('abc')
+
+        self.assertEqual(stream.reset_all.called, autoreset)
+
+    def testWriteAutoresets(self):
+        self.assert_autoresets(convert=True)
+        self.assert_autoresets(convert=False)
+        self.assert_autoresets(convert=True, autoreset=False)
+        self.assert_autoresets(convert=False, autoreset=False)
+
+    def testWriteAndConvertWritesPlainText(self):
+        stream = AnsiToWin32(Mock())
+        stream.write_and_convert( 'abc' )
+        self.assertEqual( stream.wrapped.write.call_args, (('abc',), {}) )
+
+    def testWriteAndConvertStripsAllValidAnsi(self):
+        stream = AnsiToWin32(Mock())
+        stream.call_win32 = Mock()
+        data = [
+            'abc\033[mdef',
+            'abc\033[0mdef',
+            'abc\033[2mdef',
+            'abc\033[02mdef',
+            'abc\033[002mdef',
+            'abc\033[40mdef',
+            'abc\033[040mdef',
+            'abc\033[0;1mdef',
+            'abc\033[40;50mdef',
+            'abc\033[50;30;40mdef',
+            'abc\033[Adef',
+            'abc\033[0Gdef',
+            'abc\033[1;20;128Hdef',
+        ]
+        for datum in data:
+            stream.wrapped.write.reset_mock()
+            stream.write_and_convert( datum )
+            self.assertEqual(
+               [args[0] for args in stream.wrapped.write.call_args_list],
+               [ ('abc',), ('def',) ]
+            )
+
+    def testWriteAndConvertSkipsEmptySnippets(self):
+        stream = AnsiToWin32(Mock())
+        stream.call_win32 = Mock()
+        stream.write_and_convert( '\033[40m\033[41m' )
+        self.assertFalse( stream.wrapped.write.called )
+
+    def testWriteAndConvertCallsWin32WithParamsAndCommand(self):
+        stream = AnsiToWin32(Mock())
+        stream.convert = True
+        stream.call_win32 = Mock()
+        stream.extract_params = Mock(return_value='params')
+        data = {
+            'abc\033[adef':         ('a', 'params'),
+            'abc\033[;;bdef':       ('b', 'params'),
+            'abc\033[0cdef':        ('c', 'params'),
+            'abc\033[;;0;;Gdef':    ('G', 'params'),
+            'abc\033[1;20;128Hdef': ('H', 'params'),
+        }
+        for datum, expected in data.items():
+            stream.call_win32.reset_mock()
+            stream.write_and_convert( datum )
+            self.assertEqual( stream.call_win32.call_args[0], expected )
+
+    def test_reset_all_shouldnt_raise_on_closed_orig_stdout(self):
+        stream = StringIO()
+        converter = AnsiToWin32(stream)
+        stream.close()
+
+        converter.reset_all()
+
+    def test_wrap_shouldnt_raise_on_closed_orig_stdout(self):
+        stream = StringIO()
+        stream.close()
+        with \
+            patch("colorama.ansitowin32.os.name", "nt"), \
+            patch("colorama.ansitowin32.winapi_test", lambda: True):
+                converter = AnsiToWin32(stream)
+        self.assertTrue(converter.strip)
+        self.assertFalse(converter.convert)
+
+    def test_wrap_shouldnt_raise_on_missing_closed_attr(self):
+        with \
+            patch("colorama.ansitowin32.os.name", "nt"), \
+            patch("colorama.ansitowin32.winapi_test", lambda: True):
+                converter = AnsiToWin32(object())
+        self.assertTrue(converter.strip)
+        self.assertFalse(converter.convert)
+
+    def testExtractParams(self):
+        stream = AnsiToWin32(Mock())
+        data = {
+            '':               (0,),
+            ';;':             (0,),
+            '2':              (2,),
+            ';;002;;':        (2,),
+            '0;1':            (0, 1),
+            ';;003;;456;;':   (3, 456),
+            '11;22;33;44;55': (11, 22, 33, 44, 55),
+        }
+        for datum, expected in data.items():
+            self.assertEqual(stream.extract_params('m', datum), expected)
+
+    def testCallWin32UsesLookup(self):
+        listener = Mock()
+        stream = AnsiToWin32(listener)
+        stream.win32_calls = {
+            1: (lambda *_, **__: listener(11),),
+            2: (lambda *_, **__: listener(22),),
+            3: (lambda *_, **__: listener(33),),
+        }
+        stream.call_win32('m', (3, 1, 99, 2))
+        self.assertEqual(
+            [a[0][0] for a in listener.call_args_list],
+            [33, 11, 22] )
+
+    def test_osc_codes(self):
+        mockStdout = Mock()
+        stream = AnsiToWin32(mockStdout, convert=True)
+        with patch('colorama.ansitowin32.winterm') as winterm:
+            data = [
+                '\033]0\x07',                      # missing arguments
+                '\033]0;foo\x08',                  # wrong OSC command
+                '\033]0;colorama_test_title\x07',  # should work
+                '\033]1;colorama_test_title\x07',  # wrong set command
+                '\033]2;colorama_test_title\x07',  # should work
+                '\033]' + ';' * 64 + '\x08',       # see issue #247
+            ]
+            for code in data:
+                stream.write(code)
+            self.assertEqual(winterm.set_title.call_count, 2)
+
+    def test_native_windows_ansi(self):
+        with ExitStack() as stack:
+            def p(a, b):
+                stack.enter_context(patch(a, b, create=True))
+            # Pretend to be on Windows
+            p("colorama.ansitowin32.os.name", "nt")
+            p("colorama.ansitowin32.winapi_test", lambda: True)
+            p("colorama.win32.winapi_test", lambda: True)
+            p("colorama.winterm.win32.windll", "non-None")
+            p("colorama.winterm.get_osfhandle", lambda _: 1234)
+
+            # Pretend that our mock stream has native ANSI support
+            p(
+                "colorama.winterm.win32.GetConsoleMode",
+                lambda _: ENABLE_VIRTUAL_TERMINAL_PROCESSING,
+            )
+            SetConsoleMode = Mock()
+            p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode)
+
+            stdout = Mock()
+            stdout.closed = False
+            stdout.isatty.return_value = True
+            stdout.fileno.return_value = 1
+
+            # Our fake console says it has native vt support, so AnsiToWin32 should
+            # enable that support and do nothing else.
+            stream = AnsiToWin32(stdout)
+            SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+            self.assertFalse(stream.strip)
+            self.assertFalse(stream.convert)
+            self.assertFalse(stream.should_wrap())
+
+            # Now let's pretend we're on an old Windows console, that doesn't have
+            # native ANSI support.
+            p("colorama.winterm.win32.GetConsoleMode", lambda _: 0)
+            SetConsoleMode = Mock()
+            p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode)
+
+            stream = AnsiToWin32(stdout)
+            SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+            self.assertTrue(stream.strip)
+            self.assertTrue(stream.convert)
+            self.assertTrue(stream.should_wrap())
+
+
+if __name__ == '__main__':
+    main()
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/initialise_test.py b/venv/lib/python3.8/site-packages/colorama/tests/initialise_test.py
new file mode 100644
index 0000000..89f9b07
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/tests/initialise_test.py
@@ -0,0 +1,189 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main, skipUnless
+
+try:
+    from unittest.mock import patch, Mock
+except ImportError:
+    from mock import patch, Mock
+
+from ..ansitowin32 import StreamWrapper
+from ..initialise import init, just_fix_windows_console, _wipe_internal_state_for_tests
+from .utils import osname, replace_by
+
+orig_stdout = sys.stdout
+orig_stderr = sys.stderr
+
+
+class InitTest(TestCase):
+
+    @skipUnless(sys.stdout.isatty(), "sys.stdout is not a tty")
+    def setUp(self):
+        # sanity check
+        self.assertNotWrapped()
+
+    def tearDown(self):
+        _wipe_internal_state_for_tests()
+        sys.stdout = orig_stdout
+        sys.stderr = orig_stderr
+
+    def assertWrapped(self):
+        self.assertIsNot(sys.stdout, orig_stdout, 'stdout should be wrapped')
+        self.assertIsNot(sys.stderr, orig_stderr, 'stderr should be wrapped')
+        self.assertTrue(isinstance(sys.stdout, StreamWrapper),
+            'bad stdout wrapper')
+        self.assertTrue(isinstance(sys.stderr, StreamWrapper),
+            'bad stderr wrapper')
+
+    def assertNotWrapped(self):
+        self.assertIs(sys.stdout, orig_stdout, 'stdout should not be wrapped')
+        self.assertIs(sys.stderr, orig_stderr, 'stderr should not be wrapped')
+
+    @patch('colorama.initialise.reset_all')
+    @patch('colorama.ansitowin32.winapi_test', lambda *_: True)
+    @patch('colorama.ansitowin32.enable_vt_processing', lambda *_: False)
+    def testInitWrapsOnWindows(self, _):
+        with osname("nt"):
+            init()
+            self.assertWrapped()
+
+    @patch('colorama.initialise.reset_all')
+    @patch('colorama.ansitowin32.winapi_test', lambda *_: False)
+    def testInitDoesntWrapOnEmulatedWindows(self, _):
+        with osname("nt"):
+            init()
+            self.assertNotWrapped()
+
+    def testInitDoesntWrapOnNonWindows(self):
+        with osname("posix"):
+            init()
+            self.assertNotWrapped()
+
+    def testInitDoesntWrapIfNone(self):
+        with replace_by(None):
+            init()
+            # We can't use assertNotWrapped here because replace_by(None)
+            # changes stdout/stderr already.
+            self.assertIsNone(sys.stdout)
+            self.assertIsNone(sys.stderr)
+
+    def testInitAutoresetOnWrapsOnAllPlatforms(self):
+        with osname("posix"):
+            init(autoreset=True)
+            self.assertWrapped()
+
+    def testInitWrapOffDoesntWrapOnWindows(self):
+        with osname("nt"):
+            init(wrap=False)
+            self.assertNotWrapped()
+
+    def testInitWrapOffIncompatibleWithAutoresetOn(self):
+        self.assertRaises(ValueError, lambda: init(autoreset=True, wrap=False))
+
+    @patch('colorama.win32.SetConsoleTextAttribute')
+    @patch('colorama.initialise.AnsiToWin32')
+    def testAutoResetPassedOn(self, mockATW32, _):
+        with osname("nt"):
+            init(autoreset=True)
+            self.assertEqual(len(mockATW32.call_args_list), 2)
+            self.assertEqual(mockATW32.call_args_list[1][1]['autoreset'], True)
+            self.assertEqual(mockATW32.call_args_list[0][1]['autoreset'], True)
+
+    @patch('colorama.initialise.AnsiToWin32')
+    def testAutoResetChangeable(self, mockATW32):
+        with osname("nt"):
+            init()
+
+            init(autoreset=True)
+            self.assertEqual(len(mockATW32.call_args_list), 4)
+            self.assertEqual(mockATW32.call_args_list[2][1]['autoreset'], True)
+            self.assertEqual(mockATW32.call_args_list[3][1]['autoreset'], True)
+
+            init()
+            self.assertEqual(len(mockATW32.call_args_list), 6)
+            self.assertEqual(
+                mockATW32.call_args_list[4][1]['autoreset'], False)
+            self.assertEqual(
+                mockATW32.call_args_list[5][1]['autoreset'], False)
+
+
+    @patch('colorama.initialise.atexit.register')
+    def testAtexitRegisteredOnlyOnce(self, mockRegister):
+        init()
+        self.assertTrue(mockRegister.called)
+        mockRegister.reset_mock()
+        init()
+        self.assertFalse(mockRegister.called)
+
+
+class JustFixWindowsConsoleTest(TestCase):
+    def _reset(self):
+        _wipe_internal_state_for_tests()
+        sys.stdout = orig_stdout
+        sys.stderr = orig_stderr
+
+    def tearDown(self):
+        self._reset()
+
+    @patch("colorama.ansitowin32.winapi_test", lambda: True)
+    def testJustFixWindowsConsole(self):
+        if sys.platform != "win32":
+            # just_fix_windows_console should be a no-op
+            just_fix_windows_console()
+            self.assertIs(sys.stdout, orig_stdout)
+            self.assertIs(sys.stderr, orig_stderr)
+        else:
+            def fake_std():
+                # Emulate stdout=not a tty, stderr=tty
+                # to check that we handle both cases correctly
+                stdout = Mock()
+                stdout.closed = False
+                stdout.isatty.return_value = False
+                stdout.fileno.return_value = 1
+                sys.stdout = stdout
+
+                stderr = Mock()
+                stderr.closed = False
+                stderr.isatty.return_value = True
+                stderr.fileno.return_value = 2
+                sys.stderr = stderr
+
+            for native_ansi in [False, True]:
+                with patch(
+                    'colorama.ansitowin32.enable_vt_processing',
+                    lambda *_: native_ansi
+                ):
+                    self._reset()
+                    fake_std()
+
+                    # Regular single-call test
+                    prev_stdout = sys.stdout
+                    prev_stderr = sys.stderr
+                    just_fix_windows_console()
+                    self.assertIs(sys.stdout, prev_stdout)
+                    if native_ansi:
+                        self.assertIs(sys.stderr, prev_stderr)
+                    else:
+                        self.assertIsNot(sys.stderr, prev_stderr)
+
+                    # second call without resetting is always a no-op
+                    prev_stdout = sys.stdout
+                    prev_stderr = sys.stderr
+                    just_fix_windows_console()
+                    self.assertIs(sys.stdout, prev_stdout)
+                    self.assertIs(sys.stderr, prev_stderr)
+
+                    self._reset()
+                    fake_std()
+
+                    # If init() runs first, just_fix_windows_console should be a no-op
+                    init()
+                    prev_stdout = sys.stdout
+                    prev_stderr = sys.stderr
+                    just_fix_windows_console()
+                    self.assertIs(prev_stdout, sys.stdout)
+                    self.assertIs(prev_stderr, sys.stderr)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/isatty_test.py b/venv/lib/python3.8/site-packages/colorama/tests/isatty_test.py
new file mode 100644
index 0000000..0f84e4b
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/tests/isatty_test.py
@@ -0,0 +1,57 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main
+
+from ..ansitowin32 import StreamWrapper, AnsiToWin32
+from .utils import pycharm, replace_by, replace_original_by, StreamTTY, StreamNonTTY
+
+
+def is_a_tty(stream):
+    return StreamWrapper(stream, None).isatty()
+
+class IsattyTest(TestCase):
+
+    def test_TTY(self):
+        tty = StreamTTY()
+        self.assertTrue(is_a_tty(tty))
+        with pycharm():
+            self.assertTrue(is_a_tty(tty))
+
+    def test_nonTTY(self):
+        non_tty = StreamNonTTY()
+        self.assertFalse(is_a_tty(non_tty))
+        with pycharm():
+            self.assertFalse(is_a_tty(non_tty))
+
+    def test_withPycharm(self):
+        with pycharm():
+            self.assertTrue(is_a_tty(sys.stderr))
+            self.assertTrue(is_a_tty(sys.stdout))
+
+    def test_withPycharmTTYOverride(self):
+        tty = StreamTTY()
+        with pycharm(), replace_by(tty):
+            self.assertTrue(is_a_tty(tty))
+
+    def test_withPycharmNonTTYOverride(self):
+        non_tty = StreamNonTTY()
+        with pycharm(), replace_by(non_tty):
+            self.assertFalse(is_a_tty(non_tty))
+
+    def test_withPycharmNoneOverride(self):
+        with pycharm():
+            with replace_by(None), replace_original_by(None):
+                self.assertFalse(is_a_tty(None))
+                self.assertFalse(is_a_tty(StreamNonTTY()))
+                self.assertTrue(is_a_tty(StreamTTY()))
+
+    def test_withPycharmStreamWrapped(self):
+        with pycharm():
+            self.assertTrue(AnsiToWin32(StreamTTY()).stream.isatty())
+            self.assertFalse(AnsiToWin32(StreamNonTTY()).stream.isatty())
+            self.assertTrue(AnsiToWin32(sys.stdout).stream.isatty())
+            self.assertTrue(AnsiToWin32(sys.stderr).stream.isatty())
+
+
+if __name__ == '__main__':
+    main()
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/utils.py b/venv/lib/python3.8/site-packages/colorama/tests/utils.py
new file mode 100644
index 0000000..472fafb
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/tests/utils.py
@@ -0,0 +1,49 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from contextlib import contextmanager
+from io import StringIO
+import sys
+import os
+
+
+class StreamTTY(StringIO):
+    def isatty(self):
+        return True
+
+class StreamNonTTY(StringIO):
+    def isatty(self):
+        return False
+
+@contextmanager
+def osname(name):
+    orig = os.name
+    os.name = name
+    yield
+    os.name = orig
+
+@contextmanager
+def replace_by(stream):
+    orig_stdout = sys.stdout
+    orig_stderr = sys.stderr
+    sys.stdout = stream
+    sys.stderr = stream
+    yield
+    sys.stdout = orig_stdout
+    sys.stderr = orig_stderr
+
+@contextmanager
+def replace_original_by(stream):
+    orig_stdout = sys.__stdout__
+    orig_stderr = sys.__stderr__
+    sys.__stdout__ = stream
+    sys.__stderr__ = stream
+    yield
+    sys.__stdout__ = orig_stdout
+    sys.__stderr__ = orig_stderr
+
+@contextmanager
+def pycharm():
+    os.environ["PYCHARM_HOSTED"] = "1"
+    non_tty = StreamNonTTY()
+    with replace_by(non_tty), replace_original_by(non_tty):
+        yield
+    del os.environ["PYCHARM_HOSTED"]
diff --git a/venv/lib/python3.8/site-packages/colorama/tests/winterm_test.py b/venv/lib/python3.8/site-packages/colorama/tests/winterm_test.py
new file mode 100644
index 0000000..d0955f9
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/tests/winterm_test.py
@@ -0,0 +1,131 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main, skipUnless
+
+try:
+    from unittest.mock import Mock, patch
+except ImportError:
+    from mock import Mock, patch
+
+from ..winterm import WinColor, WinStyle, WinTerm
+
+
+class WinTermTest(TestCase):
+
+    @patch('colorama.winterm.win32')
+    def testInit(self, mockWin32):
+        mockAttr = Mock()
+        mockAttr.wAttributes = 7 + 6 * 16 + 8
+        mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+        term = WinTerm()
+        self.assertEqual(term._fore, 7)
+        self.assertEqual(term._back, 6)
+        self.assertEqual(term._style, 8)
+
+    @skipUnless(sys.platform.startswith("win"), "requires Windows")
+    def testGetAttrs(self):
+        term = WinTerm()
+
+        term._fore = 0
+        term._back = 0
+        term._style = 0
+        self.assertEqual(term.get_attrs(), 0)
+
+        term._fore = WinColor.YELLOW
+        self.assertEqual(term.get_attrs(), WinColor.YELLOW)
+
+        term._back = WinColor.MAGENTA
+        self.assertEqual(
+            term.get_attrs(),
+            WinColor.YELLOW + WinColor.MAGENTA * 16)
+
+        term._style = WinStyle.BRIGHT
+        self.assertEqual(
+            term.get_attrs(),
+            WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT)
+
+    @patch('colorama.winterm.win32')
+    def testResetAll(self, mockWin32):
+        mockAttr = Mock()
+        mockAttr.wAttributes = 1 + 2 * 16 + 8
+        mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+        term = WinTerm()
+
+        term.set_console = Mock()
+        term._fore = -1
+        term._back = -1
+        term._style = -1
+
+        term.reset_all()
+
+        self.assertEqual(term._fore, 1)
+        self.assertEqual(term._back, 2)
+        self.assertEqual(term._style, 8)
+        self.assertEqual(term.set_console.called, True)
+
+    @skipUnless(sys.platform.startswith("win"), "requires Windows")
+    def testFore(self):
+        term = WinTerm()
+        term.set_console = Mock()
+        term._fore = 0
+
+        term.fore(5)
+
+        self.assertEqual(term._fore, 5)
+        self.assertEqual(term.set_console.called, True)
+
+    @skipUnless(sys.platform.startswith("win"), "requires Windows")
+    def testBack(self):
+        term = WinTerm()
+        term.set_console = Mock()
+        term._back = 0
+
+        term.back(5)
+
+        self.assertEqual(term._back, 5)
+        self.assertEqual(term.set_console.called, True)
+
+    @skipUnless(sys.platform.startswith("win"), "requires Windows")
+    def testStyle(self):
+        term = WinTerm()
+        term.set_console = Mock()
+        term._style = 0
+
+        term.style(22)
+
+        self.assertEqual(term._style, 22)
+        self.assertEqual(term.set_console.called, True)
+
+    @patch('colorama.winterm.win32')
+    def testSetConsole(self, mockWin32):
+        mockAttr = Mock()
+        mockAttr.wAttributes = 0
+        mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+        term = WinTerm()
+        term.windll = Mock()
+
+        term.set_console()
+
+        self.assertEqual(
+            mockWin32.SetConsoleTextAttribute.call_args,
+            ((mockWin32.STDOUT, term.get_attrs()), {})
+        )
+
+    @patch('colorama.winterm.win32')
+    def testSetConsoleOnStderr(self, mockWin32):
+        mockAttr = Mock()
+        mockAttr.wAttributes = 0
+        mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+        term = WinTerm()
+        term.windll = Mock()
+
+        term.set_console(on_stderr=True)
+
+        self.assertEqual(
+            mockWin32.SetConsoleTextAttribute.call_args,
+            ((mockWin32.STDERR, term.get_attrs()), {})
+        )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/venv/lib/python3.8/site-packages/colorama/win32.py b/venv/lib/python3.8/site-packages/colorama/win32.py
new file mode 100644
index 0000000..841b0e2
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/win32.py
@@ -0,0 +1,180 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+
+# from winbase.h
+STDOUT = -11
+STDERR = -12
+
+ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
+
+try:
+    import ctypes
+    from ctypes import LibraryLoader
+    windll = LibraryLoader(ctypes.WinDLL)
+    from ctypes import wintypes
+except (AttributeError, ImportError):
+    windll = None
+    SetConsoleTextAttribute = lambda *_: None
+    winapi_test = lambda *_: None
+else:
+    from ctypes import byref, Structure, c_char, POINTER
+
+    COORD = wintypes._COORD
+
+    class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+        """struct in wincon.h."""
+        _fields_ = [
+            ("dwSize", COORD),
+            ("dwCursorPosition", COORD),
+            ("wAttributes", wintypes.WORD),
+            ("srWindow", wintypes.SMALL_RECT),
+            ("dwMaximumWindowSize", COORD),
+        ]
+        def __str__(self):
+            return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
+                self.dwSize.Y, self.dwSize.X,
+                self.dwCursorPosition.Y, self.dwCursorPosition.X,
+                self.wAttributes,
+                self.srWindow.Top, self.srWindow.Left,
+                self.srWindow.Bottom, self.srWindow.Right,
+                self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
+            )
+
+    _GetStdHandle = windll.kernel32.GetStdHandle
+    _GetStdHandle.argtypes = [
+        wintypes.DWORD,
+    ]
+    _GetStdHandle.restype = wintypes.HANDLE
+
+    _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
+    _GetConsoleScreenBufferInfo.argtypes = [
+        wintypes.HANDLE,
+        POINTER(CONSOLE_SCREEN_BUFFER_INFO),
+    ]
+    _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+
+    _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
+    _SetConsoleTextAttribute.argtypes = [
+        wintypes.HANDLE,
+        wintypes.WORD,
+    ]
+    _SetConsoleTextAttribute.restype = wintypes.BOOL
+
+    _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
+    _SetConsoleCursorPosition.argtypes = [
+        wintypes.HANDLE,
+        COORD,
+    ]
+    _SetConsoleCursorPosition.restype = wintypes.BOOL
+
+    _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
+    _FillConsoleOutputCharacterA.argtypes = [
+        wintypes.HANDLE,
+        c_char,
+        wintypes.DWORD,
+        COORD,
+        POINTER(wintypes.DWORD),
+    ]
+    _FillConsoleOutputCharacterA.restype = wintypes.BOOL
+
+    _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
+    _FillConsoleOutputAttribute.argtypes = [
+        wintypes.HANDLE,
+        wintypes.WORD,
+        wintypes.DWORD,
+        COORD,
+        POINTER(wintypes.DWORD),
+    ]
+    _FillConsoleOutputAttribute.restype = wintypes.BOOL
+
+    _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW
+    _SetConsoleTitleW.argtypes = [
+        wintypes.LPCWSTR
+    ]
+    _SetConsoleTitleW.restype = wintypes.BOOL
+
+    _GetConsoleMode = windll.kernel32.GetConsoleMode
+    _GetConsoleMode.argtypes = [
+        wintypes.HANDLE,
+        POINTER(wintypes.DWORD)
+    ]
+    _GetConsoleMode.restype = wintypes.BOOL
+
+    _SetConsoleMode = windll.kernel32.SetConsoleMode
+    _SetConsoleMode.argtypes = [
+        wintypes.HANDLE,
+        wintypes.DWORD
+    ]
+    _SetConsoleMode.restype = wintypes.BOOL
+
+    def _winapi_test(handle):
+        csbi = CONSOLE_SCREEN_BUFFER_INFO()
+        success = _GetConsoleScreenBufferInfo(
+            handle, byref(csbi))
+        return bool(success)
+
+    def winapi_test():
+        return any(_winapi_test(h) for h in
+                   (_GetStdHandle(STDOUT), _GetStdHandle(STDERR)))
+
+    def GetConsoleScreenBufferInfo(stream_id=STDOUT):
+        handle = _GetStdHandle(stream_id)
+        csbi = CONSOLE_SCREEN_BUFFER_INFO()
+        success = _GetConsoleScreenBufferInfo(
+            handle, byref(csbi))
+        return csbi
+
+    def SetConsoleTextAttribute(stream_id, attrs):
+        handle = _GetStdHandle(stream_id)
+        return _SetConsoleTextAttribute(handle, attrs)
+
+    def SetConsoleCursorPosition(stream_id, position, adjust=True):
+        position = COORD(*position)
+        # If the position is out of range (non-positive), do nothing.
+        if position.Y <= 0 or position.X <= 0:
+            return
+        # Adjust for Windows' SetConsoleCursorPosition:
+        #    1. being 0-based, while ANSI is 1-based.
+        #    2. expecting (x,y), while ANSI uses (y,x).
+        adjusted_position = COORD(position.Y - 1, position.X - 1)
+        if adjust:
+            # Adjust for viewport's scroll position
+            sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
+            adjusted_position.Y += sr.Top
+            adjusted_position.X += sr.Left
+        # Resume normal processing
+        handle = _GetStdHandle(stream_id)
+        return _SetConsoleCursorPosition(handle, adjusted_position)
+
+    def FillConsoleOutputCharacter(stream_id, char, length, start):
+        handle = _GetStdHandle(stream_id)
+        char = c_char(char.encode())
+        length = wintypes.DWORD(length)
+        num_written = wintypes.DWORD(0)
+        # Note that this is hard-coded for ANSI (vs wide) bytes.
+        success = _FillConsoleOutputCharacterA(
+            handle, char, length, start, byref(num_written))
+        return num_written.value
+
+    def FillConsoleOutputAttribute(stream_id, attr, length, start):
+        ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
+        handle = _GetStdHandle(stream_id)
+        attribute = wintypes.WORD(attr)
+        length = wintypes.DWORD(length)
+        num_written = wintypes.DWORD(0)
+        # Note that this is hard-coded for ANSI (vs wide) bytes.
+        return _FillConsoleOutputAttribute(
+            handle, attribute, length, start, byref(num_written))
+
+    def SetConsoleTitle(title):
+        return _SetConsoleTitleW(title)
+
+    def GetConsoleMode(handle):
+        mode = wintypes.DWORD()
+        success = _GetConsoleMode(handle, byref(mode))
+        if not success:
+            raise ctypes.WinError()
+        return mode.value
+
+    def SetConsoleMode(handle, mode):
+        success = _SetConsoleMode(handle, mode)
+        if not success:
+            raise ctypes.WinError()
diff --git a/venv/lib/python3.8/site-packages/colorama/winterm.py b/venv/lib/python3.8/site-packages/colorama/winterm.py
new file mode 100644
index 0000000..aad867e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/colorama/winterm.py
@@ -0,0 +1,195 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+try:
+    from msvcrt import get_osfhandle
+except ImportError:
+    def get_osfhandle(_):
+        raise OSError("This isn't Windows!")
+
+
+from . import win32
+
+# from wincon.h
+class WinColor(object):
+    BLACK   = 0
+    BLUE    = 1
+    GREEN   = 2
+    CYAN    = 3
+    RED     = 4
+    MAGENTA = 5
+    YELLOW  = 6
+    GREY    = 7
+
+# from wincon.h
+class WinStyle(object):
+    NORMAL              = 0x00 # dim text, dim background
+    BRIGHT              = 0x08 # bright text, dim background
+    BRIGHT_BACKGROUND   = 0x80 # dim text, bright background
+
+class WinTerm(object):
+
+    def __init__(self):
+        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
+        self.set_attrs(self._default)
+        self._default_fore = self._fore
+        self._default_back = self._back
+        self._default_style = self._style
+        # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
+        # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
+        # we track them separately, since LIGHT_EX is overwritten by Fore/Back
+        # and BRIGHT is overwritten by Style codes.
+        self._light = 0
+
+    def get_attrs(self):
+        return self._fore + self._back * 16 + (self._style | self._light)
+
+    def set_attrs(self, value):
+        self._fore = value & 7
+        self._back = (value >> 4) & 7
+        self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
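+    # Attribute word layout (wincon.h): bits 0-2 foreground, bit 3 BRIGHT,
+    # bits 4-6 background, bit 7 BRIGHT_BACKGROUND. For example, fore=YELLOW(6),
+    # back=MAGENTA(5), style=BRIGHT(0x08) packs to 6 + 5*16 + 8 == 0x5e.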
+
+    def reset_all(self, on_stderr=None):
+        self.set_attrs(self._default)
+        self.set_console(attrs=self._default)
+        self._light = 0
+
+    def fore(self, fore=None, light=False, on_stderr=False):
+        if fore is None:
+            fore = self._default_fore
+        self._fore = fore
+        # Emulate LIGHT_EX with BRIGHT Style
+        if light:
+            self._light |= WinStyle.BRIGHT
+        else:
+            self._light &= ~WinStyle.BRIGHT
+        self.set_console(on_stderr=on_stderr)
+
+    def back(self, back=None, light=False, on_stderr=False):
+        if back is None:
+            back = self._default_back
+        self._back = back
+        # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
+        if light:
+            self._light |= WinStyle.BRIGHT_BACKGROUND
+        else:
+            self._light &= ~WinStyle.BRIGHT_BACKGROUND
+        self.set_console(on_stderr=on_stderr)
+
+    def style(self, style=None, on_stderr=False):
+        if style is None:
+            style = self._default_style
+        self._style = style
+        self.set_console(on_stderr=on_stderr)
+
+    def set_console(self, attrs=None, on_stderr=False):
+        if attrs is None:
+            attrs = self.get_attrs()
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        win32.SetConsoleTextAttribute(handle, attrs)
+
+    def get_position(self, handle):
+        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
+        # Windows coordinates are 0-based, while win32.SetConsoleCursorPosition
+        # expects 1-based coordinates, so shift both axes by one.
+        position.X += 1
+        position.Y += 1
+        return position
+
+    def set_cursor_position(self, position=None, on_stderr=False):
+        if position is None:
+            # I'm not currently tracking the position, so there is no default.
+            # position = self.get_position()
+            return
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        win32.SetConsoleCursorPosition(handle, position)
+
+    def cursor_adjust(self, x, y, on_stderr=False):
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        position = self.get_position(handle)
+        adjusted_position = (position.Y + y, position.X + x)
+        win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
+
+    def erase_screen(self, mode=0, on_stderr=False):
+        # 0 should clear from the cursor to the end of the screen.
+        # 1 should clear from the cursor to the beginning of the screen.
+        # 2 should clear the entire screen, and move cursor to (1,1)
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        csbi = win32.GetConsoleScreenBufferInfo(handle)
+        # get the number of character cells in the current buffer
+        cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
+        # get number of character cells before current cursor position
+        cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
+        if mode == 0:
+            from_coord = csbi.dwCursorPosition
+            cells_to_erase = cells_in_screen - cells_before_cursor
+        elif mode == 1:
+            from_coord = win32.COORD(0, 0)
+            cells_to_erase = cells_before_cursor
+        elif mode == 2:
+            from_coord = win32.COORD(0, 0)
+            cells_to_erase = cells_in_screen
+        else:
+            # invalid mode
+            return
+        # fill the region to be erased with blanks
+        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+        # now set the buffer's attributes accordingly
+        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+        if mode == 2:
+            # put the cursor where needed
+            win32.SetConsoleCursorPosition(handle, (1, 1))
+
+    def erase_line(self, mode=0, on_stderr=False):
+        # 0 should clear from the cursor to the end of the line.
+        # 1 should clear from the cursor to the beginning of the line.
+        # 2 should clear the entire line.
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        csbi = win32.GetConsoleScreenBufferInfo(handle)
+        if mode == 0:
+            from_coord = csbi.dwCursorPosition
+            cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
+        elif mode == 1:
+            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+            cells_to_erase = csbi.dwCursorPosition.X
+        elif mode == 2:
+            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+            cells_to_erase = csbi.dwSize.X
+        else:
+            # invalid mode
+            return
+        # fill the selected span of the line with blanks
+        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+        # now set the buffer's attributes accordingly
+        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+
+    def set_title(self, title):
+        win32.SetConsoleTitle(title)
+
+
+def enable_vt_processing(fd):
+    if win32.windll is None or not win32.winapi_test():
+        return False
+
+    try:
+        handle = get_osfhandle(fd)
+        mode = win32.GetConsoleMode(handle)
+        win32.SetConsoleMode(
+            handle,
+            mode | win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING,
+        )
+
+        mode = win32.GetConsoleMode(handle)
+        if mode & win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING:
+            return True
+    # Can get TypeError in testsuite where 'fd' is a Mock()
+    except (OSError, TypeError):
+        return False
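A minimal usage sketch for the module above, assuming a real Windows console (`WinTerm()` queries the console at construction time, so this fails off-Windows): prefer native VT processing where the console supports it, and fall back to attribute calls otherwise.

```python
import sys

from colorama.winterm import WinTerm, WinColor, enable_vt_processing

if enable_vt_processing(sys.stdout.fileno()):
    # The console understands ANSI escapes natively; no emulation needed.
    sys.stdout.write("\x1b[31mred via ANSI\x1b[0m\n")
else:
    term = WinTerm()
    term.fore(WinColor.RED, light=True)  # LIGHT_EX emulated via BRIGHT
    print("red via console attributes")
    term.reset_all()
```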
diff --git a/venv/lib/python3.8/site-packages/cython.py b/venv/lib/python3.8/site-packages/cython.py
new file mode 100644
index 0000000..9283c4d
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/cython.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+#
+#   Cython -- Main Program, generic
+#
+
+if __name__ == '__main__':
+
+    import os
+    import sys
+
+    # Make sure we import the right Cython
+    cythonpath, _ = os.path.split(os.path.realpath(__file__))
+    sys.path.insert(0, cythonpath)
+
+    from Cython.Compiler.Main import main
+    main(command_line = 1)
+
+else:
+    # Void cython.* directives.
+    from Cython.Shadow import *
+    ## and bring in the __version__
+    from Cython import __version__
+    from Cython import load_ipython_extension
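Because the `else:` branch re-exports `Cython.Shadow`, code annotated with `cython.*` directives still runs as plain Python when it has not been compiled. A small sketch (the printed values assume pure-Python mode, where the directives are no-ops):

```python
import cython


@cython.locals(total=cython.int, i=cython.int)
def triangle(n: cython.int) -> cython.int:
    total = 0
    for i in range(n + 1):
        total += i
    return total


# In pure-Python mode cython.compiled is False and the types are ignored.
print(triangle(4), cython.compiled)  # 10 False
```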
diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/LICENSE.txt b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/LICENSE.txt
new file mode 100644
index 0000000..c31ac56
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/LICENSE.txt
@@ -0,0 +1,284 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC.  Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com).  In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property.  Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition).  Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.2             2.1.1       2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2.1           2.2         2002        PSF         yes
+    2.2.2           2.2.1       2002        PSF         yes
+    2.2.3           2.2.2       2003        PSF         yes
+    2.3             2.2.2       2002-2003   PSF         yes
+    2.3.1           2.3         2002-2003   PSF         yes
+    2.3.2           2.3.1       2002-2003   PSF         yes
+    2.3.3           2.3.2       2002-2003   PSF         yes
+    2.3.4           2.3.3       2004        PSF         yes
+    2.3.5           2.3.4       2005        PSF         yes
+    2.4             2.3         2004        PSF         yes
+    2.4.1           2.4         2005        PSF         yes
+    2.4.2           2.4.1       2005        PSF         yes
+    2.4.3           2.4.2       2006        PSF         yes
+    2.4.4           2.4.3       2006        PSF         yes
+    2.5             2.4         2006        PSF         yes
+    2.5.1           2.5         2007        PSF         yes
+    2.5.2           2.5.1       2008        PSF         yes
+    2.5.3           2.5.2       2008        PSF         yes
+    2.6             2.5         2008        PSF         yes
+    2.6.1           2.6         2008        PSF         yes
+    2.6.2           2.6.1       2009        PSF         yes
+    2.6.3           2.6.2       2009        PSF         yes
+    2.6.4           2.6.3       2009        PSF         yes
+    2.6.5           2.6.4       2010        PSF         yes
+    3.0             2.6         2008        PSF         yes
+    3.0.1           3.0         2009        PSF         yes
+    3.1             3.0.1       2009        PSF         yes
+    3.1.1           3.1         2009        PSF         yes
+    3.1.2           3.1         2010        PSF         yes
+    3.2             3.1         2010        PSF         yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+    the GPL.  All Python licenses, unlike the GPL, let you distribute
+    a modified version without making your changes open source.  The
+    GPL-compatible licenses make it possible to combine Python with
+    other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+    because its license has a choice of law clause.  According to
+    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+    is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+Python Software Foundation; All Rights Reserved" are retained in Python alone or
+in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/METADATA b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/METADATA
new file mode 100644
index 0000000..806df8b
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/METADATA
@@ -0,0 +1,116 @@
+Metadata-Version: 2.1
+Name: distlib
+Version: 0.3.6
+Summary: Distribution utilities
+Home-page: https://github.com/pypa/distlib
+Author: Vinay Sajip
+Author-email: vinay_sajip@red-dove.com
+License: Python license
+Project-URL: Documentation, https://distlib.readthedocs.io/
+Project-URL: Source, https://github.com/pypa/distlib
+Project-URL: Tracker, https://github.com/pypa/distlib/issues
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: Software Development
+License-File: LICENSE.txt
+
+|badge1| |badge2|
+
+.. |badge1| image:: https://img.shields.io/github/workflow/status/pypa/distlib/Tests
+   :alt: GitHub test status
+
+.. |badge2| image:: https://img.shields.io/codecov/c/github/pypa/distlib
+   :target: https://app.codecov.io/gh/pypa/distlib
+   :alt: GitHub coverage status
+
+What is it?
+-----------
+
+Distlib is a library which implements low-level functions that relate to
+packaging and distribution of Python software. It is intended to be used as the
+basis for third-party packaging tools. The documentation is available at
+
+https://distlib.readthedocs.io/
+
+Main features
+-------------
+
+Distlib currently offers the following features:
+
+* The package ``distlib.database``, which implements a database of installed
+  distributions, as defined by :pep:`376`, and distribution dependency graph
+  logic. Support is also provided for non-installed distributions (i.e.
+  distributions registered with metadata on an index like PyPI), including
+  the ability to scan for dependencies and build dependency graphs.
+* The package ``distlib.index``, which implements an interface to perform
+  operations on an index, such as registering a project, uploading a
+  distribution or uploading documentation. Support is included for verifying
+  SSL connections (with domain matching) and signing/verifying packages using
+  GnuPG.
+* The package ``distlib.metadata``, which implements distribution metadata as
+  defined by :pep:`643`, :pep:`566`, :pep:`345`, :pep:`314` and :pep:`241`.
+* The package ``distlib.markers``, which implements environment markers as
+  defined by :pep:`508`.
+* The package ``distlib.manifest``, which implements lists of files used
+  in packaging source distributions.
+* The package ``distlib.locators``, which allows finding distributions, whether
+  on PyPI (XML-RPC or via the "simple" interface), local directories or some
+  other source.
+* The package ``distlib.resources``, which allows access to data files stored
+  in Python packages, both in the file system and in .zip files.
+* The package ``distlib.scripts``, which supports installing scripts with
+  adjustment of shebang lines and support for native Windows executable
+  launchers.
+* The package ``distlib.version``, which implements version specifiers as
+  defined by :pep:`440`, along with support for working with "legacy" versions and
+  semantic versions.
+* The package ``distlib.wheel``, which provides support for building and
+  installing from the Wheel format for binary distributions (see :pep:`427`).
+* The package ``distlib.util``, which contains miscellaneous functions and
+  classes which are useful in packaging, but which do not fit neatly into
+  one of the other packages in ``distlib``. The package also implements
+  enhanced globbing functionality, such as the ability to use ``**`` in
+  patterns to specify recursion into subdirectories.
+
+
+Python version and platform compatibility
+-----------------------------------------
+
+Distlib is intended for, and is tested on, Python versions 2.7 and 3.6 - 3.10,
+pypy-2.7 and pypy3 on Linux, Windows, and macOS.
+
+Project status
+--------------
+
+The project has reached a mature status in its development: there is a comprehensive
+test suite and it has been exercised on Windows, Ubuntu and macOS. The project is used
+by well-known projects such as ``pip`` and ``caniusepython3``.
+
+This project was migrated from Mercurial to Git and from BitBucket to GitHub, and
+although all information of importance has been retained across the migration, some
+commit references in issues and issue comments may have become invalid.
+
+Code of Conduct
+---------------
+
+Everyone interacting in the distlib project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.
+
+.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
+
+
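To make the feature list above concrete, here is a short sketch of the ``distlib.version`` API (class and method names as documented by distlib; treat the exact requirement-string syntax as an assumption):

```python
from distlib.version import NormalizedVersion, NormalizedMatcher

# PEP 440 ("normalized") ordering:
assert NormalizedVersion("1.0.post1") > NormalizedVersion("1.0")
assert NormalizedVersion("1.0rc1") < NormalizedVersion("1.0")

# Matching a version against a requirement:
matcher = NormalizedMatcher("requests (>= 2.0, < 3.0)")
print(matcher.match("2.28.1"))  # True
print(matcher.match("3.0.0"))   # False
```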
diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/RECORD b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/RECORD
new file mode 100644
index 0000000..5432cff
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/RECORD
@@ -0,0 +1,38 @@
+distlib-0.3.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+distlib-0.3.6.dist-info/LICENSE.txt,sha256=gI4QyKarjesUn_mz-xn0R6gICUYG1xKpylf-rTVSWZ0,14531
+distlib-0.3.6.dist-info/METADATA,sha256=t7pBTqEAPspwna_64nyQC2sOA4OSqbCKqMwI1cr0th8,5112
+distlib-0.3.6.dist-info/RECORD,,
+distlib-0.3.6.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110
+distlib-0.3.6.dist-info/top_level.txt,sha256=9BERqitu_vzyeyILOcGzX9YyA2AB_xlC4-81V6xoizk,8
+distlib/__init__.py,sha256=acgfseOC55dNrVAzaBKpUiH3Z6V7Q1CaxsiQ3K7pC-E,581
+distlib/__pycache__/__init__.cpython-38.pyc,,
+distlib/__pycache__/compat.cpython-38.pyc,,
+distlib/__pycache__/database.cpython-38.pyc,,
+distlib/__pycache__/index.cpython-38.pyc,,
+distlib/__pycache__/locators.cpython-38.pyc,,
+distlib/__pycache__/manifest.cpython-38.pyc,,
+distlib/__pycache__/markers.cpython-38.pyc,,
+distlib/__pycache__/metadata.cpython-38.pyc,,
+distlib/__pycache__/resources.cpython-38.pyc,,
+distlib/__pycache__/scripts.cpython-38.pyc,,
+distlib/__pycache__/util.cpython-38.pyc,,
+distlib/__pycache__/version.cpython-38.pyc,,
+distlib/__pycache__/wheel.cpython-38.pyc,,
+distlib/compat.py,sha256=tfoMrj6tujk7G4UC2owL6ArgDuCKabgBxuJRGZSmpko,41259
+distlib/database.py,sha256=o_mw0fAr93NDAHHHfqG54Y1Hi9Rkfrp2BX15XWZYK50,51697
+distlib/index.py,sha256=HFiDG7LMoaBs829WuotrfIwcErOOExUOR_AeBtw_TCU,20834
+distlib/locators.py,sha256=wNzG-zERzS_XGls-nBPVVyLRHa2skUlkn0-5n0trMWA,51991
+distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811
+distlib/markers.py,sha256=TpHHHLgkzyT7YHbwj-2i6weRaq-Ivy2-MUnrDkjau-U,5058
+distlib/metadata.py,sha256=g_DIiu8nBXRzA-mWPRpatHGbmFZqaFoss7z9TG7QSUU,39801
+distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820
+distlib/scripts.py,sha256=BmkTKmiTk4m2cj-iueliatwz3ut_9SsABBW51vnQnZU,18102
+distlib/t32.exe,sha256=a0GV5kCoWsMutvliiCKmIgV98eRZ33wXoS-XrqvJQVs,97792
+distlib/t64-arm.exe,sha256=68TAa32V504xVBnufojh0PcenpR3U4wAqTqf-MZqbPw,182784
+distlib/t64.exe,sha256=gaYY8hy4fbkHYTTnA4i26ct8IQZzkBG2pRdy0iyuBrc,108032
+distlib/util.py,sha256=31dPXn3Rfat0xZLeVoFpuniyhe6vsbl9_QN-qd9Lhlk,66262
+distlib/version.py,sha256=WG__LyAa2GwmA6qSoEJtvJE8REA1LZpbSizy8WvhJLk,23513
+distlib/w32.exe,sha256=R4csx3-OGM9kL4aPIzQKRo5TfmRSHZo6QWyLhDhNBks,91648
+distlib/w64-arm.exe,sha256=xdyYhKj0WDcVUOCb05blQYvzdYIKMbmJn2SZvzkcey4,168448
+distlib/w64.exe,sha256=ejGf-rojoBfXseGLpya6bFTFPWRG21X5KvU8J5iU-K0,101888
+distlib/wheel.py,sha256=Rgqs658VsJ3R2845qwnZD8XQryV2CzWw2mghwLvxxsI,43898
diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/WHEEL b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/WHEEL
new file mode 100644
index 0000000..0b18a28
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/top_level.txt
new file mode 100644
index 0000000..f68bb07
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib-0.3.6.dist-info/top_level.txt
@@ -0,0 +1 @@
+distlib
diff --git a/venv/lib/python3.8/site-packages/distlib/__init__.py b/venv/lib/python3.8/site-packages/distlib/__init__.py
new file mode 100644
index 0000000..962173c
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/__init__.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2022 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import logging
+
+__version__ = '0.3.6'
+
+class DistlibException(Exception):
+    pass
+
+try:
+    from logging import NullHandler
+except ImportError: # pragma: no cover
+    class NullHandler(logging.Handler):
+        def handle(self, record): pass
+        def emit(self, record): pass
+        def createLock(self): self.lock = None
+
+logger = logging.getLogger(__name__)
+logger.addHandler(NullHandler())
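The `NullHandler` registration above is the standard library-logging pattern: distlib emits log records but stays silent unless the application configures logging itself. A usage sketch:

```python
import logging

import distlib

# Nothing is printed until the application opts in:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("distlib").debug("distlib %s logging enabled", distlib.__version__)
```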
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..6c9550e
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/compat.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/compat.cpython-38.pyc
new file mode 100644
index 0000000..beef615
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/compat.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/database.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/database.cpython-38.pyc
new file mode 100644
index 0000000..ba7a10f
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/database.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/index.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/index.cpython-38.pyc
new file mode 100644
index 0000000..9b76e42
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/index.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/locators.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/locators.cpython-38.pyc
new file mode 100644
index 0000000..cf5ceec
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/locators.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/manifest.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/manifest.cpython-38.pyc
new file mode 100644
index 0000000..c435493
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/manifest.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/markers.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/markers.cpython-38.pyc
new file mode 100644
index 0000000..66cf377
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/markers.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/metadata.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/metadata.cpython-38.pyc
new file mode 100644
index 0000000..483d905
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/metadata.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/resources.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/resources.cpython-38.pyc
new file mode 100644
index 0000000..5e81f86
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/resources.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/scripts.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/scripts.cpython-38.pyc
new file mode 100644
index 0000000..b2a91b9
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/scripts.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/util.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/util.cpython-38.pyc
new file mode 100644
index 0000000..10f31da
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/util.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/version.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/version.cpython-38.pyc
new file mode 100644
index 0000000..ecacfe5
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/version.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/__pycache__/wheel.cpython-38.pyc b/venv/lib/python3.8/site-packages/distlib/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000..6c96182
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/__pycache__/wheel.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/distlib/compat.py b/venv/lib/python3.8/site-packages/distlib/compat.py
new file mode 100644
index 0000000..1fe3d22
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/compat.py
@@ -0,0 +1,1116 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import absolute_import
+
+import os
+import re
+import sys
+
+try:
+    import ssl
+except ImportError:  # pragma: no cover
+    ssl = None
+
+if sys.version_info[0] < 3:  # pragma: no cover
+    from StringIO import StringIO
+    string_types = basestring,
+    text_type = unicode
+    from types import FileType as file_type
+    import __builtin__ as builtins
+    import ConfigParser as configparser
+    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
+    from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
+                        pathname2url, ContentTooShortError, splittype)
+
+    def quote(s):
+        if isinstance(s, unicode):
+            s = s.encode('utf-8')
+        return _quote(s)
+
+    import urllib2
+    from urllib2 import (Request, urlopen, URLError, HTTPError,
+                         HTTPBasicAuthHandler, HTTPPasswordMgr,
+                         HTTPHandler, HTTPRedirectHandler,
+                         build_opener)
+    if ssl:
+        from urllib2 import HTTPSHandler
+    import httplib
+    import xmlrpclib
+    import Queue as queue
+    from HTMLParser import HTMLParser
+    import htmlentitydefs
+    raw_input = raw_input
+    from itertools import ifilter as filter
+    from itertools import ifilterfalse as filterfalse
+
+    # Leaving this around for now, in case it needs resurrecting in some way
+    # _userprog = None
+    # def splituser(host):
+        # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
+        # global _userprog
+        # if _userprog is None:
+            # import re
+            # _userprog = re.compile('^(.*)@(.*)$')
+
+        # match = _userprog.match(host)
+        # if match: return match.group(1, 2)
+        # return None, host
+
+else:  # pragma: no cover
+    from io import StringIO
+    string_types = str,
+    text_type = str
+    from io import TextIOWrapper as file_type
+    import builtins
+    import configparser
+    import shutil
+    from urllib.parse import (urlparse, urlunparse, urljoin, quote,
+                              unquote, urlsplit, urlunsplit, splittype)
+    from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
+                                pathname2url,
+                                HTTPBasicAuthHandler, HTTPPasswordMgr,
+                                HTTPHandler, HTTPRedirectHandler,
+                                build_opener)
+    if ssl:
+        from urllib.request import HTTPSHandler
+    from urllib.error import HTTPError, URLError, ContentTooShortError
+    import http.client as httplib
+    import urllib.request as urllib2
+    import xmlrpc.client as xmlrpclib
+    import queue
+    from html.parser import HTMLParser
+    import html.entities as htmlentitydefs
+    raw_input = input
+    from itertools import filterfalse
+    filter = filter
+
+
+try:
+    from ssl import match_hostname, CertificateError
+except ImportError: # pragma: no cover
+    class CertificateError(ValueError):
+        pass
+
+
+    def _dnsname_match(dn, hostname, max_wildcards=1):
+        """Matching according to RFC 6125, section 6.4.3
+
+        http://tools.ietf.org/html/rfc6125#section-6.4.3
+        """
+        pats = []
+        if not dn:
+            return False
+
+        parts = dn.split('.')
+        leftmost, remainder = parts[0], parts[1:]
+
+        wildcards = leftmost.count('*')
+        if wildcards > max_wildcards:
+            # Issue #17980: avoid denials of service by refusing more
+            # than one wildcard per fragment.  A survey of established
+            # policy among SSL implementations showed it to be a
+            # reasonable choice.
+            raise CertificateError(
+                "too many wildcards in certificate DNS name: " + repr(dn))
+
+        # speed up common case w/o wildcards
+        if not wildcards:
+            return dn.lower() == hostname.lower()
+
+        # RFC 6125, section 6.4.3, subitem 1.
+        # The client SHOULD NOT attempt to match a presented identifier in which
+        # the wildcard character comprises a label other than the left-most label.
+        if leftmost == '*':
+            # When '*' is a fragment by itself, it matches a non-empty dotless
+            # fragment.
+            pats.append('[^.]+')
+        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+            # RFC 6125, section 6.4.3, subitem 3.
+            # The client SHOULD NOT attempt to match a presented identifier
+            # where the wildcard character is embedded within an A-label or
+            # U-label of an internationalized domain name.
+            pats.append(re.escape(leftmost))
+        else:
+            # Otherwise, '*' matches any dotless string, e.g. www*
+            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+        # add the remaining fragments, ignore any wildcards
+        for frag in remainder:
+            pats.append(re.escape(frag))
+
+        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+        return pat.match(hostname)
+
+
+    def match_hostname(cert, hostname):
+        """Verify that *cert* (in decoded format as returned by
+        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
+        rules are followed, but IP addresses are not accepted for *hostname*.
+
+        CertificateError is raised on failure. On success, the function
+        returns nothing.
+        """
+        if not cert:
+            raise ValueError("empty or no certificate, match_hostname needs a "
+                             "SSL socket or SSL context with either "
+                             "CERT_OPTIONAL or CERT_REQUIRED")
+        dnsnames = []
+        san = cert.get('subjectAltName', ())
+        for key, value in san:
+            if key == 'DNS':
+                if _dnsname_match(value, hostname):
+                    return
+                dnsnames.append(value)
+        if not dnsnames:
+            # The subject is only checked when there is no dNSName entry
+            # in subjectAltName
+            for sub in cert.get('subject', ()):
+                for key, value in sub:
+                    # XXX according to RFC 2818, the most specific Common Name
+                    # must be used.
+                    if key == 'commonName':
+                        if _dnsname_match(value, hostname):
+                            return
+                        dnsnames.append(value)
+        if len(dnsnames) > 1:
+            raise CertificateError("hostname %r "
+                "doesn't match either of %s"
+                % (hostname, ', '.join(map(repr, dnsnames))))
+        elif len(dnsnames) == 1:
+            raise CertificateError("hostname %r "
+                "doesn't match %r"
+                % (hostname, dnsnames[0]))
+        else:
+            raise CertificateError("no appropriate commonName or "
+                "subjectAltName fields were found")
+
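A quick sketch of the hostname matching above, fed a hand-built cert dict shaped like `SSLSocket.getpeercert()` output (the certificate data is made up for illustration; on Python 3.8 the compat module re-exports the stdlib `ssl` versions of these names):

```python
from distlib.compat import match_hostname, CertificateError

cert = {"subjectAltName": (("DNS", "*.example.com"),)}

match_hostname(cert, "www.example.com")  # returns None on success

try:
    match_hostname(cert, "www.other.test")
except CertificateError as exc:
    print("rejected:", exc)
```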
+
+try:
+    from types import SimpleNamespace as Container
+except ImportError:  # pragma: no cover
+    class Container(object):
+        """
+        A generic container for when multiple values need to be returned
+        """
+        def __init__(self, **kwargs):
+            self.__dict__.update(kwargs)
+
+
+try:
+    from shutil import which
+except ImportError:  # pragma: no cover
+    # Implementation from Python 3.3
+    def which(cmd, mode=os.F_OK | os.X_OK, path=None):
+        """Given a command, mode, and a PATH string, return the path which
+        conforms to the given mode on the PATH, or None if there is no such
+        file.
+
+        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
+        of os.environ.get("PATH"), or can be overridden with a custom search
+        path.
+
+        """
+        # Check that a given file can be accessed with the correct mode.
+        # Additionally check that `file` is not a directory, as on Windows
+        # directories pass the os.access check.
+        def _access_check(fn, mode):
+            return (os.path.exists(fn) and os.access(fn, mode)
+                    and not os.path.isdir(fn))
+
+        # If we're given a path with a directory part, look it up directly rather
+        # than referring to PATH directories. This includes checking relative to the
+        # current directory, e.g. ./script
+        if os.path.dirname(cmd):
+            if _access_check(cmd, mode):
+                return cmd
+            return None
+
+        if path is None:
+            path = os.environ.get("PATH", os.defpath)
+        if not path:
+            return None
+        path = path.split(os.pathsep)
+
+        if sys.platform == "win32":
+            # The current directory takes precedence on Windows.
+            if not os.curdir in path:
+                path.insert(0, os.curdir)
+
+            # PATHEXT is necessary to check on Windows.
+            pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
+            # See if the given file matches any of the expected path extensions.
+            # This will allow us to short circuit when given "python.exe".
+            # If it does match, only test that one, otherwise we have to try
+            # others.
+            if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
+                files = [cmd]
+            else:
+                files = [cmd + ext for ext in pathext]
+        else:
+            # On other platforms you don't have things like PATHEXT to tell you
+            # what file suffixes are executable, so just pass on cmd as-is.
+            files = [cmd]
+
+        seen = set()
+        for dir in path:
+            normdir = os.path.normcase(dir)
+            if not normdir in seen:
+                seen.add(normdir)
+                for thefile in files:
+                    name = os.path.join(dir, thefile)
+                    if _access_check(name, mode):
+                        return name
+        return None
+
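A usage sketch for the `which()` fallback above (`shutil.which` behaves the same way on Python >= 3.3, which is what the `try` block prefers):

```python
from distlib.compat import which

print(which("python3"))                          # e.g. '/usr/bin/python3', or None
print(which("python3", path="/definitely/not"))  # None: not on that search path
```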
+
+# ZipFile is a context manager in 2.7, but not in 2.6
+
+from zipfile import ZipFile as BaseZipFile
+
+if hasattr(BaseZipFile, '__enter__'):  # pragma: no cover
+    ZipFile = BaseZipFile
+else:  # pragma: no cover
+    from zipfile import ZipExtFile as BaseZipExtFile
+
+    class ZipExtFile(BaseZipExtFile):
+        def __init__(self, base):
+            self.__dict__.update(base.__dict__)
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, *exc_info):
+            self.close()
+            # return None, so if an exception occurred, it will propagate
+
+    class ZipFile(BaseZipFile):
+        def __enter__(self):
+            return self
+
+        def __exit__(self, *exc_info):
+            self.close()
+            # return None, so if an exception occurred, it will propagate
+
+        def open(self, *args, **kwargs):
+            base = BaseZipFile.open(self, *args, **kwargs)
+            return ZipExtFile(base)
+
+try:
+    from platform import python_implementation
+except ImportError: # pragma: no cover
+    def python_implementation():
+        """Return a string identifying the Python implementation."""
+        if 'PyPy' in sys.version:
+            return 'PyPy'
+        if os.name == 'java':
+            return 'Jython'
+        if sys.version.startswith('IronPython'):
+            return 'IronPython'
+        return 'CPython'
+
+import shutil
+import sysconfig
+
+try:
+    callable = callable
+except NameError:   # pragma: no cover
+    from collections.abc import Callable
+
+    def callable(obj):
+        return isinstance(obj, Callable)
+
+
+try:
+    fsencode = os.fsencode
+    fsdecode = os.fsdecode
+except AttributeError:  # pragma: no cover
+    # Issue #99: on some systems (e.g. containerised),
+    # sys.getfilesystemencoding() returns None, and we need a real value,
+    # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
+    # sys.getfilesystemencoding(): the return value is "the user’s preference
+    # according to the result of nl_langinfo(CODESET), or None if the
+    # nl_langinfo(CODESET) failed."
+    _fsencoding = sys.getfilesystemencoding() or 'utf-8'
+    if _fsencoding == 'mbcs':
+        _fserrors = 'strict'
+    else:
+        _fserrors = 'surrogateescape'
+
+    def fsencode(filename):
+        if isinstance(filename, bytes):
+            return filename
+        elif isinstance(filename, text_type):
+            return filename.encode(_fsencoding, _fserrors)
+        else:
+            raise TypeError("expect bytes or str, not %s" %
+                            type(filename).__name__)
+
+    def fsdecode(filename):
+        if isinstance(filename, text_type):
+            return filename
+        elif isinstance(filename, bytes):
+            return filename.decode(_fsencoding, _fserrors)
+        else:
+            raise TypeError("expect bytes or str, not %s" %
+                            type(filename).__name__)
+
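A round-trip sketch of the `fsencode`/`fsdecode` pair, which mirrors `os.fsencode`/`os.fsdecode` (the byte values shown assume a UTF-8 filesystem encoding):

```python
from distlib.compat import fsencode, fsdecode

raw = fsencode("café.txt")  # b'caf\xc3\xa9.txt' under UTF-8
print(fsdecode(raw))        # 'café.txt'

try:
    fsencode(123)           # anything but bytes/str is rejected
except TypeError as exc:
    print(exc)
```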
+try:
+    from tokenize import detect_encoding
+except ImportError: # pragma: no cover
+    from codecs import BOM_UTF8, lookup
+    import re
+
+    cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
+
+    def _get_normal_name(orig_enc):
+        """Imitates get_normal_name in tokenizer.c."""
+        # Only care about the first 12 characters.
+        enc = orig_enc[:12].lower().replace("_", "-")
+        if enc == "utf-8" or enc.startswith("utf-8-"):
+            return "utf-8"
+        if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+           enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+            return "iso-8859-1"
+        return orig_enc
+
+    def detect_encoding(readline):
+        """
+        The detect_encoding() function is used to detect the encoding that should
+        be used to decode a Python source file.  It requires one argument, readline,
+        in the same way as the tokenize() generator.
+
+        It will call readline a maximum of twice, and return the encoding used
+        (as a string) and a list of any lines (left as bytes) it has read in.
+
+        It detects the encoding from the presence of a utf-8 bom or an encoding
+        cookie as specified in pep-0263.  If both a bom and a cookie are present,
+        but disagree, a SyntaxError will be raised.  If the encoding cookie is an
+        invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
+        'utf-8-sig' is returned.
+
+        If no encoding is specified, then the default of 'utf-8' will be returned.
+        """
+        try:
+            filename = readline.__self__.name
+        except AttributeError:
+            filename = None
+        bom_found = False
+        encoding = None
+        default = 'utf-8'
+        def read_or_stop():
+            try:
+                return readline()
+            except StopIteration:
+                return b''
+
+        def find_cookie(line):
+            try:
+                # Decode as UTF-8. Either the line is an encoding declaration,
+                # in which case it should be pure ASCII, or it must be UTF-8
+                # per default encoding.
+                line_string = line.decode('utf-8')
+            except UnicodeDecodeError:
+                msg = "invalid or missing encoding declaration"
+                if filename is not None:
+                    msg = '{} for {!r}'.format(msg, filename)
+                raise SyntaxError(msg)
+
+            matches = cookie_re.findall(line_string)
+            if not matches:
+                return None
+            encoding = _get_normal_name(matches[0])
+            try:
+                codec = lookup(encoding)
+            except LookupError:
+                # This behaviour mimics the Python interpreter
+                if filename is None:
+                    msg = "unknown encoding: " + encoding
+                else:
+                    msg = "unknown encoding for {!r}: {}".format(filename,
+                            encoding)
+                raise SyntaxError(msg)
+
+            if bom_found:
+                if codec.name != 'utf-8':
+                    # This behaviour mimics the Python interpreter
+                    if filename is None:
+                        msg = 'encoding problem: utf-8'
+                    else:
+                        msg = 'encoding problem for {!r}: utf-8'.format(filename)
+                    raise SyntaxError(msg)
+                encoding += '-sig'
+            return encoding
+
+        first = read_or_stop()
+        if first.startswith(BOM_UTF8):
+            bom_found = True
+            first = first[3:]
+            default = 'utf-8-sig'
+        if not first:
+            return default, []
+
+        encoding = find_cookie(first)
+        if encoding:
+            return encoding, [first]
+
+        second = read_or_stop()
+        if not second:
+            return default, [first]
+
+        encoding = find_cookie(second)
+        if encoding:
+            return encoding, [first, second]
+
+        return default, [first, second]
+
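Note that `detect_encoding()` takes a readline callable rather than a file name, so `io.BytesIO` can stand in for an open source file, as in this sketch:

```python
import io

from distlib.compat import detect_encoding

src = b'# -*- coding: iso-8859-1 -*-\nprint("hi")\n'
encoding, consumed = detect_encoding(io.BytesIO(src).readline)
print(encoding)       # 'iso-8859-1'
print(len(consumed))  # 1 -- only the cookie line was read
```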
+# For converting & <-> &amp; etc.
+try:
+    from html import escape
+except ImportError:
+    from cgi import escape
+if sys.version_info[:2] < (3, 4):
+    unescape = HTMLParser().unescape
+else:
+    from html import unescape
+
+try:
+    from collections import ChainMap
+except ImportError: # pragma: no cover
+    from collections import MutableMapping
+
+    try:
+        from reprlib import recursive_repr as _recursive_repr
+    except ImportError:
+        def _recursive_repr(fillvalue='...'):
+            '''
+            Decorator to make a repr function return fillvalue for a recursive
+            call
+            '''
+
+            def decorating_function(user_function):
+                repr_running = set()
+
+                def wrapper(self):
+                    key = id(self), get_ident()
+                    if key in repr_running:
+                        return fillvalue
+                    repr_running.add(key)
+                    try:
+                        result = user_function(self)
+                    finally:
+                        repr_running.discard(key)
+                    return result
+
+                # Can't use functools.wraps() here because of bootstrap issues
+                wrapper.__module__ = getattr(user_function, '__module__')
+                wrapper.__doc__ = getattr(user_function, '__doc__')
+                wrapper.__name__ = getattr(user_function, '__name__')
+                wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
+                return wrapper
+
+            return decorating_function
+
+    class ChainMap(MutableMapping):
+        ''' A ChainMap groups multiple dicts (or other mappings) together
+        to create a single, updateable view.
+
+        The underlying mappings are stored in a list.  That list is public and can
+        be accessed or updated using the *maps* attribute.  There is no other state.
+
+        Lookups search the underlying mappings successively until a key is found.
+        In contrast, writes, updates, and deletions only operate on the first
+        mapping.
+
+        '''
+
+        def __init__(self, *maps):
+            '''Initialize a ChainMap by setting *maps* to the given mappings.
+            If no mappings are provided, a single empty dictionary is used.
+
+            '''
+            self.maps = list(maps) or [{}]          # always at least one map
+
+        def __missing__(self, key):
+            raise KeyError(key)
+
+        def __getitem__(self, key):
+            for mapping in self.maps:
+                try:
+                    return mapping[key]             # can't use 'key in mapping' with defaultdict
+                except KeyError:
+                    pass
+            return self.__missing__(key)            # support subclasses that define __missing__
+
+        def get(self, key, default=None):
+            return self[key] if key in self else default
+
+        def __len__(self):
+            return len(set().union(*self.maps))     # reuses stored hash values if possible
+
+        def __iter__(self):
+            return iter(set().union(*self.maps))
+
+        def __contains__(self, key):
+            return any(key in m for m in self.maps)
+
+        def __bool__(self):
+            return any(self.maps)
+
+        @_recursive_repr()
+        def __repr__(self):
+            return '{0.__class__.__name__}({1})'.format(
+                self, ', '.join(map(repr, self.maps)))
+
+        @classmethod
+        def fromkeys(cls, iterable, *args):
+            'Create a ChainMap with a single dict created from the iterable.'
+            return cls(dict.fromkeys(iterable, *args))
+
+        def copy(self):
+            'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+            return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+        __copy__ = copy
+
+        def new_child(self):                        # like Django's Context.push()
+            'New ChainMap with a new dict followed by all previous maps.'
+            return self.__class__({}, *self.maps)
+
+        @property
+        def parents(self):                          # like Django's Context.pop()
+            'New ChainMap from maps[1:].'
+            return self.__class__(*self.maps[1:])
+
+        def __setitem__(self, key, value):
+            self.maps[0][key] = value
+
+        def __delitem__(self, key):
+            try:
+                del self.maps[0][key]
+            except KeyError:
+                raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+        def popitem(self):
+        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+            try:
+                return self.maps[0].popitem()
+            except KeyError:
+                raise KeyError('No keys found in the first mapping.')
+
+        def pop(self, key, *args):
+            'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
+            try:
+                return self.maps[0].pop(key, *args)
+            except KeyError:
+                raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+        def clear(self):
+            'Clear maps[0], leaving maps[1:] intact.'
+            self.maps[0].clear()
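
A minimal sketch of ChainMap semantics (shown with the stdlib
collections.ChainMap, which this backport mirrors; the names are
illustrative):

    from collections import ChainMap

    defaults = {'colour': 'red', 'user': 'guest'}
    overrides = {'user': 'admin'}
    cm = ChainMap(overrides, defaults)

    assert cm['user'] == 'admin'        # lookups search maps left to right
    assert cm['colour'] == 'red'        # falls through to later mappings
    cm['colour'] = 'blue'               # writes only ever touch maps[0]
    assert defaults['colour'] == 'red'  # underlying mappings are untouched
    child = cm.new_child()              # push a fresh scope, like Context.push()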
+
+try:
+    from importlib.util import cache_from_source  # Python >= 3.4
+except ImportError:  # pragma: no cover
+    def cache_from_source(path, debug_override=None):
+        assert path.endswith('.py')
+        if debug_override is None:
+            debug_override = __debug__
+        if debug_override:
+            suffix = 'c'
+        else:
+            suffix = 'o'
+        return path + suffix
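
A quick illustration of the fallback's behaviour; note that the real
importlib.util.cache_from_source returns a __pycache__ path with an
interpreter tag rather than a sibling file:

    cache_from_source('pkg/mod.py')  # -> 'pkg/mod.pyc'
                                     #    (or 'pkg/mod.pyo' if __debug__ is false)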
+
+try:
+    from collections import OrderedDict
+except ImportError: # pragma: no cover
+## {{{ http://code.activestate.com/recipes/576693/ (r9)
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python2.7's test suite and incorporates all the latest updates.
+    try:
+        from thread import get_ident as _get_ident
+    except ImportError:
+        from dummy_thread import get_ident as _get_ident
+
+    try:
+        from _abcoll import KeysView, ValuesView, ItemsView
+    except ImportError:
+        pass
+
+
+    class OrderedDict(dict):
+        'Dictionary that remembers insertion order'
+        # An inherited dict maps keys to values.
+        # The inherited dict provides __getitem__, __len__, __contains__, and get.
+        # The remaining methods are order-aware.
+        # Big-O running times for all methods are the same as for regular dictionaries.
+
+        # The internal self.__map dictionary maps keys to links in a doubly linked list.
+        # The circular doubly linked list starts and ends with a sentinel element.
+        # The sentinel element never gets deleted (this simplifies the algorithm).
+        # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
+
+        def __init__(self, *args, **kwds):
+            '''Initialize an ordered dictionary.  Signature is the same as for
+            regular dictionaries, but keyword arguments are not recommended
+            because their insertion order is arbitrary.
+
+            '''
+            if len(args) > 1:
+                raise TypeError('expected at most 1 arguments, got %d' % len(args))
+            try:
+                self.__root
+            except AttributeError:
+                self.__root = root = []                     # sentinel node
+                root[:] = [root, root, None]
+                self.__map = {}
+            self.__update(*args, **kwds)
+
+        def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+            'od.__setitem__(i, y) <==> od[i]=y'
+            # Setting a new item creates a new link which goes at the end of the linked
+            # list, and the inherited dictionary is updated with the new key/value pair.
+            if key not in self:
+                root = self.__root
+                last = root[0]
+                last[1] = root[0] = self.__map[key] = [last, root, key]
+            dict_setitem(self, key, value)
+
+        def __delitem__(self, key, dict_delitem=dict.__delitem__):
+            'od.__delitem__(y) <==> del od[y]'
+            # Deleting an existing item uses self.__map to find the link which is
+            # then removed by updating the links in the predecessor and successor nodes.
+            dict_delitem(self, key)
+            link_prev, link_next, key = self.__map.pop(key)
+            link_prev[1] = link_next
+            link_next[0] = link_prev
+
+        def __iter__(self):
+            'od.__iter__() <==> iter(od)'
+            root = self.__root
+            curr = root[1]
+            while curr is not root:
+                yield curr[2]
+                curr = curr[1]
+
+        def __reversed__(self):
+            'od.__reversed__() <==> reversed(od)'
+            root = self.__root
+            curr = root[0]
+            while curr is not root:
+                yield curr[2]
+                curr = curr[0]
+
+        def clear(self):
+            'od.clear() -> None.  Remove all items from od.'
+            try:
+                for node in self.__map.itervalues():
+                    del node[:]
+                root = self.__root
+                root[:] = [root, root, None]
+                self.__map.clear()
+            except AttributeError:
+                pass
+            dict.clear(self)
+
+        def popitem(self, last=True):
+            '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+            Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+            '''
+            if not self:
+                raise KeyError('dictionary is empty')
+            root = self.__root
+            if last:
+                link = root[0]
+                link_prev = link[0]
+                link_prev[1] = root
+                root[0] = link_prev
+            else:
+                link = root[1]
+                link_next = link[1]
+                root[1] = link_next
+                link_next[0] = root
+            key = link[2]
+            del self.__map[key]
+            value = dict.pop(self, key)
+            return key, value
+
+        # -- the following methods do not depend on the internal structure --
+
+        def keys(self):
+            'od.keys() -> list of keys in od'
+            return list(self)
+
+        def values(self):
+            'od.values() -> list of values in od'
+            return [self[key] for key in self]
+
+        def items(self):
+            'od.items() -> list of (key, value) pairs in od'
+            return [(key, self[key]) for key in self]
+
+        def iterkeys(self):
+            'od.iterkeys() -> an iterator over the keys in od'
+            return iter(self)
+
+        def itervalues(self):
+            'od.itervalues() -> an iterator over the values in od'
+            for k in self:
+                yield self[k]
+
+        def iteritems(self):
+            'od.iteritems() -> an iterator over the (key, value) items in od'
+            for k in self:
+                yield (k, self[k])
+
+        def update(*args, **kwds):
+            '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.
+
+            If E is a dict instance, does:           for k in E: od[k] = E[k]
+            If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
+            Or if E is an iterable of items, does:   for k, v in E: od[k] = v
+            In either case, this is followed by:     for k, v in F.items(): od[k] = v
+
+            '''
+            if len(args) > 2:
+                raise TypeError('update() takes at most 2 positional '
+                                'arguments (%d given)' % (len(args),))
+            elif not args:
+                raise TypeError('update() takes at least 1 argument (0 given)')
+            self = args[0]
+            # Make progressively weaker assumptions about "other"
+            other = ()
+            if len(args) == 2:
+                other = args[1]
+            if isinstance(other, dict):
+                for key in other:
+                    self[key] = other[key]
+            elif hasattr(other, 'keys'):
+                for key in other.keys():
+                    self[key] = other[key]
+            else:
+                for key, value in other:
+                    self[key] = value
+            for key, value in kwds.items():
+                self[key] = value
+
+        __update = update  # let subclasses override update without breaking __init__
+
+        __marker = object()
+
+        def pop(self, key, default=__marker):
+            '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+            If key is not found, d is returned if given, otherwise KeyError is raised.
+
+            '''
+            if key in self:
+                result = self[key]
+                del self[key]
+                return result
+            if default is self.__marker:
+                raise KeyError(key)
+            return default
+
+        def setdefault(self, key, default=None):
+            'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+            if key in self:
+                return self[key]
+            self[key] = default
+            return default
+
+        def __repr__(self, _repr_running=None):
+            'od.__repr__() <==> repr(od)'
+            if not _repr_running: _repr_running = {}
+            call_key = id(self), _get_ident()
+            if call_key in _repr_running:
+                return '...'
+            _repr_running[call_key] = 1
+            try:
+                if not self:
+                    return '%s()' % (self.__class__.__name__,)
+                return '%s(%r)' % (self.__class__.__name__, self.items())
+            finally:
+                del _repr_running[call_key]
+
+        def __reduce__(self):
+            'Return state information for pickling'
+            items = [[k, self[k]] for k in self]
+            inst_dict = vars(self).copy()
+            for k in vars(OrderedDict()):
+                inst_dict.pop(k, None)
+            if inst_dict:
+                return (self.__class__, (items,), inst_dict)
+            return self.__class__, (items,)
+
+        def copy(self):
+            'od.copy() -> a shallow copy of od'
+            return self.__class__(self)
+
+        @classmethod
+        def fromkeys(cls, iterable, value=None):
+            '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+            and values equal to v (which defaults to None).
+
+            '''
+            d = cls()
+            for key in iterable:
+                d[key] = value
+            return d
+
+        def __eq__(self, other):
+            '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
+            while comparison to a regular mapping is order-insensitive.
+
+            '''
+            if isinstance(other, OrderedDict):
+                return len(self)==len(other) and self.items() == other.items()
+            return dict.__eq__(self, other)
+
+        def __ne__(self, other):
+            return not self == other
+
+        # -- the following methods are only used in Python 2.7 --
+
+        def viewkeys(self):
+            "od.viewkeys() -> a set-like object providing a view on od's keys"
+            return KeysView(self)
+
+        def viewvalues(self):
+            "od.viewvalues() -> an object providing a view on od's values"
+            return ValuesView(self)
+
+        def viewitems(self):
+            "od.viewitems() -> a set-like object providing a view on od's items"
+            return ItemsView(self)
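
A minimal sketch of the ordering guarantees this backport provides,
matching the stdlib OrderedDict it stands in for:

    od = OrderedDict()
    od['a'] = 1
    od['b'] = 2
    od['c'] = 3
    assert list(od) == ['a', 'b', 'c']          # insertion order preserved
    assert od.popitem() == ('c', 3)             # LIFO by default
    assert od.popitem(last=False) == ('a', 1)   # FIFO with last=False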
+
+try:
+    from logging.config import BaseConfigurator, valid_ident
+except ImportError: # pragma: no cover
+    IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
+
+
+    def valid_ident(s):
+        m = IDENTIFIER.match(s)
+        if not m:
+            raise ValueError('Not a valid Python identifier: %r' % s)
+        return True
+
+
+    # The ConvertingXXX classes are wrappers around standard Python containers,
+    # and they serve to convert any suitable values in the container. The
+    # conversion converts base dicts, lists and tuples to their wrapped
+    # equivalents, whereas strings which match a conversion format are converted
+    # appropriately.
+    #
+    # Each wrapper should have a configurator attribute holding the actual
+    # configurator to use for conversion.
+
+    class ConvertingDict(dict):
+        """A converting dictionary wrapper."""
+
+        def __getitem__(self, key):
+            value = dict.__getitem__(self, key)
+            result = self.configurator.convert(value)
+            #If the converted value is different, save for next time
+            if value is not result:
+                self[key] = result
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+        def get(self, key, default=None):
+            value = dict.get(self, key, default)
+            result = self.configurator.convert(value)
+            #If the converted value is different, save for next time
+            if value is not result:
+                self[key] = result
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+        def pop(self, key, default=None):
+            value = dict.pop(self, key, default)
+            result = self.configurator.convert(value)
+            # If the converted value is different, record where it came from
+            if value is not result:
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+    class ConvertingList(list):
+        """A converting list wrapper."""
+        def __getitem__(self, key):
+            value = list.__getitem__(self, key)
+            result = self.configurator.convert(value)
+            #If the converted value is different, save for next time
+            if value is not result:
+                self[key] = result
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+        def pop(self, idx=-1):
+            value = list.pop(self, idx)
+            result = self.configurator.convert(value)
+            if value is not result:
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+            return result
+
+    class ConvertingTuple(tuple):
+        """A converting tuple wrapper."""
+        def __getitem__(self, key):
+            value = tuple.__getitem__(self, key)
+            result = self.configurator.convert(value)
+            if value is not result:
+                if type(result) in (ConvertingDict, ConvertingList,
+                                    ConvertingTuple):
+                    result.parent = self
+                    result.key = key
+            return result
+
+    class BaseConfigurator(object):
+        """
+        The configurator base class which defines some useful defaults.
+        """
+
+        CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
+
+        WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
+        DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
+        INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
+        DIGIT_PATTERN = re.compile(r'^\d+$')
+
+        value_converters = {
+            'ext' : 'ext_convert',
+            'cfg' : 'cfg_convert',
+        }
+
+        # We might want to use a different one, e.g. importlib
+        importer = staticmethod(__import__)
+
+        def __init__(self, config):
+            self.config = ConvertingDict(config)
+            self.config.configurator = self
+
+        def resolve(self, s):
+            """
+            Resolve strings to objects using standard import and attribute
+            syntax.
+            """
+            name = s.split('.')
+            used = name.pop(0)
+            try:
+                found = self.importer(used)
+                for frag in name:
+                    used += '.' + frag
+                    try:
+                        found = getattr(found, frag)
+                    except AttributeError:
+                        self.importer(used)
+                        found = getattr(found, frag)
+                return found
+            except ImportError:
+                e, tb = sys.exc_info()[1:]
+                v = ValueError('Cannot resolve %r: %s' % (s, e))
+                v.__cause__, v.__traceback__ = e, tb
+                raise v
+
+        def ext_convert(self, value):
+            """Default converter for the ext:// protocol."""
+            return self.resolve(value)
+
+        def cfg_convert(self, value):
+            """Default converter for the cfg:// protocol."""
+            rest = value
+            m = self.WORD_PATTERN.match(rest)
+            if m is None:
+                raise ValueError("Unable to convert %r" % value)
+            else:
+                rest = rest[m.end():]
+                d = self.config[m.groups()[0]]
+                #print d, rest
+                while rest:
+                    m = self.DOT_PATTERN.match(rest)
+                    if m:
+                        d = d[m.groups()[0]]
+                    else:
+                        m = self.INDEX_PATTERN.match(rest)
+                        if m:
+                            idx = m.groups()[0]
+                            if not self.DIGIT_PATTERN.match(idx):
+                                d = d[idx]
+                            else:
+                                try:
+                                    n = int(idx) # try as number first (most likely)
+                                    d = d[n]
+                                except TypeError:
+                                    d = d[idx]
+                    if m:
+                        rest = rest[m.end():]
+                    else:
+                        raise ValueError('Unable to convert '
+                                         '%r at %r' % (value, rest))
+            #rest should be empty
+            return d
+
+        def convert(self, value):
+            """
+            Convert values to an appropriate type. dicts, lists and tuples are
+            replaced by their converting alternatives. Strings are checked to
+            see if they have a conversion format and are converted if they do.
+            """
+            if not isinstance(value, ConvertingDict) and isinstance(value, dict):
+                value = ConvertingDict(value)
+                value.configurator = self
+            elif not isinstance(value, ConvertingList) and isinstance(value, list):
+                value = ConvertingList(value)
+                value.configurator = self
+            elif not isinstance(value, ConvertingTuple) and\
+                     isinstance(value, tuple):
+                value = ConvertingTuple(value)
+                value.configurator = self
+            elif isinstance(value, string_types):
+                m = self.CONVERT_PATTERN.match(value)
+                if m:
+                    d = m.groupdict()
+                    prefix = d['prefix']
+                    converter = self.value_converters.get(prefix, None)
+                    if converter:
+                        suffix = d['suffix']
+                        converter = getattr(self, converter)
+                        value = converter(suffix)
+            return value
+
+        def configure_custom(self, config):
+            """Configure an object with a user-supplied factory."""
+            c = config.pop('()')
+            if not callable(c):
+                c = self.resolve(c)
+            props = config.pop('.', None)
+            # Check for valid identifiers
+            kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+            result = c(**kwargs)
+            if props:
+                for name, value in props.items():
+                    setattr(result, name, value)
+            return result
+
+        def as_tuple(self, value):
+            """Utility function which converts lists to tuples."""
+            if isinstance(value, list):
+                value = tuple(value)
+            return value
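
A sanity check of the converter protocol sketched above; the same calls
work against the stdlib logging.config.BaseConfigurator that this
fallback stands in for (the configuration dict is illustrative):

    import logging

    cfg = BaseConfigurator({'handlers': {'console': {'level': 'DEBUG'}}})
    # ext:// resolves dotted import paths to objects:
    assert cfg.convert('ext://logging.Formatter') is logging.Formatter
    # cfg:// walks the configuration itself via dot and index access:
    assert cfg.convert('cfg://handlers.console[level]') == 'DEBUG'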
diff --git a/venv/lib/python3.8/site-packages/distlib/database.py b/venv/lib/python3.8/site-packages/distlib/database.py
new file mode 100644
index 0000000..5db5d7f
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/database.py
@@ -0,0 +1,1350 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""PEP 376 implementation."""
+
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import contextlib
+import hashlib
+import logging
+import os
+import posixpath
+import sys
+import zipimport
+
+from . import DistlibException, resources
+from .compat import StringIO
+from .version import get_scheme, UnsupportedVersionError
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+                       LEGACY_METADATA_FILENAME)
+from .util import (parse_requirement, cached_property, parse_name_and_version,
+                   read_exports, write_exports, CSVReader, CSVWriter)
+
+
+__all__ = ['Distribution', 'BaseInstalledDistribution',
+           'InstalledDistribution', 'EggInfoDistribution',
+           'DistributionPath']
+
+
+logger = logging.getLogger(__name__)
+
+EXPORTS_FILENAME = 'pydist-exports.json'
+COMMANDS_FILENAME = 'pydist-commands.json'
+
+DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
+              'RESOURCES', EXPORTS_FILENAME, 'SHARED')
+
+DISTINFO_EXT = '.dist-info'
+
+
+class _Cache(object):
+    """
+    A simple cache mapping names and .dist-info paths to distributions
+    """
+    def __init__(self):
+        """
+        Initialise an instance. There is normally one for each DistributionPath.
+        """
+        self.name = {}
+        self.path = {}
+        self.generated = False
+
+    def clear(self):
+        """
+        Clear the cache, setting it to its initial state.
+        """
+        self.name.clear()
+        self.path.clear()
+        self.generated = False
+
+    def add(self, dist):
+        """
+        Add a distribution to the cache.
+        :param dist: The distribution to add.
+        """
+        if dist.path not in self.path:
+            self.path[dist.path] = dist
+            self.name.setdefault(dist.key, []).append(dist)
+
+
+class DistributionPath(object):
+    """
+    Represents a set of distributions installed on a path (typically sys.path).
+    """
+    def __init__(self, path=None, include_egg=False):
+        """
+        Create an instance from a path, optionally including legacy (distutils/
+        setuptools/distribute) distributions.
+        :param path: The path to use, as a list of directories. If not specified,
+                     sys.path is used.
+        :param include_egg: If True, this instance will look for and return legacy
+                            distributions as well as those based on PEP 376.
+        """
+        if path is None:
+            path = sys.path
+        self.path = path
+        self._include_dist = True
+        self._include_egg = include_egg
+
+        self._cache = _Cache()
+        self._cache_egg = _Cache()
+        self._cache_enabled = True
+        self._scheme = get_scheme('default')
+
+    def _get_cache_enabled(self):
+        return self._cache_enabled
+
+    def _set_cache_enabled(self, value):
+        self._cache_enabled = value
+
+    cache_enabled = property(_get_cache_enabled, _set_cache_enabled)
+
+    def clear_cache(self):
+        """
+        Clears the internal cache.
+        """
+        self._cache.clear()
+        self._cache_egg.clear()
+
+
+    def _yield_distributions(self):
+        """
+        Yield .dist-info and/or .egg(-info) distributions.
+        """
+        # We need to check if we've seen some resources already, because on
+        # some Linux systems (e.g. some Debian/Ubuntu variants) there are
+        # symlinks which alias other files in the environment.
+        seen = set()
+        for path in self.path:
+            finder = resources.finder_for_path(path)
+            if finder is None:
+                continue
+            r = finder.find('')
+            if not r or not r.is_container:
+                continue
+            rset = sorted(r.resources)
+            for entry in rset:
+                r = finder.find(entry)
+                if not r or r.path in seen:
+                    continue
+                try:
+                    if self._include_dist and entry.endswith(DISTINFO_EXT):
+                        possible_filenames = [METADATA_FILENAME,
+                                              WHEEL_METADATA_FILENAME,
+                                              LEGACY_METADATA_FILENAME]
+                        for metadata_filename in possible_filenames:
+                            metadata_path = posixpath.join(entry, metadata_filename)
+                            pydist = finder.find(metadata_path)
+                            if pydist:
+                                break
+                        else:
+                            continue
+
+                        with contextlib.closing(pydist.as_stream()) as stream:
+                            metadata = Metadata(fileobj=stream, scheme='legacy')
+                        logger.debug('Found %s', r.path)
+                        seen.add(r.path)
+                        yield new_dist_class(r.path, metadata=metadata,
+                                             env=self)
+                    elif self._include_egg and entry.endswith(('.egg-info',
+                                                              '.egg')):
+                        logger.debug('Found %s', r.path)
+                        seen.add(r.path)
+                        yield old_dist_class(r.path, self)
+                except Exception as e:
+                    msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s'
+                    logger.warning(msg, r.path, e)
+                    import warnings
+                    warnings.warn(msg % (r.path, e), stacklevel=2)
+
+    def _generate_cache(self):
+        """
+        Scan the path for distributions and populate the cache with
+        those that are found.
+        """
+        gen_dist = not self._cache.generated
+        gen_egg = self._include_egg and not self._cache_egg.generated
+        if gen_dist or gen_egg:
+            for dist in self._yield_distributions():
+                if isinstance(dist, InstalledDistribution):
+                    self._cache.add(dist)
+                else:
+                    self._cache_egg.add(dist)
+
+            if gen_dist:
+                self._cache.generated = True
+            if gen_egg:
+                self._cache_egg.generated = True
+
+    @classmethod
+    def distinfo_dirname(cls, name, version):
+        """
+        The *name* and *version* parameters are converted into their
+        filename-escaped form, i.e. any ``'-'`` characters are replaced
+        with ``'_'`` other than the one in ``'dist-info'`` and the one
+        separating the name from the version number.
+
+        :parameter name: is converted to a standard distribution name by replacing
+                         any runs of non-alphanumeric characters with a single
+                         ``'-'``.
+        :type name: string
+        :parameter version: is converted to a standard version string. Spaces
+                            become dots, and all other non-alphanumeric characters
+                            (except dots) become dashes, with runs of multiple
+                            dashes condensed to a single dash.
+        :type version: string
+        :returns: directory name
+        :rtype: string"""
+        name = name.replace('-', '_')
+        return '-'.join([name, version]) + DISTINFO_EXT
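
For example (hypothetical inputs):

    DistributionPath.distinfo_dirname('zope-interface', '5.1')
    # -> 'zope_interface-5.1.dist-info'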
+
+    def get_distributions(self):
+        """
+        Provides an iterator that looks for distributions and returns
+        :class:`InstalledDistribution` or
+        :class:`EggInfoDistribution` instances for each one of them.
+
+        :rtype: iterator of :class:`InstalledDistribution` and
+                :class:`EggInfoDistribution` instances
+        """
+        if not self._cache_enabled:
+            for dist in self._yield_distributions():
+                yield dist
+        else:
+            self._generate_cache()
+
+            for dist in self._cache.path.values():
+                yield dist
+
+            if self._include_egg:
+                for dist in self._cache_egg.path.values():
+                    yield dist
+
+    def get_distribution(self, name):
+        """
+        Looks for a named distribution on the path.
+
+        This function only returns the first result found, as no more than one
+        value is expected. If nothing is found, ``None`` is returned.
+
+        :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
+                or ``None``
+        """
+        result = None
+        name = name.lower()
+        if not self._cache_enabled:
+            for dist in self._yield_distributions():
+                if dist.key == name:
+                    result = dist
+                    break
+        else:
+            self._generate_cache()
+
+            if name in self._cache.name:
+                result = self._cache.name[name][0]
+            elif self._include_egg and name in self._cache_egg.name:
+                result = self._cache_egg.name[name][0]
+        return result
+
+    def provides_distribution(self, name, version=None):
+        """
+        Iterates over all distributions to find which distributions provide *name*.
+        If a *version* is provided, it will be used to filter the results.
+
+        This is a generator that yields every matching distribution; if no
+        distribution provides *name*, nothing is yielded.
+
+        :parameter version: a version specifier that indicates the version
+                            required, conforming to the format in ``PEP-345``
+
+        :type name: string
+        :type version: string
+        """
+        matcher = None
+        if version is not None:
+            try:
+                matcher = self._scheme.matcher('%s (%s)' % (name, version))
+            except ValueError:
+                raise DistlibException('invalid name or version: %r, %r' %
+                                      (name, version))
+
+        for dist in self.get_distributions():
+            # We hit a problem on Travis where enum34 was installed and doesn't
+            # have a provides attribute ...
+            if not hasattr(dist, 'provides'):
+                logger.debug('No "provides": %s', dist)
+            else:
+                provided = dist.provides
+
+                for p in provided:
+                    p_name, p_ver = parse_name_and_version(p)
+                    if matcher is None:
+                        if p_name == name:
+                            yield dist
+                            break
+                    else:
+                        if p_name == name and matcher.match(p_ver):
+                            yield dist
+                            break
+
+    def get_file_path(self, name, relative_path):
+        """
+        Return the path to a resource file.
+        """
+        dist = self.get_distribution(name)
+        if dist is None:
+            raise LookupError('no distribution named %r found' % name)
+        return dist.get_resource_path(relative_path)
+
+    def get_exported_entries(self, category, name=None):
+        """
+        Return all of the exported entries in a particular category.
+
+        :param category: The category to search for entries.
+        :param name: If specified, only entries with that name are returned.
+        """
+        for dist in self.get_distributions():
+            r = dist.exports
+            if category in r:
+                d = r[category]
+                if name is not None:
+                    if name in d:
+                        yield d[name]
+                else:
+                    for v in d.values():
+                        yield v
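
A hedged usage sketch for the class above, scanning sys.path for
installed distributions (the 'pip' lookup is illustrative):

    dist_path = DistributionPath(include_egg=True)
    for dist in dist_path.get_distributions():
        print(dist.name, dist.version)
    pip_dist = dist_path.get_distribution('pip')  # lookup is case-insensitive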
+
+
+class Distribution(object):
+    """
+    A base class for distributions, whether installed or from indexes.
+    Either way, it must have some metadata, so that's all that's needed
+    for construction.
+    """
+
+    build_time_dependency = False
+    """
+    Set to True if it's known to be only a build-time dependency (i.e.
+    not needed after installation).
+    """
+
+    requested = False
+    """A boolean that indicates whether the ``REQUESTED`` metadata file is
+    present (in other words, whether the package was installed by user
+    request or it was installed as a dependency)."""
+
+    def __init__(self, metadata):
+        """
+        Initialise an instance.
+        :param metadata: The instance of :class:`Metadata` describing this
+        distribution.
+        """
+        self.metadata = metadata
+        self.name = metadata.name
+        self.key = self.name.lower()    # for case-insensitive comparisons
+        self.version = metadata.version
+        self.locator = None
+        self.digest = None
+        self.extras = None      # additional features requested
+        self.context = None     # environment marker overrides
+        self.download_urls = set()
+        self.digests = {}
+
+    @property
+    def source_url(self):
+        """
+        The source archive download URL for this distribution.
+        """
+        return self.metadata.source_url
+
+    download_url = source_url   # Backward compatibility
+
+    @property
+    def name_and_version(self):
+        """
+        A utility property which displays the name and version in parentheses.
+        """
+        return '%s (%s)' % (self.name, self.version)
+
+    @property
+    def provides(self):
+        """
+        A set of distribution names and versions provided by this distribution.
+        :return: A set of "name (version)" strings.
+        """
+        plist = self.metadata.provides
+        s = '%s (%s)' % (self.name, self.version)
+        if s not in plist:
+            plist.append(s)
+        return plist
+
+    def _get_requirements(self, req_attr):
+        md = self.metadata
+        reqts = getattr(md, req_attr)
+        logger.debug('%s: got requirements %r from metadata: %r', self.name, req_attr,
+                     reqts)
+        return set(md.get_requirements(reqts, extras=self.extras,
+                                       env=self.context))
+
+    @property
+    def run_requires(self):
+        return self._get_requirements('run_requires')
+
+    @property
+    def meta_requires(self):
+        return self._get_requirements('meta_requires')
+
+    @property
+    def build_requires(self):
+        return self._get_requirements('build_requires')
+
+    @property
+    def test_requires(self):
+        return self._get_requirements('test_requires')
+
+    @property
+    def dev_requires(self):
+        return self._get_requirements('dev_requires')
+
+    def matches_requirement(self, req):
+        """
+        Say if this instance matches (fulfills) a requirement.
+        :param req: The requirement to match.
+        :rtype req: str
+        :return: True if it matches, else False.
+        """
+        # Requirement may contain extras - parse to lose those
+        # from what's passed to the matcher
+        r = parse_requirement(req)
+        scheme = get_scheme(self.metadata.scheme)
+        try:
+            matcher = scheme.matcher(r.requirement)
+        except UnsupportedVersionError:
+            # XXX compat-mode if cannot read the version
+            logger.warning('could not read version %r - using name only',
+                           req)
+            name = req.split()[0]
+            matcher = scheme.matcher(name)
+
+        name = matcher.key   # case-insensitive
+
+        result = False
+        for p in self.provides:
+            p_name, p_ver = parse_name_and_version(p)
+            if p_name != name:
+                continue
+            try:
+                result = matcher.match(p_ver)
+                break
+            except UnsupportedVersionError:
+                pass
+        return result
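
Illustrative behaviour, assuming a distribution named 'requests' at
version 2.25.1 whose metadata uses the default version scheme:

    dist.matches_requirement('requests (>= 2.0)')    # -> True
    dist.matches_requirement('requests (== 1.0)')    # -> False
    dist.matches_requirement('other-name (>= 2.0)')  # -> False (name mismatch)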
+
+    def __repr__(self):
+        """
+        Return a textual representation of this instance,
+        """
+        if self.source_url:
+            suffix = ' [%s]' % self.source_url
+        else:
+            suffix = ''
+        return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
+
+    def __eq__(self, other):
+        """
+        See if this distribution is the same as another.
+        :param other: The distribution to compare with. To be equal to one
+                      another, distributions must have the same type, name,
+                      version and source_url.
+        :return: True if it is the same, else False.
+        """
+        if type(other) is not type(self):
+            result = False
+        else:
+            result = (self.name == other.name and
+                      self.version == other.version and
+                      self.source_url == other.source_url)
+        return result
+
+    def __hash__(self):
+        """
+        Compute hash in a way which matches the equality test.
+        """
+        return hash(self.name) + hash(self.version) + hash(self.source_url)
+
+
+class BaseInstalledDistribution(Distribution):
+    """
+    This is the base class for installed distributions (whether PEP 376 or
+    legacy).
+    """
+
+    hasher = None
+
+    def __init__(self, metadata, path, env=None):
+        """
+        Initialise an instance.
+        :param metadata: An instance of :class:`Metadata` which describes the
+                         distribution. This will normally have been initialised
+                         from a metadata file in the ``path``.
+        :param path:     The path of the ``.dist-info`` or ``.egg-info``
+                         directory for the distribution.
+        :param env:      This is normally the :class:`DistributionPath`
+                         instance where this distribution was found.
+        """
+        super(BaseInstalledDistribution, self).__init__(metadata)
+        self.path = path
+        self.dist_path = env
+
+    def get_hash(self, data, hasher=None):
+        """
+        Get the hash of some data, using a particular hash algorithm, if
+        specified.
+
+        :param data: The data to be hashed.
+        :type data: bytes
+        :param hasher: The name of a hash implementation, supported by hashlib,
+                       or ``None``. Examples of valid values are ``'sha1'``,
+                       ``'sha224'``, ``'sha384'``, ``'sha256'``, ``'md5'`` and
+                       ``'sha512'``. If no hasher is specified, the ``hasher``
+                       attribute of the :class:`InstalledDistribution` instance
+                       is used. If the hasher is determined to be ``None``, MD5
+                       is used as the hashing algorithm.
+        :returns: The hash of the data. If a hasher was explicitly specified,
+                  the returned hash will be prefixed with the specified hasher
+                  followed by '='.
+        :rtype: str
+        """
+        if hasher is None:
+            hasher = self.hasher
+        if hasher is None:
+            hasher = hashlib.md5
+            prefix = ''
+        else:
+            # capture the name before replacing it with the hashlib callable
+            prefix = '%s=' % hasher
+            hasher = getattr(hashlib, hasher)
+        digest = hasher(data).digest()
+        digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
+        return '%s%s' % (prefix, digest)
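
The digest format matches the hash encoding used in wheel RECORD files:
an urlsafe base64 digest with padding stripped, optionally prefixed by
the hasher name. A standalone sketch of the same computation:

    import base64
    import hashlib

    digest = hashlib.sha256(b'file contents').digest()
    encoded = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
    record_hash = 'sha256=' + encoded  # the shape get_hash() returns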
+
+
+class InstalledDistribution(BaseInstalledDistribution):
+    """
+    Created with the *path* of the ``.dist-info`` directory provided to the
+    constructor. It reads the metadata contained in ``pydist.json`` when it is
+    instantiated, or uses a passed-in Metadata instance (useful for when
+    dry-run mode is being used).
+    """
+
+    hasher = 'sha256'
+
+    def __init__(self, path, metadata=None, env=None):
+        self.modules = []
+        self.finder = finder = resources.finder_for_path(path)
+        if finder is None:
+            raise ValueError('finder unavailable for %s' % path)
+        if env and env._cache_enabled and path in env._cache.path:
+            metadata = env._cache.path[path].metadata
+        elif metadata is None:
+            r = finder.find(METADATA_FILENAME)
+            # Temporary - for Wheel 0.23 support
+            if r is None:
+                r = finder.find(WHEEL_METADATA_FILENAME)
+            # Temporary - for legacy support
+            if r is None:
+                r = finder.find(LEGACY_METADATA_FILENAME)
+            if r is None:
+                raise ValueError('no %s found in %s' % (METADATA_FILENAME,
+                                                        path))
+            with contextlib.closing(r.as_stream()) as stream:
+                metadata = Metadata(fileobj=stream, scheme='legacy')
+
+        super(InstalledDistribution, self).__init__(metadata, path, env)
+
+        if env and env._cache_enabled:
+            env._cache.add(self)
+
+        r = finder.find('REQUESTED')
+        self.requested = r is not None
+        p = os.path.join(path, 'top_level.txt')
+        if os.path.exists(p):
+            with open(p, 'rb') as f:
+                data = f.read().decode('utf-8')
+            self.modules = data.splitlines()
+
+    def __repr__(self):
+        return '<InstalledDistribution %r %s at %r>' % (
+            self.name, self.version, self.path)
+
+    def __str__(self):
+        return "%s %s" % (self.name, self.version)
+
+    def _get_records(self):
+        """
+        Get the list of installed files for the distribution
+        :return: A list of tuples of path, hash and size. Note that hash and
+                 size might be ``None`` for some entries. The path is exactly
+                 as stored in the file (which is as in PEP 376).
+        """
+        results = []
+        r = self.get_distinfo_resource('RECORD')
+        with contextlib.closing(r.as_stream()) as stream:
+            with CSVReader(stream=stream) as record_reader:
+                # Base location is parent dir of .dist-info dir
+                #base_location = os.path.dirname(self.path)
+                #base_location = os.path.abspath(base_location)
+                for row in record_reader:
+                    missing = [None for i in range(len(row), 3)]
+                    path, checksum, size = row + missing
+                    #if not os.path.isabs(path):
+                    #    path = path.replace('/', os.sep)
+                    #    path = os.path.join(base_location, path)
+                    results.append((path, checksum, size))
+        return results
+
+    @cached_property
+    def exports(self):
+        """
+        Return the information exported by this distribution.
+        :return: A dictionary of exports, mapping an export category to a dict
+                 of :class:`ExportEntry` instances describing the individual
+                 export entries, and keyed by name.
+        """
+        result = {}
+        r = self.get_distinfo_resource(EXPORTS_FILENAME)
+        if r:
+            result = self.read_exports()
+        return result
+
+    def read_exports(self):
+        """
+        Read exports data from a file in .ini format.
+
+        :return: A dictionary of exports, mapping an export category to a list
+                 of :class:`ExportEntry` instances describing the individual
+                 export entries.
+        """
+        result = {}
+        r = self.get_distinfo_resource(EXPORTS_FILENAME)
+        if r:
+            with contextlib.closing(r.as_stream()) as stream:
+                result = read_exports(stream)
+        return result
+
+    def write_exports(self, exports):
+        """
+        Write a dictionary of exports to a file in .ini format.
+        :param exports: A dictionary of exports, mapping an export category to
+                        a list of :class:`ExportEntry` instances describing the
+                        individual export entries.
+        """
+        rf = self.get_distinfo_file(EXPORTS_FILENAME)
+        with open(rf, 'w') as f:
+            write_exports(exports, f)
+
+    def get_resource_path(self, relative_path):
+        """
+        NOTE: This API may change in the future.
+
+        Return the absolute path to a resource file with the given relative
+        path.
+
+        :param relative_path: The path, relative to .dist-info, of the resource
+                              of interest.
+        :return: The absolute path where the resource is to be found.
+        """
+        r = self.get_distinfo_resource('RESOURCES')
+        with contextlib.closing(r.as_stream()) as stream:
+            with CSVReader(stream=stream) as resources_reader:
+                for relative, destination in resources_reader:
+                    if relative == relative_path:
+                        return destination
+        raise KeyError('no resource file with relative path %r '
+                       'is installed' % relative_path)
+
+    def list_installed_files(self):
+        """
+        Iterates over the ``RECORD`` entries and returns a tuple
+        ``(path, hash, size)`` for each line.
+
+        :returns: iterator of (path, hash, size)
+        """
+        for result in self._get_records():
+            yield result
+
+    def write_installed_files(self, paths, prefix, dry_run=False):
+        """
+        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
+        existing ``RECORD`` file is silently overwritten.
+
+        prefix is used to determine when to write absolute paths.
+        """
+        prefix = os.path.join(prefix, '')
+        base = os.path.dirname(self.path)
+        base_under_prefix = base.startswith(prefix)
+        base = os.path.join(base, '')
+        record_path = self.get_distinfo_file('RECORD')
+        logger.info('creating %s', record_path)
+        if dry_run:
+            return None
+        with CSVWriter(record_path) as writer:
+            for path in paths:
+                if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
+                    # do not put size and hash, as in PEP-376
+                    hash_value = size = ''
+                else:
+                    size = '%d' % os.path.getsize(path)
+                    with open(path, 'rb') as fp:
+                        hash_value = self.get_hash(fp.read())
+                if path.startswith(base) or (base_under_prefix and
+                                             path.startswith(prefix)):
+                    path = os.path.relpath(path, base)
+                writer.writerow((path, hash_value, size))
+
+            # add the RECORD file itself
+            if record_path.startswith(base):
+                record_path = os.path.relpath(record_path, base)
+            writer.writerow((record_path, '', ''))
+        return record_path
+
+    def check_installed_files(self):
+        """
+        Checks that the hashes and sizes of the files in ``RECORD`` are
+        matched by the files themselves. Returns a (possibly empty) list of
+        mismatches. Each entry in the mismatch list will be a tuple consisting
+        of the path, 'exists', 'size' or 'hash' according to what didn't match
+        (existence is checked first, then size, then hash), the expected
+        value and the actual value.
+        """
+        mismatches = []
+        base = os.path.dirname(self.path)
+        record_path = self.get_distinfo_file('RECORD')
+        for path, hash_value, size in self.list_installed_files():
+            if not os.path.isabs(path):
+                path = os.path.join(base, path)
+            if path == record_path:
+                continue
+            if not os.path.exists(path):
+                mismatches.append((path, 'exists', True, False))
+            elif os.path.isfile(path):
+                actual_size = str(os.path.getsize(path))
+                if size and actual_size != size:
+                    mismatches.append((path, 'size', size, actual_size))
+                elif hash_value:
+                    if '=' in hash_value:
+                        hasher = hash_value.split('=', 1)[0]
+                    else:
+                        hasher = None
+
+                    with open(path, 'rb') as f:
+                        actual_hash = self.get_hash(f.read(), hasher)
+                        if actual_hash != hash_value:
+                            mismatches.append((path, 'hash', hash_value, actual_hash))
+        return mismatches
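
Each mismatch is a 4-tuple; for example (hypothetical path):

    ('/site-packages/foo/bar.py', 'size', '1024', '2048')
    # RECORD promised 1024 bytes, but 2048 were found on disk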
+
+    @cached_property
+    def shared_locations(self):
+        """
+        A dictionary of shared locations whose keys are in the set 'prefix',
+        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
+        The corresponding value is the absolute path of that category for
+        this distribution, and takes into account any paths selected by the
+        user at installation time (e.g. via command-line arguments). In the
+        case of the 'namespace' key, this would be a list of absolute paths
+        for the roots of namespace packages in this distribution.
+
+        The first time this property is accessed, the relevant information is
+        read from the SHARED file in the .dist-info directory.
+        """
+        result = {}
+        shared_path = os.path.join(self.path, 'SHARED')
+        if os.path.isfile(shared_path):
+            with codecs.open(shared_path, 'r', encoding='utf-8') as f:
+                lines = f.read().splitlines()
+            for line in lines:
+                key, value = line.split('=', 1)
+                if key == 'namespace':
+                    result.setdefault(key, []).append(value)
+                else:
+                    result[key] = value
+        return result
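
A hypothetical SHARED file parsed by the property above; 'namespace'
may repeat, while all other keys are single-valued:

    prefix=/usr/local
    scripts=/usr/local/bin
    namespace=/usr/local/lib/python3.8/site-packages/ns_a
    namespace=/usr/local/lib/python3.8/site-packages/ns_b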
+
+    def write_shared_locations(self, paths, dry_run=False):
+        """
+        Write shared location information to the SHARED file in .dist-info.
+        :param paths: A dictionary as described in the documentation for
+        :meth:`shared_locations`.
+        :param dry_run: If True, the action is logged but no file is actually
+                        written.
+        :return: The path of the file written to.
+        """
+        shared_path = os.path.join(self.path, 'SHARED')
+        logger.info('creating %s', shared_path)
+        if dry_run:
+            return None
+        lines = []
+        for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
+            path = paths[key]
+            if os.path.isdir(paths[key]):
+                lines.append('%s=%s' % (key, path))
+        for ns in paths.get('namespace', ()):
+            lines.append('namespace=%s' % ns)
+
+        with codecs.open(shared_path, 'w', encoding='utf-8') as f:
+            f.write('\n'.join(lines))
+        return shared_path
+
+    def get_distinfo_resource(self, path):
+        if path not in DIST_FILES:
+            raise DistlibException('invalid path for a dist-info file: '
+                                   '%r at %r' % (path, self.path))
+        finder = resources.finder_for_path(self.path)
+        if finder is None:
+            raise DistlibException('Unable to get a finder for %s' % self.path)
+        return finder.find(path)
+
+    def get_distinfo_file(self, path):
+        """
+        Returns a path located under the ``.dist-info`` directory. Returns a
+        string representing the path.
+
+        :parameter path: a ``'/'``-separated path relative to the
+                         ``.dist-info`` directory or an absolute path;
+                         If *path* is an absolute path and doesn't start
+                         with the ``.dist-info`` directory path,
+                         a :class:`DistlibException` is raised
+        :type path: str
+        :rtype: str
+        """
+        # Check if it is an absolute path  # XXX use relpath, add tests
+        if path.find(os.sep) >= 0:
+            # it's an absolute path?
+            distinfo_dirname, path = path.split(os.sep)[-2:]
+            if distinfo_dirname != self.path.split(os.sep)[-1]:
+                raise DistlibException(
+                    'dist-info file %r does not belong to the %r %s '
+                    'distribution' % (path, self.name, self.version))
+
+        # The file must be relative
+        if path not in DIST_FILES:
+            raise DistlibException('invalid path for a dist-info file: '
+                                   '%r at %r' % (path, self.path))
+
+        return os.path.join(self.path, path)
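
Expected behaviour of the helper above (paths illustrative):

    dist.get_distinfo_file('RECORD')
    # -> '<self.path>/RECORD', since 'RECORD' is in DIST_FILES
    dist.get_distinfo_file('setup.py')
    # -> raises DistlibException: not a recognised dist-info file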
+
+    def list_distinfo_files(self):
+        """
+        Iterates over the ``RECORD`` entries and returns paths for each line if
+        the path is pointing to a file located in the ``.dist-info`` directory
+        or one of its subdirectories.
+
+        :returns: iterator of paths
+        """
+        base = os.path.dirname(self.path)
+        for path, checksum, size in self._get_records():
+            # XXX add separator or use real relpath algo
+            if not os.path.isabs(path):
+                path = os.path.join(base, path)
+            if path.startswith(self.path):
+                yield path
+
+    def __eq__(self, other):
+        return (isinstance(other, InstalledDistribution) and
+                self.path == other.path)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    __hash__ = object.__hash__
+
+
+class EggInfoDistribution(BaseInstalledDistribution):
+    """Created with the *path* of the ``.egg-info`` directory or file provided
+    to the constructor. It reads the metadata contained in the file itself, or
+    if the given path happens to be a directory, the metadata is read from the
+    file ``PKG-INFO`` under that directory."""
+
+    requested = True    # as we have no way of knowing, assume it was
+    shared_locations = {}
+
+    def __init__(self, path, env=None):
+        def set_name_and_version(s, n, v):
+            s.name = n
+            s.key = n.lower()   # for case-insensitive comparisons
+            s.version = v
+
+        self.path = path
+        self.dist_path = env
+        if env and env._cache_enabled and path in env._cache_egg.path:
+            metadata = env._cache_egg.path[path].metadata
+            set_name_and_version(self, metadata.name, metadata.version)
+        else:
+            metadata = self._get_metadata(path)
+
+            # Need to be set before caching
+            set_name_and_version(self, metadata.name, metadata.version)
+
+            if env and env._cache_enabled:
+                env._cache_egg.add(self)
+        super(EggInfoDistribution, self).__init__(metadata, path, env)
+
+    def _get_metadata(self, path):
+        requires = None
+
+        def parse_requires_data(data):
+            """Create a list of dependencies from a requires.txt file.
+
+            *data*: the contents of a setuptools-produced requires.txt file.
+            """
+            reqs = []
+            lines = data.splitlines()
+            for line in lines:
+                line = line.strip()
+                if line.startswith('['):
+                    logger.warning('Unexpected line: quitting requirement scan: %r',
+                                   line)
+                    break
+                r = parse_requirement(line)
+                if not r:
+                    logger.warning('Not recognised as a requirement: %r', line)
+                    continue
+                if r.extras:
+                    logger.warning('extra requirements in requires.txt are '
+                                   'not supported')
+                if not r.constraints:
+                    reqs.append(r.name)
+                else:
+                    cons = ', '.join('%s%s' % c for c in r.constraints)
+                    reqs.append('%s (%s)' % (r.name, cons))
+            return reqs
+
+        def parse_requires_path(req_path):
+            """Create a list of dependencies from a requires.txt file.
+
+            *req_path*: the path to a setuptools-produced requires.txt file.
+            """
+
+            reqs = []
+            try:
+                with codecs.open(req_path, 'r', 'utf-8') as fp:
+                    reqs = parse_requires_data(fp.read())
+            except IOError:
+                pass
+            return reqs
+
+        tl_path = tl_data = None
+        if path.endswith('.egg'):
+            if os.path.isdir(path):
+                p = os.path.join(path, 'EGG-INFO')
+                meta_path = os.path.join(p, 'PKG-INFO')
+                metadata = Metadata(path=meta_path, scheme='legacy')
+                req_path = os.path.join(p, 'requires.txt')
+                tl_path = os.path.join(p, 'top_level.txt')
+                requires = parse_requires_path(req_path)
+            else:
+                # FIXME handle the case where zipfile is not available
+                zipf = zipimport.zipimporter(path)
+                fileobj = StringIO(
+                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
+                metadata = Metadata(fileobj=fileobj, scheme='legacy')
+                try:
+                    data = zipf.get_data('EGG-INFO/requires.txt')
+                    tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8')
+                    requires = parse_requires_data(data.decode('utf-8'))
+                except IOError:
+                    requires = None
+        elif path.endswith('.egg-info'):
+            if os.path.isdir(path):
+                req_path = os.path.join(path, 'requires.txt')
+                requires = parse_requires_path(req_path)
+                path = os.path.join(path, 'PKG-INFO')
+                tl_path = os.path.join(path, 'top_level.txt')
+            metadata = Metadata(path=path, scheme='legacy')
+        else:
+            raise DistlibException('path must end with .egg-info or .egg, '
+                                   'got %r' % path)
+
+        if requires:
+            metadata.add_requirements(requires)
+        # look for top-level modules in top_level.txt, if present
+        if tl_data is None:
+            if tl_path is not None and os.path.exists(tl_path):
+                with open(tl_path, 'rb') as f:
+                    tl_data = f.read().decode('utf-8')
+        if not tl_data:
+            tl_data = []
+        else:
+            tl_data = tl_data.splitlines()
+        self.modules = tl_data
+        return metadata
+
+    def __repr__(self):
+        return '<EggInfoDistribution %r %s at %r>' % (
+            self.name, self.version, self.path)
+
+    def __str__(self):
+        return "%s %s" % (self.name, self.version)
+
+    def check_installed_files(self):
+        """
+        Checks that the hashes and sizes of the files in ``RECORD`` are
+        matched by the files themselves. Returns a (possibly empty) list of
+        mismatches. Each entry in the mismatch list will be a tuple consisting
+        of the path, 'exists', 'size' or 'hash' according to what didn't match
+        (existence is checked first, then size, then hash), the expected
+        value and the actual value.
+        """
+        mismatches = []
+        record_path = os.path.join(self.path, 'installed-files.txt')
+        if os.path.exists(record_path):
+            for path, _, _ in self.list_installed_files():
+                if path == record_path:
+                    continue
+                if not os.path.exists(path):
+                    mismatches.append((path, 'exists', True, False))
+        return mismatches
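+
+    # Illustrative example (not part of distlib): a file deleted after
+    # installation would be reported as
+    #
+    #   ('/site-packages/foo/bar.py', 'exists', True, False)
+    #
+    # i.e. (path, what failed to match, expected value, actual value).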
+
+    def list_installed_files(self):
+        """
+        Iterates over the ``installed-files.txt`` entries and returns a tuple
+        ``(path, hash, size)`` for each line.
+
+        :returns: a list of (path, hash, size)
+        """
+
+        def _md5(path):
+            f = open(path, 'rb')
+            try:
+                content = f.read()
+            finally:
+                f.close()
+            return hashlib.md5(content).hexdigest()
+
+        def _size(path):
+            return os.stat(path).st_size
+
+        record_path = os.path.join(self.path, 'installed-files.txt')
+        result = []
+        if os.path.exists(record_path):
+            with codecs.open(record_path, 'r', encoding='utf-8') as f:
+                for line in f:
+                    line = line.strip()
+                    p = os.path.normpath(os.path.join(self.path, line))
+                    # "./" is present as a marker between installed files
+                    # and installation metadata files
+                    if not os.path.exists(p):
+                        logger.warning('Non-existent file: %s', p)
+                        if p.endswith(('.pyc', '.pyo')):
+                            continue
+                        # otherwise fall through and fail
+                    if not os.path.isdir(p):
+                        result.append((p, _md5(p), _size(p)))
+            result.append((record_path, None, None))
+        return result
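+
+    # Illustrative sketch (not part of distlib): for an installed-files.txt
+    # containing, say,
+    #
+    #   ../foo/__init__.py
+    #   ./
+    #   PKG-INFO
+    #
+    # this returns [(abs_path, md5_hexdigest, size), ...] for each regular
+    # file, followed by a final (record_path, None, None) entry for the
+    # record file itself.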
+
+    def list_distinfo_files(self, absolute=False):
+        """
+        Iterates over the ``installed-files.txt`` entries and returns paths for
+        each line if the path is pointing to a file located in the
+        ``.egg-info`` directory or one of its subdirectories.
+
+        :parameter absolute: If *absolute* is ``True``, each returned path is
+                          transformed into a local absolute path. Otherwise the
+                          raw value from ``installed-files.txt`` is returned.
+        :type absolute: boolean
+        :returns: iterator of paths
+        """
+        record_path = os.path.join(self.path, 'installed-files.txt')
+        if os.path.exists(record_path):
+            skip = True
+            with codecs.open(record_path, 'r', encoding='utf-8') as f:
+                for line in f:
+                    line = line.strip()
+                    if line == './':
+                        skip = False
+                        continue
+                    if not skip:
+                        p = os.path.normpath(os.path.join(self.path, line))
+                        if p.startswith(self.path):
+                            if absolute:
+                                yield p
+                            else:
+                                yield line
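+
+    # Illustrative sketch (not part of distlib): lines before the './'
+    # marker are installed modules and are skipped; lines after it name
+    # metadata files inside the .egg-info directory, so for a hypothetical
+    # dist this might yield
+    #
+    #   >>> list(dist.list_distinfo_files())
+    #   ['PKG-INFO', 'requires.txt', 'top_level.txt']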
+
+    def __eq__(self, other):
+        return (isinstance(other, EggInfoDistribution) and
+                self.path == other.path)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    __hash__ = object.__hash__
+
+new_dist_class = InstalledDistribution
+old_dist_class = EggInfoDistribution
+
+
+class DependencyGraph(object):
+    """
+    Represents a dependency graph between distributions.
+
+    The dependency relationships are stored in an ``adjacency_list`` that maps
+    distributions to a list of ``(other, label)`` tuples where ``other``
+    is a distribution and the edge is labeled with ``label`` (i.e. the version
+    specifier, if such was provided). Also, for more efficient traversal, for
+    every distribution ``x``, a list of predecessors is kept in
+    ``reverse_list[x]``. An edge from distribution ``a`` to
+    distribution ``b`` means that ``a`` depends on ``b``. If any missing
+    dependencies are found, they are stored in ``missing``, which is a
+    dictionary that maps distributions to a list of requirements that were not
+    provided by any other distributions.
+    """
+
+    def __init__(self):
+        self.adjacency_list = {}
+        self.reverse_list = {}
+        self.missing = {}
+
+    def add_distribution(self, distribution):
+        """Add the *distribution* to the graph.
+
+        :type distribution: :class:`distutils2.database.InstalledDistribution`
+                            or :class:`distutils2.database.EggInfoDistribution`
+        """
+        self.adjacency_list[distribution] = []
+        self.reverse_list[distribution] = []
+        #self.missing[distribution] = []
+
+    def add_edge(self, x, y, label=None):
+        """Add an edge from distribution *x* to distribution *y* with the given
+        *label*.
+
+        :type x: :class:`distutils2.database.InstalledDistribution` or
+                 :class:`distutils2.database.EggInfoDistribution`
+        :type y: :class:`distutils2.database.InstalledDistribution` or
+                 :class:`distutils2.database.EggInfoDistribution`
+        :type label: ``str`` or ``None``
+        """
+        self.adjacency_list[x].append((y, label))
+        # multiple edges are allowed, so be careful
+        if x not in self.reverse_list[y]:
+            self.reverse_list[y].append(x)
+
+    def add_missing(self, distribution, requirement):
+        """
+        Add a missing *requirement* for the given *distribution*.
+
+        :type distribution: :class:`distutils2.database.InstalledDistribution`
+                            or :class:`distutils2.database.EggInfoDistribution`
+        :type requirement: ``str``
+        """
+        logger.debug('%s missing %r', distribution, requirement)
+        self.missing.setdefault(distribution, []).append(requirement)
+
+    def _repr_dist(self, dist):
+        return '%s %s' % (dist.name, dist.version)
+
+    def repr_node(self, dist, level=1):
+        """Prints only a subgraph"""
+        output = [self._repr_dist(dist)]
+        for other, label in self.adjacency_list[dist]:
+            dist = self._repr_dist(other)
+            if label is not None:
+                dist = '%s [%s]' % (dist, label)
+            output.append('    ' * level + str(dist))
+            suboutput = self.repr_node(other, level + 1)
+            subs = suboutput.split('\n')
+            output.extend(subs[1:])
+        return '\n'.join(output)
+
+    def to_dot(self, f, skip_disconnected=True):
+        """Writes a DOT output for the graph to the provided file *f*.
+
+        If *skip_disconnected* is set to ``True``, then all distributions
+        that are not dependent on any other distribution are skipped.
+
+        :type f: has to support ``file``-like operations
+        :type skip_disconnected: ``bool``
+        """
+        disconnected = []
+
+        f.write("digraph dependencies {\n")
+        for dist, adjs in self.adjacency_list.items():
+            if len(adjs) == 0 and not skip_disconnected:
+                disconnected.append(dist)
+            for other, label in adjs:
+                if label is not None:
+                    f.write('"%s" -> "%s" [label="%s"]\n' %
+                            (dist.name, other.name, label))
+                else:
+                    f.write('"%s" -> "%s"\n' % (dist.name, other.name))
+        if not skip_disconnected and len(disconnected) > 0:
+            f.write('subgraph disconnected {\n')
+            f.write('label = "Disconnected"\n')
+            f.write('bgcolor = red\n')
+
+            for dist in disconnected:
+                f.write('"%s"' % dist.name)
+                f.write('\n')
+            f.write('}\n')
+        f.write('}\n')
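+
+    # Illustrative sketch (not part of distlib): for a graph in which
+    # requests depends on urllib3, to_dot() emits something like
+    #
+    #   digraph dependencies {
+    #   "requests" -> "urllib3" [label="urllib3 (>=1.21.1)"]
+    #   }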
+
+    def topological_sort(self):
+        """
+        Perform a topological sort of the graph.
+        :return: A tuple, the first element of which is a topologically sorted
+                 list of distributions, and the second element of which is a
+                 list of distributions that cannot be sorted because they have
+                 circular dependencies and so form a cycle.
+        """
+        result = []
+        # Make a shallow copy of the adjacency list
+        alist = {}
+        for k, v in self.adjacency_list.items():
+            alist[k] = v[:]
+        while True:
+            # See what we can remove in this run
+            to_remove = []
+            for k, v in list(alist.items()):
+                if not v:
+                    to_remove.append(k)
+                    del alist[k]
+            if not to_remove:
+                # What's left in alist (if anything) is a cycle.
+                break
+            # Remove from the adjacency list of others
+            for k, v in alist.items():
+                alist[k] = [(d, r) for d, r in v if d not in to_remove]
+            logger.debug('Moving to result: %s',
+                         ['%s (%s)' % (d.name, d.version) for d in to_remove])
+            result.extend(to_remove)
+        return result, list(alist.keys())
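+
+    # Illustrative sketch (not part of distlib): with edges a -> b -> c
+    # (a depends on b, b on c), leaves are removed first, so for a
+    # hypothetical graph:
+    #
+    #   >>> ordered, cyclic = graph.topological_sort()
+    #   >>> [d.name for d in ordered]
+    #   ['c', 'b', 'a']
+    #
+    # cyclic is non-empty only when circular dependencies remain.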
+
+    def __repr__(self):
+        """Representation of the graph"""
+        output = []
+        for dist, adjs in self.adjacency_list.items():
+            output.append(self.repr_node(dist))
+        return '\n'.join(output)
+
+
+def make_graph(dists, scheme='default'):
+    """Makes a dependency graph from the given distributions.
+
+    :parameter dists: a list of distributions
+    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
+                 :class:`distutils2.database.EggInfoDistribution` instances
+    :rtype: a :class:`DependencyGraph` instance
+    """
+    scheme = get_scheme(scheme)
+    graph = DependencyGraph()
+    provided = {}  # maps names to lists of (version, dist) tuples
+
+    # first, build the graph and find out what's provided
+    for dist in dists:
+        graph.add_distribution(dist)
+
+        for p in dist.provides:
+            name, version = parse_name_and_version(p)
+            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+            provided.setdefault(name, []).append((version, dist))
+
+    # now make the edges
+    for dist in dists:
+        requires = (dist.run_requires | dist.meta_requires |
+                    dist.build_requires | dist.dev_requires)
+        for req in requires:
+            try:
+                matcher = scheme.matcher(req)
+            except UnsupportedVersionError:
+                # XXX compat-mode if cannot read the version
+                logger.warning('could not read version %r - using name only',
+                               req)
+                name = req.split()[0]
+                matcher = scheme.matcher(name)
+
+            name = matcher.key   # case-insensitive
+
+            matched = False
+            if name in provided:
+                for version, provider in provided[name]:
+                    try:
+                        match = matcher.match(version)
+                    except UnsupportedVersionError:
+                        match = False
+
+                    if match:
+                        graph.add_edge(dist, provider, req)
+                        matched = True
+                        break
+            if not matched:
+                graph.add_missing(dist, req)
+    return graph
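+
+# Illustrative usage sketch (not part of distlib): build a graph from the
+# currently installed distributions and report unsatisfied requirements.
+#
+#   >>> from distlib.database import DistributionPath, make_graph
+#   >>> dists = list(DistributionPath().get_distributions())
+#   >>> graph = make_graph(dists)
+#   >>> for dist, reqs in graph.missing.items():
+#   ...     print(dist, reqs)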
+
+
+def get_dependent_dists(dists, dist):
+    """Recursively generate a list of distributions from *dists* that are
+    dependent on *dist*.
+
+    :param dists: a list of distributions
+    :param dist: a distribution, member of *dists* for which we are interested
+    """
+    if dist not in dists:
+        raise DistlibException('given distribution %r is not a member '
+                               'of the list' % dist.name)
+    graph = make_graph(dists)
+
+    dep = [dist]  # dependent distributions
+    todo = graph.reverse_list[dist]  # list of nodes we should inspect
+
+    while todo:
+        d = todo.pop()
+        dep.append(d)
+        for succ in graph.reverse_list[d]:
+            if succ not in dep:
+                todo.append(succ)
+
+    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
+    return dep
+
+
+def get_required_dists(dists, dist):
+    """Recursively generate a list of distributions from *dists* that are
+    required by *dist*.
+
+    :param dists: a list of distributions
+    :param dist: a distribution, member of *dists* for which we are interested
+                 in finding the dependencies.
+    """
+    if dist not in dists:
+        raise DistlibException('given distribution %r is not a member '
+                               'of the list' % dist.name)
+    graph = make_graph(dists)
+
+    req = set()  # required distributions
+    todo = graph.adjacency_list[dist]  # list of nodes we should inspect
+    seen = set(t[0] for t in todo) # already added to todo
+
+    while todo:
+        d = todo.pop()[0]
+        req.add(d)
+        pred_list = graph.adjacency_list[d]
+        for pred in pred_list:
+            d = pred[0]
+            if d not in req and d not in seen:
+                seen.add(d)
+                todo.append(pred)
+    return req
+
+
+def make_dist(name, version, **kwargs):
+    """
+    A convenience method for making a dist given just a name and version.
+    """
+    summary = kwargs.pop('summary', 'Placeholder for summary')
+    md = Metadata(**kwargs)
+    md.name = name
+    md.version = version
+    md.summary = summary or 'Placeholder for summary'
+    return Distribution(md)
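+
+# Illustrative usage sketch (not part of distlib):
+#
+#   >>> d = make_dist('foo', '1.0')
+#   >>> d.name, d.version
+#   ('foo', '1.0')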
diff --git a/venv/lib/python3.8/site-packages/distlib/index.py b/venv/lib/python3.8/site-packages/distlib/index.py
new file mode 100644
index 0000000..9b6d129
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/index.py
@@ -0,0 +1,508 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import hashlib
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+try:
+    from threading import Thread
+except ImportError:  # pragma: no cover
+    from dummy_threading import Thread
+
+from . import DistlibException
+from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
+                     urlparse, build_opener, string_types)
+from .util import zip_dir, ServerProxy
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_INDEX = 'https://pypi.org/pypi'
+DEFAULT_REALM = 'pypi'
+
+class PackageIndex(object):
+    """
+    This class represents a package index compatible with PyPI, the Python
+    Package Index.
+    """
+
+    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
+
+    def __init__(self, url=None):
+        """
+        Initialise an instance.
+
+        :param url: The URL of the index. If not specified, the URL for PyPI is
+                    used.
+        """
+        self.url = url or DEFAULT_INDEX
+        self.read_configuration()
+        scheme, netloc, path, params, query, frag = urlparse(self.url)
+        if params or query or frag or scheme not in ('http', 'https'):
+            raise DistlibException('invalid repository: %s' % self.url)
+        self.password_handler = None
+        self.ssl_verifier = None
+        self.gpg = None
+        self.gpg_home = None
+        with open(os.devnull, 'w') as sink:
+            # Use gpg by default rather than gpg2, as gpg2 insists on
+            # prompting for passwords
+            for s in ('gpg', 'gpg2'):
+                try:
+                    rc = subprocess.check_call([s, '--version'], stdout=sink,
+                                               stderr=sink)
+                    if rc == 0:
+                        self.gpg = s
+                        break
+                except OSError:
+                    pass
+
+    def _get_pypirc_command(self):
+        """
+        Get the distutils command for interacting with PyPI configurations.
+        :return: the command.
+        """
+        from .util import _get_pypirc_command as cmd
+        return cmd()
+
+    def read_configuration(self):
+        """
+        Read the PyPI access configuration as supported by distutils. This populates
+        ``username``, ``password``, ``realm`` and ``url`` attributes from the
+        configuration.
+        """
+        from .util import _load_pypirc
+        cfg = _load_pypirc(self)
+        self.username = cfg.get('username')
+        self.password = cfg.get('password')
+        self.realm = cfg.get('realm', 'pypi')
+        self.url = cfg.get('repository', self.url)
+
+    def save_configuration(self):
+        """
+        Save the PyPI access configuration. You must have set ``username`` and
+        ``password`` attributes before calling this method.
+        """
+        self.check_credentials()
+        from .util import _store_pypirc
+        _store_pypirc(self)
+
+    def check_credentials(self):
+        """
+        Check that ``username`` and ``password`` have been set, and raise an
+        exception if not.
+        """
+        if self.username is None or self.password is None:
+            raise DistlibException('username and password must be set')
+        pm = HTTPPasswordMgr()
+        _, netloc, _, _, _, _ = urlparse(self.url)
+        pm.add_password(self.realm, netloc, self.username, self.password)
+        self.password_handler = HTTPBasicAuthHandler(pm)
+
+    def register(self, metadata):  # pragma: no cover
+        """
+        Register a distribution on PyPI, using the provided metadata.
+
+        :param metadata: A :class:`Metadata` instance defining at least a name
+                         and version number for the distribution to be
+                         registered.
+        :return: The HTTP response received from PyPI upon submission of the
+                request.
+        """
+        self.check_credentials()
+        metadata.validate()
+        d = metadata.todict()
+        d[':action'] = 'verify'
+        request = self.encode_request(d.items(), [])
+        response = self.send_request(request)
+        d[':action'] = 'submit'
+        request = self.encode_request(d.items(), [])
+        return self.send_request(request)
+
+    def _reader(self, name, stream, outbuf):
+        """
+        Thread runner for reading lines of output from a subprocess into a buffer.
+
+        :param name: The logical name of the stream (used for logging only).
+        :param stream: The stream to read from. This will typically be a pipe
+                       connected to the output stream of a subprocess.
+        :param outbuf: The list to append the read lines to.
+        """
+        while True:
+            s = stream.readline()
+            if not s:
+                break
+            s = s.decode('utf-8').rstrip()
+            outbuf.append(s)
+            logger.debug('%s: %s', name, s)
+        stream.close()
+
+    def get_sign_command(self, filename, signer, sign_password, keystore=None):  # pragma: no cover
+        """
+        Return a suitable command for signing a file.
+
+        :param filename: The pathname to the file to be signed.
+        :param signer: The identifier of the signer of the file.
+        :param sign_password: The passphrase for the signer's
+                              private key used for signing.
+        :param keystore: The path to a directory which contains the keys
+                         used in verification. If not specified, the
+                         instance's ``gpg_home`` attribute is used instead.
+        :return: The signing command as a list suitable to be
+                 passed to :class:`subprocess.Popen`.
+        """
+        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+        if keystore is None:
+            keystore = self.gpg_home
+        if keystore:
+            cmd.extend(['--homedir', keystore])
+        if sign_password is not None:
+            cmd.extend(['--batch', '--passphrase-fd', '0'])
+        td = tempfile.mkdtemp()
+        sf = os.path.join(td, os.path.basename(filename) + '.asc')
+        cmd.extend(['--detach-sign', '--armor', '--local-user',
+                    signer, '--output', sf, filename])
+        logger.debug('invoking: %s', ' '.join(cmd))
+        return cmd, sf
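+
+    # Illustrative sketch (not part of distlib): with a passphrase supplied,
+    # the returned command looks like
+    #
+    #   ['gpg', '--status-fd', '2', '--no-tty', '--batch',
+    #    '--passphrase-fd', '0', '--detach-sign', '--armor',
+    #    '--local-user', '<signer>', '--output', '<tmpdir>/pkg.tar.gz.asc',
+    #    'pkg.tar.gz']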
+
+    def run_command(self, cmd, input_data=None):
+        """
+        Run a command in a child process, passing it any input data specified.
+
+        :param cmd: The command to run.
+        :param input_data: If specified, this must be a byte string containing
+                           data to be sent to the child process.
+        :return: A tuple consisting of the subprocess' exit code, a list of
+                 lines read from the subprocess' ``stdout``, and a list of
+                 lines read from the subprocess' ``stderr``.
+        """
+        kwargs = {
+            'stdout': subprocess.PIPE,
+            'stderr': subprocess.PIPE,
+        }
+        if input_data is not None:
+            kwargs['stdin'] = subprocess.PIPE
+        stdout = []
+        stderr = []
+        p = subprocess.Popen(cmd, **kwargs)
+        # We don't use communicate() here because we may need to
+        # get clever with interacting with the command
+        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
+        t1.start()
+        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
+        t2.start()
+        if input_data is not None:
+            p.stdin.write(input_data)
+            p.stdin.close()
+
+        p.wait()
+        t1.join()
+        t2.join()
+        return p.returncode, stdout, stderr
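+
+    # Illustrative usage sketch (not part of distlib); assumes gpg is
+    # installed and `index` is a PackageIndex instance:
+    #
+    #   >>> rc, out, err = index.run_command(['gpg', '--version'])
+    #   >>> rc
+    #   0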
+
+    def sign_file(self, filename, signer, sign_password, keystore=None):  # pragma: no cover
+        """
+        Sign a file.
+
+        :param filename: The pathname to the file to be signed.
+        :param signer: The identifier of the signer of the file.
+        :param sign_password: The passphrase for the signer's
+                              private key used for signing.
+        :param keystore: The path to a directory which contains the keys
+                         used in signing. If not specified, the instance's
+                         ``gpg_home`` attribute is used instead.
+        :return: The absolute pathname of the file where the signature is
+                 stored.
+        """
+        cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
+                                              keystore)
+        rc, stdout, stderr = self.run_command(cmd,
+                                              sign_password.encode('utf-8'))
+        if rc != 0:
+            raise DistlibException('sign command failed with error '
+                                   'code %s' % rc)
+        return sig_file
+
+    def upload_file(self, metadata, filename, signer=None, sign_password=None,
+                    filetype='sdist', pyversion='source', keystore=None):
+        """
+        Upload a release file to the index.
+
+        :param metadata: A :class:`Metadata` instance defining at least a name
+                         and version number for the file to be uploaded.
+        :param filename: The pathname of the file to be uploaded.
+        :param signer: The identifier of the signer of the file.
+        :param sign_password: The passphrase for the signer's
+                              private key used for signing.
+        :param filetype: The type of the file being uploaded. This is the
+                        distutils command which produced that file, e.g.
+                        ``sdist`` or ``bdist_wheel``.
+        :param pyversion: The version of Python which the release relates
+                          to. For code compatible with any Python, this would
+                          be ``source``, otherwise it would be e.g. ``3.2``.
+        :param keystore: The path to a directory which contains the keys
+                         used in signing. If not specified, the instance's
+                         ``gpg_home`` attribute is used instead.
+        :return: The HTTP response received from PyPI upon submission of the
+                request.
+        """
+        self.check_credentials()
+        if not os.path.exists(filename):
+            raise DistlibException('not found: %s' % filename)
+        metadata.validate()
+        d = metadata.todict()
+        sig_file = None
+        if signer:
+            if not self.gpg:
+                logger.warning('no signing program available - not signed')
+            else:
+                sig_file = self.sign_file(filename, signer, sign_password,
+                                          keystore)
+        with open(filename, 'rb') as f:
+            file_data = f.read()
+        md5_digest = hashlib.md5(file_data).hexdigest()
+        sha256_digest = hashlib.sha256(file_data).hexdigest()
+        d.update({
+            ':action': 'file_upload',
+            'protocol_version': '1',
+            'filetype': filetype,
+            'pyversion': pyversion,
+            'md5_digest': md5_digest,
+            'sha256_digest': sha256_digest,
+        })
+        files = [('content', os.path.basename(filename), file_data)]
+        if sig_file:
+            with open(sig_file, 'rb') as f:
+                sig_data = f.read()
+            files.append(('gpg_signature', os.path.basename(sig_file),
+                         sig_data))
+            shutil.rmtree(os.path.dirname(sig_file))
+        request = self.encode_request(d.items(), files)
+        return self.send_request(request)
+
+    def upload_documentation(self, metadata, doc_dir):  # pragma: no cover
+        """
+        Upload documentation to the index.
+
+        :param metadata: A :class:`Metadata` instance defining at least a name
+                         and version number for the documentation to be
+                         uploaded.
+        :param doc_dir: The pathname of the directory which contains the
+                        documentation. This should be the directory that
+                        contains the ``index.html`` for the documentation.
+        :return: The HTTP response received from PyPI upon submission of the
+                request.
+        """
+        self.check_credentials()
+        if not os.path.isdir(doc_dir):
+            raise DistlibException('not a directory: %r' % doc_dir)
+        fn = os.path.join(doc_dir, 'index.html')
+        if not os.path.exists(fn):
+            raise DistlibException('not found: %r' % fn)
+        metadata.validate()
+        name, version = metadata.name, metadata.version
+        zip_data = zip_dir(doc_dir).getvalue()
+        fields = [(':action', 'doc_upload'),
+                  ('name', name), ('version', version)]
+        files = [('content', name, zip_data)]
+        request = self.encode_request(fields, files)
+        return self.send_request(request)
+
+    def get_verify_command(self, signature_filename, data_filename,
+                           keystore=None):
+        """
+        Return a suitable command for verifying a file.
+
+        :param signature_filename: The pathname to the file containing the
+                                   signature.
+        :param data_filename: The pathname to the file containing the
+                              signed data.
+        :param keystore: The path to a directory which contains the keys
+                         used in verification. If not specified, the
+                         instance's ``gpg_home`` attribute is used instead.
+        :return: The verifying command as a list suitable to be
+                 passed to :class:`subprocess.Popen`.
+        """
+        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+        if keystore is None:
+            keystore = self.gpg_home
+        if keystore:
+            cmd.extend(['--homedir', keystore])
+        cmd.extend(['--verify', signature_filename, data_filename])
+        logger.debug('invoking: %s', ' '.join(cmd))
+        return cmd
+
+    def verify_signature(self, signature_filename, data_filename,
+                         keystore=None):
+        """
+        Verify a signature for a file.
+
+        :param signature_filename: The pathname to the file containing the
+                                   signature.
+        :param data_filename: The pathname to the file containing the
+                              signed data.
+        :param keystore: The path to a directory which contains the keys
+                         used in verification. If not specified, the
+                         instance's ``gpg_home`` attribute is used instead.
+        :return: True if the signature was verified, else False.
+        """
+        if not self.gpg:
+            raise DistlibException('verification unavailable because gpg '
+                                   'unavailable')
+        cmd = self.get_verify_command(signature_filename, data_filename,
+                                      keystore)
+        rc, stdout, stderr = self.run_command(cmd)
+        if rc not in (0, 1):
+            raise DistlibException('verify command failed with error '
+                                   'code %s' % rc)
+        return rc == 0
+
+    def download_file(self, url, destfile, digest=None, reporthook=None):
+        """
+        This is a convenience method for downloading a file from a URL.
+        Normally, this will be a file from the index, though currently
+        no check is made for this (i.e. a file can be downloaded from
+        anywhere).
+
+        The method is just like the :func:`urlretrieve` function in the
+        standard library, except that it allows digest computation to be
+        done during download and checks that the downloaded data
+        matches any expected value.
+
+        :param url: The URL of the file to be downloaded (assumed to be
+                    available via an HTTP GET request).
+        :param destfile: The pathname where the downloaded file is to be
+                         saved.
+        :param digest: If specified, this must be a (hasher, value)
+                       tuple, where hasher is the algorithm used (e.g.
+                       ``'md5'``) and ``value`` is the expected value.
+        :param reporthook: The same as for :func:`urlretrieve` in the
+                           standard library.
+        """
+        if digest is None:
+            digester = None
+            logger.debug('No digest specified')
+        else:
+            if isinstance(digest, (list, tuple)):
+                hasher, digest = digest
+            else:
+                hasher = 'md5'
+            digester = getattr(hashlib, hasher)()
+            logger.debug('Digest specified: %s', digest)
+        # The following code is equivalent to urlretrieve.
+        # We need to do it this way so that we can compute the
+        # digest of the file as we go.
+        with open(destfile, 'wb') as dfp:
+            # addinfourl is not a context manager on 2.x
+            # so we have to use try/finally
+            sfp = self.send_request(Request(url))
+            try:
+                headers = sfp.info()
+                blocksize = 8192
+                size = -1
+                read = 0
+                blocknum = 0
+                if "content-length" in headers:
+                    size = int(headers["Content-Length"])
+                if reporthook:
+                    reporthook(blocknum, blocksize, size)
+                while True:
+                    block = sfp.read(blocksize)
+                    if not block:
+                        break
+                    read += len(block)
+                    dfp.write(block)
+                    if digester:
+                        digester.update(block)
+                    blocknum += 1
+                    if reporthook:
+                        reporthook(blocknum, blocksize, size)
+            finally:
+                sfp.close()
+
+        # check that we got the whole file, if we can
+        if size >= 0 and read < size:
+            raise DistlibException(
+                'retrieval incomplete: got only %d out of %d bytes'
+                % (read, size))
+        # if we have a digest, it must match.
+        if digester:
+            actual = digester.hexdigest()
+            if digest != actual:
+                raise DistlibException('%s digest mismatch for %s: expected '
+                                       '%s, got %s' % (hasher, destfile,
+                                                       digest, actual))
+            logger.debug('Digest verified: %s', digest)
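+
+    # Illustrative usage sketch (not part of distlib); the URL and digest
+    # value are made up:
+    #
+    #   index.download_file('https://example.com/foo-1.0.tar.gz',
+    #                       '/tmp/foo-1.0.tar.gz',
+    #                       digest=('sha256', '<expected hex digest>'))
+    #
+    # A bare string passed as *digest* is treated as an MD5 value.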
+
+    def send_request(self, req):
+        """
+        Send a standard library :class:`Request` to PyPI and return its
+        response.
+
+        :param req: The request to send.
+        :return: The HTTP response from PyPI (a standard library HTTPResponse).
+        """
+        handlers = []
+        if self.password_handler:
+            handlers.append(self.password_handler)
+        if self.ssl_verifier:
+            handlers.append(self.ssl_verifier)
+        opener = build_opener(*handlers)
+        return opener.open(req)
+
+    def encode_request(self, fields, files):
+        """
+        Encode fields and files for posting to an HTTP server.
+
+        :param fields: The fields to send as a list of (fieldname, value)
+                       tuples.
+        :param files: The files to send as a list of (fieldname, filename,
+                      file_bytes) tuples.
+        """
+        # Adapted from packaging, which in turn was adapted from
+        # http://code.activestate.com/recipes/146306
+
+        parts = []
+        boundary = self.boundary
+        for k, values in fields:
+            if not isinstance(values, (list, tuple)):
+                values = [values]
+
+            for v in values:
+                parts.extend((
+                    b'--' + boundary,
+                    ('Content-Disposition: form-data; name="%s"' %
+                     k).encode('utf-8'),
+                    b'',
+                    v.encode('utf-8')))
+        for key, filename, value in files:
+            parts.extend((
+                b'--' + boundary,
+                ('Content-Disposition: form-data; name="%s"; filename="%s"' %
+                 (key, filename)).encode('utf-8'),
+                b'',
+                value))
+
+        parts.extend((b'--' + boundary + b'--', b''))
+
+        body = b'\r\n'.join(parts)
+        ct = b'multipart/form-data; boundary=' + boundary
+        headers = {
+            'Content-type': ct,
+            'Content-length': str(len(body))
+        }
+        return Request(self.url, body, headers)
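+
+    # Illustrative sketch (not part of distlib): the body produced is plain
+    # multipart/form-data, e.g. for fields=[(':action', 'verify')] and no
+    # files it looks like
+    #
+    #   --<boundary>
+    #   Content-Disposition: form-data; name=":action"
+    #
+    #   verify
+    #   --<boundary>--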
+
+    def search(self, terms, operator=None):  # pragma: no cover
+        if isinstance(terms, string_types):
+            terms = {'name': terms}
+        rpc_proxy = ServerProxy(self.url, timeout=3.0)
+        try:
+            return rpc_proxy.search(terms, operator or 'and')
+        finally:
+            rpc_proxy('close')()
diff --git a/venv/lib/python3.8/site-packages/distlib/locators.py b/venv/lib/python3.8/site-packages/distlib/locators.py
new file mode 100644
index 0000000..966ebc0
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/locators.py
@@ -0,0 +1,1300 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2015 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+
+import gzip
+from io import BytesIO
+import json
+import logging
+import os
+import posixpath
+import re
+try:
+    import threading
+except ImportError:  # pragma: no cover
+    import dummy_threading as threading
+import zlib
+
+from . import DistlibException
+from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
+                     queue, quote, unescape, build_opener,
+                     HTTPRedirectHandler as BaseRedirectHandler, text_type,
+                     Request, HTTPError, URLError)
+from .database import Distribution, DistributionPath, make_dist
+from .metadata import Metadata, MetadataInvalidError
+from .util import (cached_property, ensure_slash, split_filename, get_project_data,
+                   parse_requirement, parse_name_and_version, ServerProxy,
+                   normalize_name)
+from .version import get_scheme, UnsupportedVersionError
+from .wheel import Wheel, is_compatible
+
+logger = logging.getLogger(__name__)
+
+HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
+CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
+HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
+DEFAULT_INDEX = 'https://pypi.org/pypi'
+
+def get_all_distribution_names(url=None):
+    """
+    Return all distribution names known by an index.
+    :param url: The URL of the index.
+    :return: A list of all known distribution names.
+    """
+    if url is None:
+        url = DEFAULT_INDEX
+    client = ServerProxy(url, timeout=3.0)
+    try:
+        return client.list_packages()
+    finally:
+        client('close')()
+
+class RedirectHandler(BaseRedirectHandler):
+    """
+    A class to work around a bug in some Python 3.2.x releases.
+    """
+    # There's a bug in the base version for some 3.2.x
+    # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
+    # returns e.g. /abc, it bails because it says the scheme ''
+    # is bogus, when actually it should use the request's
+    # URL for the scheme. See Python issue #13696.
+    def http_error_302(self, req, fp, code, msg, headers):
+        # Some servers (incorrectly) return multiple Location headers
+        # (so probably same goes for URI).  Use first header.
+        newurl = None
+        for key in ('location', 'uri'):
+            if key in headers:
+                newurl = headers[key]
+                break
+        if newurl is None:  # pragma: no cover
+            return
+        urlparts = urlparse(newurl)
+        if urlparts.scheme == '':
+            newurl = urljoin(req.get_full_url(), newurl)
+            if hasattr(headers, 'replace_header'):
+                headers.replace_header(key, newurl)
+            else:
+                headers[key] = newurl
+        return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
+                                                  headers)
+
+    http_error_301 = http_error_303 = http_error_307 = http_error_302
+
+class Locator(object):
+    """
+    A base class for locators - things that locate distributions.
+    """
+    source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
+    binary_extensions = ('.egg', '.exe', '.whl')
+    excluded_extensions = ('.pdf',)
+
+    # A list of tags indicating which wheels you want to match. The default
+    # value of None matches against the tags compatible with the running
+    # Python. If you want to match other values, set wheel_tags on a locator
+    # instance to a list of tuples (pyver, abi, arch) which you want to match.
+    wheel_tags = None
+
+    downloadable_extensions = source_extensions + ('.whl',)
+
+    def __init__(self, scheme='default'):
+        """
+        Initialise an instance.
+        :param scheme: Because locators look for most recent versions, they
+                       need to know the version scheme to use. This specifies
+                       the current PEP-recommended scheme - use ``'legacy'``
+                       if you need to support existing distributions on PyPI.
+        """
+        self._cache = {}
+        self.scheme = scheme
+        # Because of bugs in some of the handlers on some of the platforms,
+        # we use our own opener rather than just using urlopen.
+        self.opener = build_opener(RedirectHandler())
+        # If get_project() is called from locate(), the matcher instance
+        # is set from the requirement passed to locate(). See issue #18 for
+        # why this can be useful to know.
+        self.matcher = None
+        self.errors = queue.Queue()
+
+    def get_errors(self):
+        """
+        Return any errors which have occurred.
+        """
+        result = []
+        while not self.errors.empty():  # pragma: no cover
+            try:
+                e = self.errors.get(False)
+                result.append(e)
+            except queue.Empty:
+                continue
+            self.errors.task_done()
+        return result
+
+    def clear_errors(self):
+        """
+        Clear any errors which may have been logged.
+        """
+        # Just get the errors and throw them away
+        self.get_errors()
+
+    def clear_cache(self):
+        self._cache.clear()
+
+    def _get_scheme(self):
+        return self._scheme
+
+    def _set_scheme(self, value):
+        self._scheme = value
+
+    scheme = property(_get_scheme, _set_scheme)
+
+    def _get_project(self, name):
+        """
+        For a given project, get a dictionary mapping available versions to Distribution
+        instances.
+
+        This should be implemented in subclasses.
+
+        If called from a locate() request, self.matcher will be set to a
+        matcher for the requirement to satisfy, otherwise it will be None.
+        """
+        raise NotImplementedError('Please implement in the subclass')
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        raise NotImplementedError('Please implement in the subclass')
+
+    def get_project(self, name):
+        """
+        For a given project, get a dictionary mapping available versions to Distribution
+        instances.
+
+        This calls _get_project to do all the work, and just implements a caching layer on top.
+        """
+        if self._cache is None:  # pragma: no cover
+            result = self._get_project(name)
+        elif name in self._cache:
+            result = self._cache[name]
+        else:
+            self.clear_errors()
+            result = self._get_project(name)
+            self._cache[name] = result
+        return result
+
+    def score_url(self, url):
+        """
+        Give a URL a score which can be used to choose preferred URLs
+        for a given project release.
+        """
+        t = urlparse(url)
+        basename = posixpath.basename(t.path)
+        compatible = True
+        is_wheel = basename.endswith('.whl')
+        is_downloadable = basename.endswith(self.downloadable_extensions)
+        if is_wheel:
+            compatible = is_compatible(Wheel(basename), self.wheel_tags)
+        return (t.scheme == 'https', 'pypi.org' in t.netloc,
+                is_downloadable, is_wheel, compatible, basename)
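+
+    # Illustrative sketch (not part of distlib): scores are plain tuples and
+    # compare lexicographically, so for a hypothetical locator `loc` an https
+    # wheel on PyPI outranks an http sdist elsewhere:
+    #
+    #   >>> s1 = loc.score_url('https://pypi.org/packages/p-1.0-py3-none-any.whl')
+    #   >>> s2 = loc.score_url('http://example.com/p-1.0.tar.gz')
+    #   >>> s1 > s2
+    #   True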
+
+    def prefer_url(self, url1, url2):
+        """
+        Choose one of two URLs where both are candidates for distribution
+        archives for the same version of a distribution (for example,
+        .tar.gz vs. zip).
+
+        The current implementation favours https:// URLs over http://, archives
+        from PyPI over those from other locations, wheel compatibility (if a
+        wheel) and then the archive name.
+        """
+        result = url2
+        if url1:
+            s1 = self.score_url(url1)
+            s2 = self.score_url(url2)
+            if s1 > s2:
+                result = url1
+            if result != url2:
+                logger.debug('Not replacing %r with %r', url1, url2)
+            else:
+                logger.debug('Replacing %r with %r', url1, url2)
+        return result
+
+    def split_filename(self, filename, project_name):
+        """
+        Attempt to split a filename into project name, version and Python version.
+        """
+        return split_filename(filename, project_name)
+
+    def convert_url_to_download_info(self, url, project_name):
+        """
+        See if a URL is a candidate for a download URL for a project (the URL
+        has typically been scraped from an HTML page).
+
+        If it is, a dictionary is returned with keys "name", "version",
+        "filename" and "url"; otherwise, None is returned.
+        """
+        def same_project(name1, name2):
+            return normalize_name(name1) == normalize_name(name2)
+
+        result = None
+        scheme, netloc, path, params, query, frag = urlparse(url)
+        if frag.lower().startswith('egg='):  # pragma: no cover
+            logger.debug('%s: version hint in fragment: %r',
+                         project_name, frag)
+        m = HASHER_HASH.match(frag)
+        if m:
+            algo, digest = m.groups()
+        else:
+            algo, digest = None, None
+        origpath = path
+        if path and path[-1] == '/':  # pragma: no cover
+            path = path[:-1]
+        if path.endswith('.whl'):
+            try:
+                wheel = Wheel(path)
+                if not is_compatible(wheel, self.wheel_tags):
+                    logger.debug('Wheel not compatible: %s', path)
+                else:
+                    if project_name is None:
+                        include = True
+                    else:
+                        include = same_project(wheel.name, project_name)
+                    if include:
+                        result = {
+                            'name': wheel.name,
+                            'version': wheel.version,
+                            'filename': wheel.filename,
+                            'url': urlunparse((scheme, netloc, origpath,
+                                               params, query, '')),
+                            'python-version': ', '.join(
+                                ['.'.join(list(v[2:])) for v in wheel.pyver]),
+                        }
+            except Exception as e:  # pragma: no cover
+                logger.warning('invalid path for wheel: %s', path)
+        elif not path.endswith(self.downloadable_extensions):  # pragma: no cover
+            logger.debug('Not downloadable: %s', path)
+        else:  # downloadable extension
+            path = filename = posixpath.basename(path)
+            for ext in self.downloadable_extensions:
+                if path.endswith(ext):
+                    path = path[:-len(ext)]
+                    t = self.split_filename(path, project_name)
+                    if not t:  # pragma: no cover
+                        logger.debug('No match for project/version: %s', path)
+                    else:
+                        name, version, pyver = t
+                        if not project_name or same_project(project_name, name):
+                            result = {
+                                'name': name,
+                                'version': version,
+                                'filename': filename,
+                                'url': urlunparse((scheme, netloc, origpath,
+                                                   params, query, '')),
+                                #'packagetype': 'sdist',
+                            }
+                            if pyver:  # pragma: no cover
+                                result['python-version'] = pyver
+                    break
+        if result and algo:
+            result['%s_digest' % algo] = digest
+        return result
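+
+    # Illustrative sketch (not part of distlib): a successful result for an
+    # sdist URL looks like
+    #
+    #   {'name': 'foo', 'version': '1.0', 'filename': 'foo-1.0.tar.gz',
+    #    'url': 'https://example.com/foo-1.0.tar.gz'}
+    #
+    # plus an '<algo>_digest' key when the URL carried a #md5=.../#sha256=...
+    # fragment.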
+
+    def _get_digest(self, info):
+        """
+        Get a digest from a dictionary by looking at a "digests" dictionary
+        or keys of the form 'algo_digest'.
+
+        Returns a 2-tuple (algo, digest) if found, else None. Currently
+        looks only for SHA256, then MD5.
+        """
+        result = None
+        if 'digests' in info:
+            digests = info['digests']
+            for algo in ('sha256', 'md5'):
+                if algo in digests:
+                    result = (algo, digests[algo])
+                    break
+        if not result:
+            for algo in ('sha256', 'md5'):
+                key = '%s_digest' % algo
+                if key in info:
+                    result = (algo, info[key])
+                    break
+        return result
+
+    def _update_version_data(self, result, info):
+        """
+        Update a result dictionary (the final result from _get_project) with a
+        dictionary for a specific version, which typically holds information
+        gleaned from a filename or URL for an archive for the distribution.
+        """
+        name = info.pop('name')
+        version = info.pop('version')
+        if version in result:
+            dist = result[version]
+            md = dist.metadata
+        else:
+            dist = make_dist(name, version, scheme=self.scheme)
+            md = dist.metadata
+        dist.digest = digest = self._get_digest(info)
+        url = info['url']
+        result['digests'][url] = digest
+        if md.source_url != info['url']:
+            md.source_url = self.prefer_url(md.source_url, url)
+            result['urls'].setdefault(version, set()).add(url)
+        dist.locator = self
+        result[version] = dist
+
+    def locate(self, requirement, prereleases=False):
+        """
+        Find the most recent distribution which matches the given
+        requirement.
+
+        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
+                            'foo (>= 1.0, < 2.0, != 1.3)'
+        :param prereleases: If ``True``, allow pre-release versions
+                            to be located. Otherwise, pre-release versions
+                            are not returned.
+        :return: A :class:`Distribution` instance, or ``None`` if no such
+                 distribution could be located.
+        """
+        result = None
+        r = parse_requirement(requirement)
+        if r is None:  # pragma: no cover
+            raise DistlibException('Not a valid requirement: %r' % requirement)
+        scheme = get_scheme(self.scheme)
+        self.matcher = matcher = scheme.matcher(r.requirement)
+        logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
+        versions = self.get_project(r.name)
+        if len(versions) > 2:   # urls and digests keys are present
+            # sometimes, versions are invalid
+            slist = []
+            vcls = matcher.version_class
+            for k in versions:
+                if k in ('urls', 'digests'):
+                    continue
+                try:
+                    if not matcher.match(k):
+                        pass  # logger.debug('%s did not match %r', matcher, k)
+                    else:
+                        if prereleases or not vcls(k).is_prerelease:
+                            slist.append(k)
+                        # else:
+                            # logger.debug('skipping pre-release '
+                                         # 'version %s of %s', k, matcher.name)
+                except Exception:  # pragma: no cover
+                    logger.warning('error matching %s with %r', matcher, k)
+                    pass # slist.append(k)
+            if len(slist) > 1:
+                slist = sorted(slist, key=scheme.key)
+            if slist:
+                logger.debug('sorted list: %s', slist)
+                version = slist[-1]
+                result = versions[version]
+        if result:
+            if r.extras:
+                result.extras = r.extras
+            result.download_urls = versions.get('urls', {}).get(version, set())
+            d = {}
+            sd = versions.get('digests', {})
+            for url in result.download_urls:
+                if url in sd:  # pragma: no cover
+                    d[url] = sd[url]
+            result.digests = d
+        self.matcher = None
+        return result
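+
+    # Illustrative usage sketch (not part of distlib); requires network
+    # access to PyPI:
+    #
+    #   >>> locator = PyPIJSONLocator('https://pypi.org/pypi/')
+    #   >>> dist = locator.locate('requests (>=2.0)')
+    #   >>> dist.name if dist else None
+    #   'requests'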
+
+
+class PyPIRPCLocator(Locator):
+    """
+    This locator uses XML-RPC to locate distributions. It therefore
+    cannot be used with simple mirrors (that only mirror file content).
+    """
+    def __init__(self, url, **kwargs):
+        """
+        Initialise an instance.
+
+        :param url: The URL to use for XML-RPC.
+        :param kwargs: Passed to the superclass constructor.
+        """
+        super(PyPIRPCLocator, self).__init__(**kwargs)
+        self.base_url = url
+        self.client = ServerProxy(url, timeout=3.0)
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        return set(self.client.list_packages())
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        versions = self.client.package_releases(name, True)
+        for v in versions:
+            urls = self.client.release_urls(name, v)
+            data = self.client.release_data(name, v)
+            metadata = Metadata(scheme=self.scheme)
+            metadata.name = data['name']
+            metadata.version = data['version']
+            metadata.license = data.get('license')
+            metadata.keywords = data.get('keywords', [])
+            metadata.summary = data.get('summary')
+            dist = Distribution(metadata)
+            if urls:
+                info = urls[0]
+                metadata.source_url = info['url']
+                dist.digest = self._get_digest(info)
+                dist.locator = self
+                result[v] = dist
+                for info in urls:
+                    url = info['url']
+                    digest = self._get_digest(info)
+                    result['urls'].setdefault(v, set()).add(url)
+                    result['digests'][url] = digest
+        return result
+
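For orientation, a minimal usage sketch (not part of the patch): it assumes network access, and PyPI's XML-RPC endpoint is nowadays deprecated and rate-limited, so treat this as historical illustration only.

    from distlib.locators import PyPIRPCLocator

    locator = PyPIRPCLocator('https://pypi.org/pypi')
    # get_project() results are keyed by version, with extra 'urls'
    # and 'digests' entries, as built by _get_project() above.
    data = locator.get_project('distlib')
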
+class PyPIJSONLocator(Locator):
+    """
+    This locator uses PyPI's JSON interface. It's very limited in functionality
+    and probably not worth using.
+    """
+    def __init__(self, url, **kwargs):
+        super(PyPIJSONLocator, self).__init__(**kwargs)
+        self.base_url = ensure_slash(url)
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        raise NotImplementedError('Not available from this locator')
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        url = urljoin(self.base_url, '%s/json' % quote(name))
+        try:
+            resp = self.opener.open(url)
+            data = resp.read().decode() # for now
+            d = json.loads(data)
+            md = Metadata(scheme=self.scheme)
+            data = d['info']
+            md.name = data['name']
+            md.version = data['version']
+            md.license = data.get('license')
+            md.keywords = data.get('keywords', [])
+            md.summary = data.get('summary')
+            dist = Distribution(md)
+            dist.locator = self
+            urls = d['urls']
+            result[md.version] = dist
+            for info in d['urls']:
+                url = info['url']
+                dist.download_urls.add(url)
+                dist.digests[url] = self._get_digest(info)
+                result['urls'].setdefault(md.version, set()).add(url)
+                result['digests'][url] = self._get_digest(info)
+            # Now get other releases
+            for version, infos in d['releases'].items():
+                if version == md.version:
+                    continue    # already done
+                omd = Metadata(scheme=self.scheme)
+                omd.name = md.name
+                omd.version = version
+                odist = Distribution(omd)
+                odist.locator = self
+                result[version] = odist
+                for info in infos:
+                    url = info['url']
+                    odist.download_urls.add(url)
+                    odist.digests[url] = self._get_digest(info)
+                    result['urls'].setdefault(version, set()).add(url)
+                    result['digests'][url] = self._get_digest(info)
+#            for info in urls:
+#                md.source_url = info['url']
+#                dist.digest = self._get_digest(info)
+#                dist.locator = self
+#                for info in urls:
+#                    url = info['url']
+#                    result['urls'].setdefault(md.version, set()).add(url)
+#                    result['digests'][url] = self._get_digest(info)
+        except Exception as e:
+            self.errors.put(text_type(e))
+            logger.exception('JSON fetch failed: %s', e)
+        return result
+
+
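A similar sketch for the JSON locator (again assuming network access; the endpoint URL is supplied by the caller, not hard-coded by the class):

    from distlib.locators import PyPIJSONLocator

    locator = PyPIJSONLocator('https://pypi.org/pypi/')  # trailing slash is ensured
    result = locator.get_project('distlib')
    # result maps each version to a Distribution, plus 'urls'/'digests' keys.
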
+class Page(object):
+    """
+    This class represents a scraped HTML page.
+    """
+    # The following slightly hairy-looking regex just looks for the contents of
+    # an anchor link, which has an attribute "href" either immediately preceded
+    # or immediately followed by a "rel" attribute. The attribute values can be
+    # declared with double quotes, single quotes or no quotes - which leads to
+    # the length of the expression.
+    _href = re.compile("""
+(rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)?
+href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*))
+(\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))?
+""", re.I | re.S | re.X)
+    _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)
+
+    def __init__(self, data, url):
+        """
+        Initialise an instance with the Unicode page contents and the URL they
+        came from.
+        """
+        self.data = data
+        self.base_url = self.url = url
+        m = self._base.search(self.data)
+        if m:
+            self.base_url = m.group(1)
+
+    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+    @cached_property
+    def links(self):
+        """
+        Return the URLs of all the links on a page together with information
+        about their "rel" attribute, for determining which ones to treat as
+        downloads and which ones to queue for further scraping.
+        """
+        def clean(url):
+            "Tidy up an URL."
+            scheme, netloc, path, params, query, frag = urlparse(url)
+            return urlunparse((scheme, netloc, quote(path),
+                               params, query, frag))
+
+        result = set()
+        for match in self._href.finditer(self.data):
+            d = match.groupdict('')
+            rel = (d['rel1'] or d['rel2'] or d['rel3'] or
+                   d['rel4'] or d['rel5'] or d['rel6'])
+            url = d['url1'] or d['url2'] or d['url3']
+            url = urljoin(self.base_url, url)
+            url = unescape(url)
+            url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
+            result.add((url, rel))
+        # We sort the result, hoping to bring the most recent versions
+        # to the front
+        result = sorted(result, key=lambda t: t[0], reverse=True)
+        return result
+
+
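A small self-contained sketch of the Page API; the HTML snippet is made up, but it exercises the rel/href regex above:

    from distlib.locators import Page

    html = ('<a rel="download" '
            'href="https://example.com/dist/pkg-1.0.tar.gz">pkg-1.0</a>')
    page = Page(html, 'https://example.com/simple/pkg/')
    for url, rel in page.links:
        print(url, rel)   # https://example.com/dist/pkg-1.0.tar.gz download
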
+class SimpleScrapingLocator(Locator):
+    """
+    A locator which scrapes HTML pages to locate downloads for a distribution.
+    This runs multiple threads to do the I/O; performance is at least as good
+    as pip's PackageFinder, which works in an analogous fashion.
+    """
+
+    # These are used to deal with various Content-Encoding schemes.
+    decoders = {
+        'deflate': zlib.decompress,
+        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
+        'none': lambda b: b,
+    }
+
+    def __init__(self, url, timeout=None, num_workers=10, **kwargs):
+        """
+        Initialise an instance.
+        :param url: The root URL to use for scraping.
+        :param timeout: The timeout, in seconds, to be applied to requests.
+                        This defaults to ``None`` (no timeout specified).
+        :param num_workers: The number of worker threads to use for I/O.
+                            This defaults to 10.
+        :param kwargs: Passed to the superclass.
+        """
+        super(SimpleScrapingLocator, self).__init__(**kwargs)
+        self.base_url = ensure_slash(url)
+        self.timeout = timeout
+        self._page_cache = {}
+        self._seen = set()
+        self._to_fetch = queue.Queue()
+        self._bad_hosts = set()
+        self.skip_externals = False
+        self.num_workers = num_workers
+        self._lock = threading.RLock()
+        # See issue #45: we need to be resilient when the locator is used
+        # in a thread, e.g. with concurrent.futures. We can't use self._lock
+        # as it is for coordinating our internal threads - the ones created
+        # in _prepare_threads.
+        self._gplock = threading.RLock()
+        self.platform_check = False  # See issue #112
+
+    def _prepare_threads(self):
+        """
+        Threads are created only when get_project is called, and terminate
+        before it returns. They are there primarily to parallelise I/O (i.e.
+        fetching web pages).
+        """
+        self._threads = []
+        for i in range(self.num_workers):
+            t = threading.Thread(target=self._fetch)
+            t.daemon = True
+            t.start()
+            self._threads.append(t)
+
+    def _wait_threads(self):
+        """
+        Tell all the threads to terminate (by sending a sentinel value) and
+        wait for them to do so.
+        """
+        # Note that you need two loops, since you can't say which
+        # thread will get each sentinel
+        for t in self._threads:
+            self._to_fetch.put(None)    # sentinel
+        for t in self._threads:
+            t.join()
+        self._threads = []
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        with self._gplock:
+            self.result = result
+            self.project_name = name
+            url = urljoin(self.base_url, '%s/' % quote(name))
+            self._seen.clear()
+            self._page_cache.clear()
+            self._prepare_threads()
+            try:
+                logger.debug('Queueing %s', url)
+                self._to_fetch.put(url)
+                self._to_fetch.join()
+            finally:
+                self._wait_threads()
+            del self.result
+        return result
+
+    platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
+                                    r'win(32|_amd64)|macosx_?\d+)\b', re.I)
+
+    def _is_platform_dependent(self, url):
+        """
+        Does a URL refer to a platform-specific download?
+        """
+        return self.platform_dependent.search(url)
+
+    def _process_download(self, url):
+        """
+        See if a URL is a suitable download for a project.
+
+        If it is, register information in the result dictionary (for
+        _get_project) about the specific version it's for.
+
+        Note that the return value isn't actually used other than as a boolean
+        value.
+        """
+        if self.platform_check and self._is_platform_dependent(url):
+            info = None
+        else:
+            info = self.convert_url_to_download_info(url, self.project_name)
+        logger.debug('process_download: %s -> %s', url, info)
+        if info:
+            with self._lock:    # needed because self.result is shared
+                self._update_version_data(self.result, info)
+        return info
+
+    def _should_queue(self, link, referrer, rel):
+        """
+        Determine whether a link URL from a referring page and with a
+        particular "rel" attribute should be queued for scraping.
+        """
+        scheme, netloc, path, _, _, _ = urlparse(link)
+        if path.endswith(self.source_extensions + self.binary_extensions +
+                         self.excluded_extensions):
+            result = False
+        elif self.skip_externals and not link.startswith(self.base_url):
+            result = False
+        elif not referrer.startswith(self.base_url):
+            result = False
+        elif rel not in ('homepage', 'download'):
+            result = False
+        elif scheme not in ('http', 'https', 'ftp'):
+            result = False
+        elif self._is_platform_dependent(link):
+            result = False
+        else:
+            host = netloc.split(':', 1)[0]
+            if host.lower() == 'localhost':
+                result = False
+            else:
+                result = True
+        logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
+                     referrer, result)
+        return result
+
+    def _fetch(self):
+        """
+        Get a URL to fetch from the work queue, get the HTML page, examine its
+        links for download candidates and candidates for further scraping.
+
+        This is a handy method to run in a thread.
+        """
+        while True:
+            url = self._to_fetch.get()
+            try:
+                if url:
+                    page = self.get_page(url)
+                    if page is None:    # e.g. after an error
+                        continue
+                    for link, rel in page.links:
+                        if link not in self._seen:
+                            try:
+                                self._seen.add(link)
+                                if (not self._process_download(link) and
+                                    self._should_queue(link, url, rel)):
+                                    logger.debug('Queueing %s from %s', link, url)
+                                    self._to_fetch.put(link)
+                            except MetadataInvalidError:  # e.g. invalid versions
+                                pass
+            except Exception as e:  # pragma: no cover
+                self.errors.put(text_type(e))
+            finally:
+                # always do this, to avoid hangs :-)
+                self._to_fetch.task_done()
+            if not url:
+                #logger.debug('Sentinel seen, quitting.')
+                break
+
+    def get_page(self, url):
+        """
+        Get the HTML for a URL, possibly from an in-memory cache.
+
+        XXX TODO Note: this cache is never actually cleared. It's assumed that
+        the data won't get stale over the lifetime of a locator instance (not
+        necessarily true for the default_locator).
+        """
+        # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
+        scheme, netloc, path, _, _, _ = urlparse(url)
+        if scheme == 'file' and os.path.isdir(url2pathname(path)):
+            url = urljoin(ensure_slash(url), 'index.html')
+
+        if url in self._page_cache:
+            result = self._page_cache[url]
+            logger.debug('Returning %s from cache: %s', url, result)
+        else:
+            host = netloc.split(':', 1)[0]
+            result = None
+            if host in self._bad_hosts:
+                logger.debug('Skipping %s due to bad host %s', url, host)
+            else:
+                req = Request(url, headers={'Accept-encoding': 'identity'})
+                try:
+                    logger.debug('Fetching %s', url)
+                    resp = self.opener.open(req, timeout=self.timeout)
+                    logger.debug('Fetched %s', url)
+                    headers = resp.info()
+                    content_type = headers.get('Content-Type', '')
+                    if HTML_CONTENT_TYPE.match(content_type):
+                        final_url = resp.geturl()
+                        data = resp.read()
+                        encoding = headers.get('Content-Encoding')
+                        if encoding:
+                            decoder = self.decoders[encoding]   # fail if not found
+                            data = decoder(data)
+                        encoding = 'utf-8'
+                        m = CHARSET.search(content_type)
+                        if m:
+                            encoding = m.group(1)
+                        try:
+                            data = data.decode(encoding)
+                        except UnicodeError:  # pragma: no cover
+                            data = data.decode('latin-1')    # fallback
+                        result = Page(data, final_url)
+                        self._page_cache[final_url] = result
+                except HTTPError as e:
+                    if e.code != 404:
+                        logger.exception('Fetch failed: %s: %s', url, e)
+                except URLError as e:  # pragma: no cover
+                    logger.exception('Fetch failed: %s: %s', url, e)
+                    with self._lock:
+                        self._bad_hosts.add(host)
+                except Exception as e:  # pragma: no cover
+                    logger.exception('Fetch failed: %s: %s', url, e)
+                finally:
+                    self._page_cache[url] = result   # even if None (failure)
+        return result
+
+    _distname_re = re.compile('<a href=[^>]*>([^<]+)<')
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        result = set()
+        page = self.get_page(self.base_url)
+        if not page:
+            raise DistlibException('Unable to get %s' % self.base_url)
+        for match in self._distname_re.finditer(page.data):
+            result.add(match.group(1))
+        return result
+
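Typical use of the scraping locator (a sketch; it requires network access to the index being scraped):

    from distlib.locators import SimpleScrapingLocator

    locator = SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0,
                                    num_workers=4)
    dist = locator.locate('distlib (>= 0.3)')
    if dist is not None:
        print(dist.name_and_version, sorted(dist.download_urls))
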
+class DirectoryLocator(Locator):
+    """
+    This class locates distributions in a directory tree.
+    """
+
+    def __init__(self, path, **kwargs):
+        """
+        Initialise an instance.
+        :param path: The root of the directory tree to search.
+        :param kwargs: Passed to the superclass constructor,
+                       except for:
+                       * recursive - if True (the default), subdirectories are
+                         recursed into. If False, only the top-level directory
+                         is searched.
+        """
+        self.recursive = kwargs.pop('recursive', True)
+        super(DirectoryLocator, self).__init__(**kwargs)
+        path = os.path.abspath(path)
+        if not os.path.isdir(path):  # pragma: no cover
+            raise DistlibException('Not a directory: %r' % path)
+        self.base_dir = path
+
+    def should_include(self, filename, parent):
+        """
+        Should a filename be considered as a candidate for a distribution
+        archive? As well as the filename, the directory which contains it
+        is provided, though not used by the current implementation.
+        """
+        return filename.endswith(self.downloadable_extensions)
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        for root, dirs, files in os.walk(self.base_dir):
+            for fn in files:
+                if self.should_include(fn, root):
+                    fn = os.path.join(root, fn)
+                    url = urlunparse(('file', '',
+                                      pathname2url(os.path.abspath(fn)),
+                                      '', '', ''))
+                    info = self.convert_url_to_download_info(url, name)
+                    if info:
+                        self._update_version_data(result, info)
+            if not self.recursive:
+                break
+        return result
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        result = set()
+        for root, dirs, files in os.walk(self.base_dir):
+            for fn in files:
+                if self.should_include(fn, root):
+                    fn = os.path.join(root, fn)
+                    url = urlunparse(('file', '',
+                                      pathname2url(os.path.abspath(fn)),
+                                      '', '', ''))
+                    info = self.convert_url_to_download_info(url, None)
+                    if info:
+                        result.add(info['name'])
+            if not self.recursive:
+                break
+        return result
+
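A sketch of locating from a local directory tree ('/tmp/wheelhouse' is a hypothetical directory containing sdists and/or wheels):

    from distlib.locators import DirectoryLocator

    locator = DirectoryLocator('/tmp/wheelhouse', recursive=False)
    print(locator.get_distribution_names())
    dist = locator.locate('requests')
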
+class JSONLocator(Locator):
+    """
+    This locator uses special extended metadata (not available on PyPI) and is
+    the basis of performant dependency resolution in distlib. Other locators
+    require archive downloads before dependencies can be determined! As you
+    might imagine, that can be slow.
+    """
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        raise NotImplementedError('Not available from this locator')
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        data = get_project_data(name)
+        if data:
+            for info in data.get('files', []):
+                if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
+                    continue
+                # We don't store summary in project metadata as it makes
+                # the data bigger for no benefit during dependency
+                # resolution
+                dist = make_dist(data['name'], info['version'],
+                                 summary=data.get('summary',
+                                                  'Placeholder for summary'),
+                                 scheme=self.scheme)
+                md = dist.metadata
+                md.source_url = info['url']
+                # TODO SHA256 digest
+                if 'digest' in info and info['digest']:
+                    dist.digest = ('md5', info['digest'])
+                md.dependencies = info.get('requirements', {})
+                dist.exports = info.get('exports', {})
+                result[dist.version] = dist
+                result['urls'].setdefault(dist.version, set()).add(info['url'])
+        return result
+
+class DistPathLocator(Locator):
+    """
+    This locator finds installed distributions in a path. It can be useful for
+    adding to an :class:`AggregatingLocator`.
+    """
+    def __init__(self, distpath, **kwargs):
+        """
+        Initialise an instance.
+
+        :param distpath: A :class:`DistributionPath` instance to search.
+        """
+        super(DistPathLocator, self).__init__(**kwargs)
+        assert isinstance(distpath, DistributionPath)
+        self.distpath = distpath
+
+    def _get_project(self, name):
+        dist = self.distpath.get_distribution(name)
+        if dist is None:
+            result = {'urls': {}, 'digests': {}}
+        else:
+            result = {
+                dist.version: dist,
+                'urls': {dist.version: set([dist.source_url])},
+                'digests': {dist.version: set([None])}
+            }
+        return result
+
+
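A sketch of wiring this up; DistributionPath() with no arguments searches sys.path for installed distributions:

    from distlib.database import DistributionPath
    from distlib.locators import DistPathLocator

    locator = DistPathLocator(DistributionPath())
    # Returns only empty 'urls'/'digests' entries if 'pip' isn't installed.
    result = locator.get_project('pip')
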
+class AggregatingLocator(Locator):
+    """
+    This class allows you to chain and/or merge a list of locators.
+    """
+    def __init__(self, *locators, **kwargs):
+        """
+        Initialise an instance.
+
+        :param locators: The list of locators to search.
+        :param kwargs: Passed to the superclass constructor,
+                       except for:
+                       * merge - if False (the default), the first successful
+                         search from any of the locators is returned. If True,
+                         the results from all locators are merged (this can be
+                         slow).
+        """
+        self.merge = kwargs.pop('merge', False)
+        self.locators = locators
+        super(AggregatingLocator, self).__init__(**kwargs)
+
+    def clear_cache(self):
+        super(AggregatingLocator, self).clear_cache()
+        for locator in self.locators:
+            locator.clear_cache()
+
+    def _set_scheme(self, value):
+        self._scheme = value
+        for locator in self.locators:
+            locator.scheme = value
+
+    scheme = property(Locator.scheme.fget, _set_scheme)
+
+    def _get_project(self, name):
+        result = {}
+        for locator in self.locators:
+            d = locator.get_project(name)
+            if d:
+                if self.merge:
+                    files = result.get('urls', {})
+                    digests = result.get('digests', {})
+                    # next line could overwrite result['urls'], result['digests']
+                    result.update(d)
+                    df = result.get('urls')
+                    if files and df:
+                        for k, v in files.items():
+                            if k in df:
+                                df[k] |= v
+                            else:
+                                df[k] = v
+                    dd = result.get('digests')
+                    if digests and dd:
+                        dd.update(digests)
+                else:
+                    # See issue #18. If any dists are found and we're looking
+                    # for specific constraints, we only return something if
+                    # a match is found. For example, if a DirectoryLocator
+                    # returns just foo (1.0) while we're looking for
+                    # foo (>= 2.0), we'll pretend there was nothing there so
+                    # that subsequent locators can be queried. Otherwise we
+                    # would just return foo (1.0) which would then lead to a
+                    # failure to find foo (>= 2.0), because other locators
+                    # weren't searched. Note that this only matters when
+                    # merge=False.
+                    if self.matcher is None:
+                        found = True
+                    else:
+                        found = False
+                        for k in d:
+                            if self.matcher.match(k):
+                                found = True
+                                break
+                    if found:
+                        result = d
+                        break
+        return result
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        result = set()
+        for locator in self.locators:
+            try:
+                result |= locator.get_distribution_names()
+            except NotImplementedError:
+                pass
+        return result
+
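A sketch of chaining locators: check a local wheelhouse first and fall back to scraping PyPI (the directory path is hypothetical; with the default merge=False, the first locator that satisfies the requirement wins):

    from distlib.locators import (AggregatingLocator, DirectoryLocator,
                                  SimpleScrapingLocator)

    locator = AggregatingLocator(
        DirectoryLocator('/tmp/wheelhouse'),
        SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0),
        scheme='legacy')
    dist = locator.locate('requests (>= 2.0)')
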
+
+# We use a legacy scheme simply because most of the dists on PyPI use legacy
+# versions which don't conform to PEP 440.
+default_locator = AggregatingLocator(
+                    # JSONLocator(), # don't use as PEP 426 is withdrawn
+                    SimpleScrapingLocator('https://pypi.org/simple/',
+                                          timeout=3.0),
+                    scheme='legacy')
+
+locate = default_locator.locate
+
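The module-level convenience function in action (a sketch; this performs live requests against pypi.org):

    from distlib.locators import locate

    dist = locate('requests (>= 2.0)')
    if dist is not None:
        print(dist.name_and_version)
        print(sorted(dist.download_urls))
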
+
+class DependencyFinder(object):
+    """
+    Locate dependencies for distributions.
+    """
+
+    def __init__(self, locator=None):
+        """
+        Initialise an instance, using the specified locator
+        to locate distributions.
+        """
+        self.locator = locator or default_locator
+        self.scheme = get_scheme(self.locator.scheme)
+
+    def add_distribution(self, dist):
+        """
+        Add a distribution to the finder. This will update internal information
+        about who provides what.
+        :param dist: The distribution to add.
+        """
+        logger.debug('adding distribution %s', dist)
+        name = dist.key
+        self.dists_by_name[name] = dist
+        self.dists[(name, dist.version)] = dist
+        for p in dist.provides:
+            name, version = parse_name_and_version(p)
+            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+            self.provided.setdefault(name, set()).add((version, dist))
+
+    def remove_distribution(self, dist):
+        """
+        Remove a distribution from the finder. This will update internal
+        information about who provides what.
+        :param dist: The distribution to remove.
+        """
+        logger.debug('removing distribution %s', dist)
+        name = dist.key
+        del self.dists_by_name[name]
+        del self.dists[(name, dist.version)]
+        for p in dist.provides:
+            name, version = parse_name_and_version(p)
+            logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
+            s = self.provided[name]
+            s.remove((version, dist))
+            if not s:
+                del self.provided[name]
+
+    def get_matcher(self, reqt):
+        """
+        Get a version matcher for a requirement.
+        :param reqt: The requirement
+        :type reqt: str
+        :return: A version matcher (an instance of
+                 :class:`distlib.version.Matcher`).
+        """
+        try:
+            matcher = self.scheme.matcher(reqt)
+        except UnsupportedVersionError:  # pragma: no cover
+            # XXX compat-mode if cannot read the version
+            name = reqt.split()[0]
+            matcher = self.scheme.matcher(name)
+        return matcher
+
+    def find_providers(self, reqt):
+        """
+        Find the distributions which can fulfill a requirement.
+
+        :param reqt: The requirement.
+        :type reqt: str
+        :return: A set of distributions which can fulfill the requirement.
+        """
+        matcher = self.get_matcher(reqt)
+        name = matcher.key   # case-insensitive
+        result = set()
+        provided = self.provided
+        if name in provided:
+            for version, provider in provided[name]:
+                try:
+                    match = matcher.match(version)
+                except UnsupportedVersionError:
+                    match = False
+
+                if match:
+                    result.add(provider)
+                    break
+        return result
+
+    def try_to_replace(self, provider, other, problems):
+        """
+        Attempt to replace one provider with another. This is typically used
+        when resolving dependencies from multiple sources, e.g. A requires
+        (B >= 1.0) while C requires (B >= 1.1).
+
+        For successful replacement, ``provider`` must meet all the requirements
+        which ``other`` fulfills.
+
+        :param provider: The provider we are trying to replace with.
+        :param other: The provider we're trying to replace.
+        :param problems: If False is returned, this will contain what
+                         problems prevented replacement. This is currently
+                         a tuple of the literal string 'cantreplace',
+                         ``provider``, ``other``  and the set of requirements
+                         that ``provider`` couldn't fulfill.
+        :return: True if we can replace ``other`` with ``provider``, else
+                 False.
+        """
+        rlist = self.reqts[other]
+        unmatched = set()
+        for s in rlist:
+            matcher = self.get_matcher(s)
+            if not matcher.match(provider.version):
+                unmatched.add(s)
+        if unmatched:
+            # can't replace other with provider
+            problems.add(('cantreplace', provider, other,
+                          frozenset(unmatched)))
+            result = False
+        else:
+            # can replace other with provider
+            self.remove_distribution(other)
+            del self.reqts[other]
+            for s in rlist:
+                self.reqts.setdefault(provider, set()).add(s)
+            self.add_distribution(provider)
+            result = True
+        return result
+
+    def find(self, requirement, meta_extras=None, prereleases=False):
+        """
+        Find a distribution and all distributions it depends on.
+
+        :param requirement: The requirement specifying the distribution to
+                            find, or a Distribution instance.
+        :param meta_extras: A list of meta extras such as :test:, :build: and
+                            so on.
+        :param prereleases: If ``True``, allow pre-release versions to be
+                            returned - otherwise, don't return prereleases
+                            unless they're all that's available.
+
+        Return a set of :class:`Distribution` instances and a set of
+        problems.
+
+        The distributions returned should be such that they have the
+        :attr:`required` attribute set to ``True`` if they were
+        from the ``requirement`` passed to ``find()``, and they have the
+        :attr:`build_time_dependency` attribute set to ``True`` unless they
+        are post-installation dependencies of the ``requirement``.
+
+        The problems should be a tuple consisting of the string
+        ``'unsatisfied'`` and the requirement which couldn't be satisfied
+        by any distribution known to the locator.
+        """
+
+        self.provided = {}
+        self.dists = {}
+        self.dists_by_name = {}
+        self.reqts = {}
+
+        meta_extras = set(meta_extras or [])
+        if ':*:' in meta_extras:
+            meta_extras.remove(':*:')
+            # :meta: and :run: are implicitly included
+            meta_extras |= set([':test:', ':build:', ':dev:'])
+
+        if isinstance(requirement, Distribution):
+            dist = odist = requirement
+            logger.debug('passed %s as requirement', odist)
+        else:
+            dist = odist = self.locator.locate(requirement,
+                                               prereleases=prereleases)
+            if dist is None:
+                raise DistlibException('Unable to locate %r' % requirement)
+            logger.debug('located %s', odist)
+        dist.requested = True
+        problems = set()
+        todo = set([dist])
+        install_dists = set([odist])
+        while todo:
+            dist = todo.pop()
+            name = dist.key     # case-insensitive
+            if name not in self.dists_by_name:
+                self.add_distribution(dist)
+            else:
+                #import pdb; pdb.set_trace()
+                other = self.dists_by_name[name]
+                if other != dist:
+                    self.try_to_replace(dist, other, problems)
+
+            ireqts = dist.run_requires | dist.meta_requires
+            sreqts = dist.build_requires
+            ereqts = set()
+            if meta_extras and dist in install_dists:
+                for key in ('test', 'build', 'dev'):
+                    e = ':%s:' % key
+                    if e in meta_extras:
+                        ereqts |= getattr(dist, '%s_requires' % key)
+            all_reqts = ireqts | sreqts | ereqts
+            for r in all_reqts:
+                providers = self.find_providers(r)
+                if not providers:
+                    logger.debug('No providers found for %r', r)
+                    provider = self.locator.locate(r, prereleases=prereleases)
+                    # If no provider is found and we didn't consider
+                    # prereleases, consider them now.
+                    if provider is None and not prereleases:
+                        provider = self.locator.locate(r, prereleases=True)
+                    if provider is None:
+                        logger.debug('Cannot satisfy %r', r)
+                        problems.add(('unsatisfied', r))
+                    else:
+                        n, v = provider.key, provider.version
+                        if (n, v) not in self.dists:
+                            todo.add(provider)
+                        providers.add(provider)
+                        if r in ireqts and dist in install_dists:
+                            install_dists.add(provider)
+                            logger.debug('Adding %s to install_dists',
+                                         provider.name_and_version)
+                for p in providers:
+                    name = p.key
+                    if name not in self.dists_by_name:
+                        self.reqts.setdefault(p, set()).add(r)
+                    else:
+                        other = self.dists_by_name[name]
+                        if other != p:
+                            # see if other can be replaced by p
+                            self.try_to_replace(p, other, problems)
+
+        dists = set(self.dists.values())
+        for dist in dists:
+            dist.build_time_dependency = dist not in install_dists
+            if dist.build_time_dependency:
+                logger.debug('%s is a build-time dependency only.',
+                             dist.name_and_version)
+        logger.debug('find done for %s', odist)
+        return dists, problems
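
A sketch of driving the finder end to end (network access assumed; with no argument it uses default_locator):

    from distlib.locators import DependencyFinder

    finder = DependencyFinder()
    dists, problems = finder.find('requests (>= 2.0)')
    for d in sorted(dists, key=lambda d: d.key):
        tag = ' (build-time only)' if d.build_time_dependency else ''
        print(d.name_and_version + tag)
    print(problems)   # set of ('unsatisfied', requirement) tuples, if any
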
diff --git a/venv/lib/python3.8/site-packages/distlib/manifest.py b/venv/lib/python3.8/site-packages/distlib/manifest.py
new file mode 100644
index 0000000..ca0fe44
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/manifest.py
@@ -0,0 +1,393 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2013 Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Class representing the list of files in a distribution.
+
+Equivalent to distutils.filelist, but fixes some problems.
+"""
+import fnmatch
+import logging
+import os
+import re
+import sys
+
+from . import DistlibException
+from .compat import fsdecode
+from .util import convert_path
+
+
+__all__ = ['Manifest']
+
+logger = logging.getLogger(__name__)
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
+_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+
+#
+# Due to the different results returned by fnmatch.translate, we need
+# to do slightly different processing for Python 2.7 and 3.2 ... this needed
+# to be brought in for Python 3.6 onwards.
+#
+_PYTHON_VERSION = sys.version_info[:2]
+
+class Manifest(object):
+    """A list of files built by on exploring the filesystem and filtered by
+    applying various patterns to what we find there.
+    """
+
+    def __init__(self, base=None):
+        """
+        Initialise an instance.
+
+        :param base: The base directory to explore under.
+        """
+        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
+        self.prefix = self.base + os.sep
+        self.allfiles = None
+        self.files = set()
+
+    #
+    # Public API
+    #
+
+    def findall(self):
+        """Find all files under the base and set ``allfiles`` to the absolute
+        pathnames of files found.
+        """
+        from stat import S_ISREG, S_ISDIR, S_ISLNK
+
+        self.allfiles = allfiles = []
+        root = self.base
+        stack = [root]
+        pop = stack.pop
+        push = stack.append
+
+        while stack:
+            root = pop()
+            names = os.listdir(root)
+
+            for name in names:
+                fullname = os.path.join(root, name)
+
+                # Avoid excess stat calls -- just one will do, thank you!
+                stat = os.stat(fullname)
+                mode = stat.st_mode
+                if S_ISREG(mode):
+                    allfiles.append(fsdecode(fullname))
+                elif S_ISDIR(mode) and not S_ISLNK(mode):
+                    push(fullname)
+
+    def add(self, item):
+        """
+        Add a file to the manifest.
+
+        :param item: The pathname to add. This can be relative to the base.
+        """
+        if not item.startswith(self.prefix):
+            item = os.path.join(self.base, item)
+        self.files.add(os.path.normpath(item))
+
+    def add_many(self, items):
+        """
+        Add a list of files to the manifest.
+
+        :param items: The pathnames to add. These can be relative to the base.
+        """
+        for item in items:
+            self.add(item)
+
+    def sorted(self, wantdirs=False):
+        """
+        Return sorted files in directory order
+        """
+
+        def add_dir(dirs, d):
+            dirs.add(d)
+            logger.debug('add_dir added %s', d)
+            if d != self.base:
+                parent, _ = os.path.split(d)
+                assert parent not in ('', '/')
+                add_dir(dirs, parent)
+
+        result = set(self.files)    # make a copy!
+        if wantdirs:
+            dirs = set()
+            for f in result:
+                add_dir(dirs, os.path.dirname(f))
+            result |= dirs
+        return [os.path.join(*path_tuple) for path_tuple in
+                sorted(os.path.split(path) for path in result)]
+
+    def clear(self):
+        """Clear all collected files."""
+        self.files = set()
+        self.allfiles = []
+
+    def process_directive(self, directive):
+        """
+        Process a directive which either adds some files from ``allfiles`` to
+        ``files``, or removes some files from ``files``.
+
+        :param directive: The directive to process. This should be in a format
+                     compatible with distutils ``MANIFEST.in`` files:
+
+                     http://docs.python.org/distutils/sourcedist.html#commands
+        """
+        # Parse the line: split it up, make sure the right number of words
+        # is there, and return the relevant words.  'action' is always
+        # defined: it's the first word of the line.  Which of the other
+        # three are defined depends on the action; it'll be either
+        # patterns, (dir and patterns), or (dirpattern).
+        action, patterns, thedir, dirpattern = self._parse_directive(directive)
+
+        # OK, now we know that the action is valid and we have the
+        # right number of words on the line for that action -- so we
+        # can proceed with minimal error-checking.
+        if action == 'include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=True):
+                    logger.warning('no files found matching %r', pattern)
+
+        elif action == 'exclude':
+            for pattern in patterns:
+                found = self._exclude_pattern(pattern, anchor=True)
+                #if not found:
+                #    logger.warning('no previously-included files '
+                #                   'found matching %r', pattern)
+
+        elif action == 'global-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=False):
+                    logger.warning('no files found matching %r '
+                                   'anywhere in distribution', pattern)
+
+        elif action == 'global-exclude':
+            for pattern in patterns:
+                found = self._exclude_pattern(pattern, anchor=False)
+                #if not found:
+                #    logger.warning('no previously-included files '
+                #                   'matching %r found anywhere in '
+                #                   'distribution', pattern)
+
+        elif action == 'recursive-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, prefix=thedir):
+                    logger.warning('no files found matching %r '
+                                   'under directory %r', pattern, thedir)
+
+        elif action == 'recursive-exclude':
+            for pattern in patterns:
+                found = self._exclude_pattern(pattern, prefix=thedir)
+                #if not found:
+                #    logger.warning('no previously-included files '
+                #                   'matching %r found under directory %r',
+                #                   pattern, thedir)
+
+        elif action == 'graft':
+            if not self._include_pattern(None, prefix=dirpattern):
+                logger.warning('no directories found matching %r',
+                               dirpattern)
+
+        elif action == 'prune':
+            if not self._exclude_pattern(None, prefix=dirpattern):
+                logger.warning('no previously-included directories found '
+                               'matching %r', dirpattern)
+        else:   # pragma: no cover
+            # This should never happen, as it should be caught in
+            # _parse_template_line
+            raise DistlibException(
+                'invalid action %r' % action)
+
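A sketch of the public API above, using MANIFEST.in-style directives ('/path/to/project' is a hypothetical project root):

    from distlib.manifest import Manifest

    m = Manifest('/path/to/project')
    m.process_directive('include *.py')
    m.process_directive('recursive-include docs *.rst')
    m.process_directive('prune build')
    for path in m.sorted():
        print(path)
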
+    #
+    # Private API
+    #
+
+    def _parse_directive(self, directive):
+        """
+        Validate a directive.
+        :param directive: The directive to validate.
+        :return: A tuple of action, patterns, thedir, dir_patterns
+        """
+        words = directive.split()
+        if len(words) == 1 and words[0] not in ('include', 'exclude',
+                                                'global-include',
+                                                'global-exclude',
+                                                'recursive-include',
+                                                'recursive-exclude',
+                                                'graft', 'prune'):
+            # no action given, let's use the default 'include'
+            words.insert(0, 'include')
+
+        action = words[0]
+        patterns = thedir = dir_pattern = None
+
+        if action in ('include', 'exclude',
+                      'global-include', 'global-exclude'):
+            if len(words) < 2:
+                raise DistlibException(
+                    '%r expects <pattern1> <pattern2> ...' % action)
+
+            patterns = [convert_path(word) for word in words[1:]]
+
+        elif action in ('recursive-include', 'recursive-exclude'):
+            if len(words) < 3:
+                raise DistlibException(
+                    '%r expects <dir> <pattern1> <pattern2> ...' % action)
+
+            thedir = convert_path(words[1])
+            patterns = [convert_path(word) for word in words[2:]]
+
+        elif action in ('graft', 'prune'):
+            if len(words) != 2:
+                raise DistlibException(
+                    '%r expects a single <dir_pattern>' % action)
+
+            dir_pattern = convert_path(words[1])
+
+        else:
+            raise DistlibException('unknown action %r' % action)
+
+        return action, patterns, thedir, dir_pattern
+
+    def _include_pattern(self, pattern, anchor=True, prefix=None,
+                         is_regex=False):
+        """Select strings (presumably filenames) from 'self.files' that
+        match 'pattern', a Unix-style wildcard (glob) pattern.
+
+        Patterns are not quite the same as implemented by the 'fnmatch'
+        module: '*' and '?'  match non-special characters, where "special"
+        is platform-dependent: slash on Unix; colon, slash, and backslash on
+        DOS/Windows; and colon on Mac OS.
+
+        If 'anchor' is true (the default), then the pattern match is more
+        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
+        'anchor' is false, both of these will match.
+
+        If 'prefix' is supplied, then only filenames starting with 'prefix'
+        (itself a pattern) and ending with 'pattern', with anything in between
+        them, will match.  'anchor' is ignored in this case.
+
+        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+        'pattern' is assumed to be either a string containing a regex or a
+        regex object -- no translation is done, the regex is just compiled
+        and used as-is.
+
+        Selected strings will be added to self.files.
+
+        Return True if files are found.
+        """
+        # XXX docstring lying about what the special chars are?
+        found = False
+        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+
+        # delayed loading of allfiles list
+        if self.allfiles is None:
+            self.findall()
+
+        for name in self.allfiles:
+            if pattern_re.search(name):
+                self.files.add(name)
+                found = True
+        return found
+
+    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
+                         is_regex=False):
+        """Remove strings (presumably filenames) from 'files' that match
+        'pattern'.
+
+        Other parameters are the same as for 'include_pattern()', above.
+        The list 'self.files' is modified in place. Return True if files are
+        found.
+
+        This API is public to allow e.g. exclusion of SCM subdirs when
+        packaging source distributions.
+        """
+        found = False
+        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+        for f in list(self.files):
+            if pattern_re.search(f):
+                self.files.remove(f)
+                found = True
+        return found
+
+    def _translate_pattern(self, pattern, anchor=True, prefix=None,
+                           is_regex=False):
+        """Translate a shell-like wildcard pattern to a compiled regular
+        expression.
+
+        Return the compiled regex.  If 'is_regex' true,
+        then 'pattern' is directly compiled to a regex (if it's a string)
+        or just returned as-is (assumes it's a regex object).
+        """
+        if is_regex:
+            if isinstance(pattern, str):
+                return re.compile(pattern)
+            else:
+                return pattern
+
+        if _PYTHON_VERSION > (3, 2):
+            # ditch start and end characters
+            start, _, end = self._glob_to_re('_').partition('_')
+
+        if pattern:
+            pattern_re = self._glob_to_re(pattern)
+            if _PYTHON_VERSION > (3, 2):
+                assert pattern_re.startswith(start) and pattern_re.endswith(end)
+        else:
+            pattern_re = ''
+
+        base = re.escape(os.path.join(self.base, ''))
+        if prefix is not None:
+            # ditch end of pattern character
+            if _PYTHON_VERSION <= (3, 2):
+                empty_pattern = self._glob_to_re('')
+                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
+            else:
+                prefix_re = self._glob_to_re(prefix)
+                assert prefix_re.startswith(start) and prefix_re.endswith(end)
+                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
+            sep = os.sep
+            if os.sep == '\\':
+                sep = r'\\'
+            if _PYTHON_VERSION <= (3, 2):
+                pattern_re = '^' + base + sep.join((prefix_re,
+                                                    '.*' + pattern_re))
+            else:
+                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
+                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
+                                                  pattern_re, end)
+        else:  # no prefix -- respect anchor flag
+            if anchor:
+                if _PYTHON_VERSION <= (3, 2):
+                    pattern_re = '^' + base + pattern_re
+                else:
+                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
+
+        return re.compile(pattern_re)
+
+    def _glob_to_re(self, pattern):
+        """Translate a shell-like glob pattern to a regular expression.
+
+        Return a string containing the regex.  Differs from
+        'fnmatch.translate()' in that '*' does not match "special characters"
+        (which are platform-specific).
+        """
+        pattern_re = fnmatch.translate(pattern)
+
+        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+        # and by extension they shouldn't match such "special characters" under
+        # any OS.  So change all non-escaped dots in the RE to match any
+        # character except the special characters (currently: just os.sep).
+        sep = os.sep
+        if os.sep == '\\':
+            # we're using a regex to manipulate a regex, so we need
+            # to escape the backslash twice
+            sep = r'\\\\'
+        escaped = r'\1[^%s]' % sep
+        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
+        return pattern_re
diff --git a/venv/lib/python3.8/site-packages/distlib/markers.py b/venv/lib/python3.8/site-packages/distlib/markers.py
new file mode 100644
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/markers.py
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Parser for the environment markers micro-language defined in PEP 508.
+"""
+
+import os
+import re
+import sys
+import platform
+
+from .compat import string_types
+from .util import in_venv, parse_marker
+from .version import NormalizedVersion as NV
+
+__all__ = ['interpret']
+
+_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")')
+
+
+def _is_literal(o):
+    if not isinstance(o, string_types) or not o:
+        return False
+    return o[0] in '\'"'
+
+
+def _get_versions(s):
+    result = []
+    for m in _VERSION_PATTERN.finditer(s):
+        result.append(NV(m.groups()[0]))
+    return set(result)
+
+
+class Evaluator(object):
+    """
+    This class is used to evaluate marker expressions.
+    """
+
+    operations = {
+        '==': lambda x, y: x == y,
+        '===': lambda x, y: x == y,
+        '~=': lambda x, y: x == y or x > y,
+        '!=': lambda x, y: x != y,
+        '<':  lambda x, y: x < y,
+        '<=':  lambda x, y: x == y or x < y,
+        '>':  lambda x, y: x > y,
+        '>=':  lambda x, y: x == y or x > y,
+        'and': lambda x, y: x and y,
+        'or': lambda x, y: x or y,
+        'in': lambda x, y: x in y,
+        'not in': lambda x, y: x not in y,
+    }
+
+    def evaluate(self, expr, context):
+        """
+        Evaluate a marker expression returned by the :func:`parse_requirement`
+        function in the specified context.
+        """
+        if isinstance(expr, string_types):
+            if expr[0] in '\'"':
+                result = expr[1:-1]
+            else:
+                if expr not in context:
+                    raise SyntaxError('unknown variable: %s' % expr)
+                result = context[expr]
+        else:
+            assert isinstance(expr, dict)
+            op = expr['op']
+            if op not in self.operations:
+                raise NotImplementedError('op not implemented: %s' % op)
+            elhs = expr['lhs']
+            erhs = expr['rhs']
+            if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
+                raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))
+
+            lhs = self.evaluate(elhs, context)
+            rhs = self.evaluate(erhs, context)
+            if ((elhs == 'python_version' or erhs == 'python_version') and
+                op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')):
+                lhs = NV(lhs)
+                rhs = NV(rhs)
+            elif elhs == 'python_version' and op in ('in', 'not in'):
+                lhs = NV(lhs)
+                rhs = _get_versions(rhs)
+            result = self.operations[op](lhs, rhs)
+        return result
+
+_DIGITS = re.compile(r'\d+\.\d+')
+
+def default_context():
+    def format_full_version(info):
+        version = '%s.%s.%s' % (info.major, info.minor, info.micro)
+        kind = info.releaselevel
+        if kind != 'final':
+            version += kind[0] + str(info.serial)
+        return version
+
+    if hasattr(sys, 'implementation'):
+        implementation_version = format_full_version(sys.implementation.version)
+        implementation_name = sys.implementation.name
+    else:
+        implementation_version = '0'
+        implementation_name = ''
+
+    ppv = platform.python_version()
+    m = _DIGITS.match(ppv)
+    pv = m.group(0)
+    result = {
+        'implementation_name': implementation_name,
+        'implementation_version': implementation_version,
+        'os_name': os.name,
+        'platform_machine': platform.machine(),
+        'platform_python_implementation': platform.python_implementation(),
+        'platform_release': platform.release(),
+        'platform_system': platform.system(),
+        'platform_version': platform.version(),
+        'platform_in_venv': str(in_venv()),
+        'python_full_version': ppv,
+        'python_version': pv,
+        'sys_platform': sys.platform,
+    }
+    return result
+
+DEFAULT_CONTEXT = default_context()
+del default_context
+
+evaluator = Evaluator()
+
+def interpret(marker, execution_context=None):
+    """
+    Interpret a marker and return a result depending on environment.
+
+    :param marker: The marker to interpret.
+    :type marker: str
+    :param execution_context: The context used for name lookup.
+    :type execution_context: mapping
+    """
+    try:
+        expr, rest = parse_marker(marker)
+    except Exception as e:
+        raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
+    if rest and rest[0] != '#':
+        raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
+    context = dict(DEFAULT_CONTEXT)
+    if execution_context:
+        context.update(execution_context)
+    return evaluator.evaluate(expr, context)
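
A sketch of evaluating PEP 508 markers with interpret(); the override in the last call shows how a different target environment can be simulated:

    from distlib.markers import interpret

    print(interpret('python_version >= "3.6"'))
    print(interpret('sys_platform == "linux" or os_name == "nt"'))
    # Override context values to evaluate for another environment:
    print(interpret('python_version >= "3.6"', {'python_version': '2.7'}))
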
diff --git a/venv/lib/python3.8/site-packages/distlib/metadata.py b/venv/lib/python3.8/site-packages/distlib/metadata.py
new file mode 100644
index 0000000..c329e19
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/metadata.py
@@ -0,0 +1,1076 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Implementation of the Metadata for Python packages PEPs.
+
+Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.2).
+"""
+from __future__ import unicode_literals
+
+import codecs
+from email import message_from_file
+import json
+import logging
+import re
+
+
+from . import DistlibException, __version__
+from .compat import StringIO, string_types, text_type
+from .markers import interpret
+from .util import extract_by_key, get_extras
+from .version import get_scheme, PEP440_VERSION_RE
+
+logger = logging.getLogger(__name__)
+
+
+class MetadataMissingError(DistlibException):
+    """A required metadata is missing"""
+
+
+class MetadataConflictError(DistlibException):
+    """Attempt to read or write metadata fields that are conflictual."""
+
+
+class MetadataUnrecognizedVersionError(DistlibException):
+    """Unknown metadata version number."""
+
+
+class MetadataInvalidError(DistlibException):
+    """A metadata value is invalid"""
+
+# public API of this module
+__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
+
+# Encoding used for the PKG-INFO files
+PKG_INFO_ENCODING = 'utf-8'
+
+# preferred version. Hopefully will be changed
+# to 1.2 once PEP 345 is supported everywhere
+PKG_INFO_PREFERRED_VERSION = '1.1'
+
+_LINE_PREFIX_1_2 = re.compile('\n       \\|')
+_LINE_PREFIX_PRE_1_2 = re.compile('\n        ')
+_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'License')
+
+_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'License', 'Classifier', 'Download-URL', 'Obsoletes',
+               'Provides', 'Requires')
+
+_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
+                'Download-URL')
+
+_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'Maintainer', 'Maintainer-email', 'License',
+               'Classifier', 'Download-URL', 'Obsoletes-Dist',
+               'Project-URL', 'Provides-Dist', 'Requires-Dist',
+               'Requires-Python', 'Requires-External')
+
+_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
+                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
+                'Maintainer-email', 'Project-URL')
+
+_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'Maintainer', 'Maintainer-email', 'License',
+               'Classifier', 'Download-URL', 'Obsoletes-Dist',
+               'Project-URL', 'Provides-Dist', 'Requires-Dist',
+               'Requires-Python', 'Requires-External', 'Private-Version',
+               'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
+               'Provides-Extra')
+
+_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
+                'Setup-Requires-Dist', 'Extension')
+
+# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in
+# the metadata. Include them in the tuple literal below to allow them
+# (for now).
+# Ditto for Obsoletes - see issue #140.
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type',
+                             'Requires', 'Provides', 'Obsoletes')
+
+_566_MARKERS = ('Description-Content-Type',)
+
+_643_MARKERS = ('Dynamic', 'License-File')
+
+_643_FIELDS = _566_FIELDS + _643_MARKERS
+
+_ALL_FIELDS = set()
+_ALL_FIELDS.update(_241_FIELDS)
+_ALL_FIELDS.update(_314_FIELDS)
+_ALL_FIELDS.update(_345_FIELDS)
+_ALL_FIELDS.update(_426_FIELDS)
+_ALL_FIELDS.update(_566_FIELDS)
+_ALL_FIELDS.update(_643_FIELDS)
+
+EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
+
+
+def _version2fieldlist(version):
+    if version == '1.0':
+        return _241_FIELDS
+    elif version == '1.1':
+        return _314_FIELDS
+    elif version == '1.2':
+        return _345_FIELDS
+    elif version in ('1.3', '2.1'):
+        # avoid adding field names if already there
+        return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS)
+    elif version == '2.0':
+        raise ValueError('Metadata 2.0 is withdrawn and not supported')
+        # return _426_FIELDS
+    elif version == '2.2':
+        return _643_FIELDS
+    raise MetadataUnrecognizedVersionError(version)
+
+
+def _best_version(fields):
+    """Detect the best version depending on the fields used."""
+    def _has_marker(keys, markers):
+        for marker in markers:
+            if marker in keys:
+                return True
+        return False
+
+    keys = []
+    for key, value in fields.items():
+        if value in ([], 'UNKNOWN', None):
+            continue
+        keys.append(key)
+
+    possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2']  # 2.0 removed
+
+    # first let's try to see if a field is not part of one of the version
+    for key in keys:
+        if key not in _241_FIELDS and '1.0' in possible_versions:
+            possible_versions.remove('1.0')
+            logger.debug('Removed 1.0 due to %s', key)
+        if key not in _314_FIELDS and '1.1' in possible_versions:
+            possible_versions.remove('1.1')
+            logger.debug('Removed 1.1 due to %s', key)
+        if key not in _345_FIELDS and '1.2' in possible_versions:
+            possible_versions.remove('1.2')
+            logger.debug('Removed 1.2 due to %s', key)
+        if key not in _566_FIELDS and '1.3' in possible_versions:
+            possible_versions.remove('1.3')
+            logger.debug('Removed 1.3 due to %s', key)
+        if key not in _566_FIELDS and '2.1' in possible_versions:
+            if key != 'Description':  # In 2.1, description allowed after headers
+                possible_versions.remove('2.1')
+                logger.debug('Removed 2.1 due to %s', key)
+        if key not in _643_FIELDS and '2.2' in possible_versions:
+            possible_versions.remove('2.2')
+            logger.debug('Removed 2.2 due to %s', key)
+        # if key not in _426_FIELDS and '2.0' in possible_versions:
+            # possible_versions.remove('2.0')
+            # logger.debug('Removed 2.0 due to %s', key)
+
+    # possible_versions contains the qualifying versions
+    if len(possible_versions) == 1:
+        return possible_versions[0]   # found !
+    elif len(possible_versions) == 0:
+        logger.debug('Out of options - unknown metadata set: %s', fields)
+        raise MetadataConflictError('Unknown metadata set')
+
+    # let's see if one unique marker is found
+    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
+    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
+    is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
+    # is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
+    is_2_2 = '2.2' in possible_versions and _has_marker(keys, _643_MARKERS)
+    if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_2) > 1:
+        raise MetadataConflictError('You used incompatible 1.1/1.2/2.1/2.2 fields')
+
+    # we have a choice among 1.0, 1.1, 1.2, 2.1 and 2.2:
+    #   - 1.0 has a broken Summary field but works with all tools
+    #   - 1.1 is best avoided
+    #   - 1.2 fixes Summary but has little adoption
+    #   - 2.1 adds more features
+    #   - 2.2 is the latest
+    if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_2:
+        # we couldn't find any specific marker
+        if PKG_INFO_PREFERRED_VERSION in possible_versions:
+            return PKG_INFO_PREFERRED_VERSION
+    if is_1_1:
+        return '1.1'
+    if is_1_2:
+        return '1.2'
+    if is_2_1:
+        return '2.1'
+    # if is_2_2:
+        # return '2.2'
+
+    return '2.2'
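+
+# Example (editor's sketch): fields act as version markers. A set containing
+# only PEP 241 fields plus 'Classifier' resolves to 1.1, while adding
+# 'Requires-Dist' forces at least 1.2:
+#
+#   >>> _best_version({'Name': 'demo', 'Version': '0.1',
+#   ...                'Classifier': ['Development Status :: 3 - Alpha']})
+#   '1.1'
+#   >>> _best_version({'Name': 'demo', 'Version': '0.1',
+#   ...                'Requires-Dist': ['requests']})
+#   '1.2'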
+
+# This follows the rules about transforming keys as described in
+# https://www.python.org/dev/peps/pep-0566/#id17
+_ATTR2FIELD = {
+    name.lower().replace("-", "_"): name for name in _ALL_FIELDS
+}
+_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()}
+
+_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
+_VERSIONS_FIELDS = ('Requires-Python',)
+_VERSION_FIELDS = ('Version',)
+_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
+               'Requires', 'Provides', 'Obsoletes-Dist',
+               'Provides-Dist', 'Requires-Dist', 'Requires-External',
+               'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
+               'Provides-Extra', 'Extension', 'License-File')
+_LISTTUPLEFIELDS = ('Project-URL',)
+
+_ELEMENTSFIELD = ('Keywords',)
+
+_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
+
+_MISSING = object()
+
+_FILESAFE = re.compile('[^A-Za-z0-9.]+')
+
+
+def _get_name_and_version(name, version, for_filename=False):
+    """Return the distribution name with version.
+
+    If for_filename is true, return a filename-escaped form."""
+    if for_filename:
+        # For both name and version any runs of non-alphanumeric or '.'
+        # characters are replaced with a single '-'.  Additionally any
+        # spaces in the version string become '.'
+        name = _FILESAFE.sub('-', name)
+        version = _FILESAFE.sub('-', version.replace(' ', '.'))
+    return '%s-%s' % (name, version)
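+
+# Example (editor's sketch): runs of unsafe characters collapse to '-' and
+# spaces in the version become '.':
+#
+#   >>> _get_name_and_version('my pkg', '1.0 beta', for_filename=True)
+#   'my-pkg-1.0.beta'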
+
+
+class LegacyMetadata(object):
+    """The legacy metadata of a release.
+
+    Supports versions 1.0, 1.1, 1.2, 1.3/2.1 and 2.2 (auto-detected). You can
+    instantiate the class with one of these arguments (or none):
+    - *path*, the path to a metadata file
+    - *fileobj* give a file-like object with metadata as content
+    - *mapping* is a dict-like object
+    - *scheme* is a version scheme name
+    """
+    # TODO document the mapping API and UNKNOWN default key
+
+    def __init__(self, path=None, fileobj=None, mapping=None,
+                 scheme='default'):
+        if [path, fileobj, mapping].count(None) < 2:
+            raise TypeError('path, fileobj and mapping are exclusive')
+        self._fields = {}
+        self.requires_files = []
+        self._dependencies = None
+        self.scheme = scheme
+        if path is not None:
+            self.read(path)
+        elif fileobj is not None:
+            self.read_file(fileobj)
+        elif mapping is not None:
+            self.update(mapping)
+            self.set_metadata_version()
+
+    def set_metadata_version(self):
+        self._fields['Metadata-Version'] = _best_version(self._fields)
+
+    def _write_field(self, fileobj, name, value):
+        fileobj.write('%s: %s\n' % (name, value))
+
+    def __getitem__(self, name):
+        return self.get(name)
+
+    def __setitem__(self, name, value):
+        return self.set(name, value)
+
+    def __delitem__(self, name):
+        field_name = self._convert_name(name)
+        try:
+            del self._fields[field_name]
+        except KeyError:
+            raise KeyError(name)
+
+    def __contains__(self, name):
+        return (name in self._fields or
+                self._convert_name(name) in self._fields)
+
+    def _convert_name(self, name):
+        if name in _ALL_FIELDS:
+            return name
+        name = name.replace('-', '_').lower()
+        return _ATTR2FIELD.get(name, name)
+
+    def _default_value(self, name):
+        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
+            return []
+        return 'UNKNOWN'
+
+    def _remove_line_prefix(self, value):
+        if self.metadata_version in ('1.0', '1.1'):
+            return _LINE_PREFIX_PRE_1_2.sub('\n', value)
+        else:
+            return _LINE_PREFIX_1_2.sub('\n', value)
+
+    def __getattr__(self, name):
+        if name in _ATTR2FIELD:
+            return self[name]
+        raise AttributeError(name)
+
+    #
+    # Public API
+    #
+
+#    dependencies = property(_get_dependencies, _set_dependencies)
+
+    def get_fullname(self, filesafe=False):
+        """Return the distribution name with version.
+
+        If filesafe is true, return a filename-escaped form."""
+        return _get_name_and_version(self['Name'], self['Version'], filesafe)
+
+    def is_field(self, name):
+        """return True if name is a valid metadata key"""
+        name = self._convert_name(name)
+        return name in _ALL_FIELDS
+
+    def is_multi_field(self, name):
+        name = self._convert_name(name)
+        return name in _LISTFIELDS
+
+    def read(self, filepath):
+        """Read the metadata values from a file path."""
+        fp = codecs.open(filepath, 'r', encoding='utf-8')
+        try:
+            self.read_file(fp)
+        finally:
+            fp.close()
+
+    def read_file(self, fileob):
+        """Read the metadata values from a file object."""
+        msg = message_from_file(fileob)
+        self._fields['Metadata-Version'] = msg['metadata-version']
+
+        # When reading, get all the fields we can
+        for field in _ALL_FIELDS:
+            if field not in msg:
+                continue
+            if field in _LISTFIELDS:
+                # we can have multiple lines
+                values = msg.get_all(field)
+                if field in _LISTTUPLEFIELDS and values is not None:
+                    values = [tuple(value.split(',')) for value in values]
+                self.set(field, values)
+            else:
+                # single line
+                value = msg[field]
+                if value is not None and value != 'UNKNOWN':
+                    self.set(field, value)
+
+        # PEP 566 specifies that the body be used for the description, if
+        # available
+        body = msg.get_payload()
+        self["Description"] = body if body else self["Description"]
+        # logger.debug('Attempting to set metadata for %s', self)
+        # self.set_metadata_version()
+
+    def write(self, filepath, skip_unknown=False):
+        """Write the metadata fields to filepath."""
+        fp = codecs.open(filepath, 'w', encoding='utf-8')
+        try:
+            self.write_file(fp, skip_unknown)
+        finally:
+            fp.close()
+
+    def write_file(self, fileobject, skip_unknown=False):
+        """Write the PKG-INFO format data to a file object."""
+        self.set_metadata_version()
+
+        for field in _version2fieldlist(self['Metadata-Version']):
+            values = self.get(field)
+            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
+                continue
+            if field in _ELEMENTSFIELD:
+                self._write_field(fileobject, field, ','.join(values))
+                continue
+            if field not in _LISTFIELDS:
+                if field == 'Description':
+                    if self.metadata_version in ('1.0', '1.1'):
+                        values = values.replace('\n', '\n        ')
+                    else:
+                        values = values.replace('\n', '\n       |')
+                values = [values]
+
+            if field in _LISTTUPLEFIELDS:
+                values = [','.join(value) for value in values]
+
+            for value in values:
+                self._write_field(fileobject, field, value)
+
+    def update(self, other=None, **kwargs):
+        """Set metadata values from the given iterable `other` and kwargs.
+
+        Behavior is like `dict.update`: If `other` has a ``keys`` method,
+        they are looped over and ``self[key]`` is assigned ``other[key]``.
+        Else, ``other`` is an iterable of ``(key, value)`` iterables.
+
+        Keys that don't match a metadata field or that have an empty value are
+        dropped.
+        """
+        def _set(key, value):
+            if key in _ATTR2FIELD and value:
+                self.set(self._convert_name(key), value)
+
+        if not other:
+            # other is None or empty container
+            pass
+        elif hasattr(other, 'keys'):
+            for k in other.keys():
+                _set(k, other[k])
+        else:
+            for k, v in other:
+                _set(k, v)
+
+        if kwargs:
+            for k, v in kwargs.items():
+                _set(k, v)
+
+    def set(self, name, value):
+        """Control then set a metadata field."""
+        name = self._convert_name(name)
+
+        if ((name in _ELEMENTSFIELD or name == 'Platform') and
+            not isinstance(value, (list, tuple))):
+            if isinstance(value, string_types):
+                value = [v.strip() for v in value.split(',')]
+            else:
+                value = []
+        elif (name in _LISTFIELDS and
+              not isinstance(value, (list, tuple))):
+            if isinstance(value, string_types):
+                value = [value]
+            else:
+                value = []
+
+        if logger.isEnabledFor(logging.WARNING):
+            project_name = self['Name']
+
+            scheme = get_scheme(self.scheme)
+            if name in _PREDICATE_FIELDS and value is not None:
+                for v in value:
+                    # check that the values are valid
+                    if not scheme.is_valid_matcher(v.split(';')[0]):
+                        logger.warning(
+                            "'%s': '%s' is not valid (field '%s')",
+                            project_name, v, name)
+            # FIXME this rejects UNKNOWN, is that right?
+            elif name in _VERSIONS_FIELDS and value is not None:
+                if not scheme.is_valid_constraint_list(value):
+                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
+                                   project_name, value, name)
+            elif name in _VERSION_FIELDS and value is not None:
+                if not scheme.is_valid_version(value):
+                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
+                                   project_name, value, name)
+
+        if name in _UNICODEFIELDS:
+            if name == 'Description':
+                value = self._remove_line_prefix(value)
+
+        self._fields[name] = value
+
+    def get(self, name, default=_MISSING):
+        """Get a metadata field."""
+        name = self._convert_name(name)
+        if name not in self._fields:
+            if default is _MISSING:
+                default = self._default_value(name)
+            return default
+        if name in _UNICODEFIELDS:
+            value = self._fields[name]
+            return value
+        elif name in _LISTFIELDS:
+            value = self._fields[name]
+            if value is None:
+                return []
+            res = []
+            for val in value:
+                if name not in _LISTTUPLEFIELDS:
+                    res.append(val)
+                else:
+                    # That's for Project-URL
+                    res.append((val[0], val[1]))
+            return res
+
+        elif name in _ELEMENTSFIELD:
+            value = self._fields[name]
+            if isinstance(value, string_types):
+                return value.split(',')
+        return self._fields[name]
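+
+    # Example (editor's sketch): list fields are normalised by set() and
+    # returned as lists by get(); unset fields fall back to their defaults:
+    #
+    #   >>> md = LegacyMetadata(mapping={'name': 'demo', 'version': '0.1'})
+    #   >>> md.set('Platform', 'linux, win32')
+    #   >>> md.get('Platform')
+    #   ['linux', 'win32']
+    #   >>> md.get('Summary')
+    #   'UNKNOWN'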
+
+    def check(self, strict=False):
+        """Check if the metadata is compliant. If strict is True then raise if
+        no Name or Version are provided"""
+        self.set_metadata_version()
+
+        # XXX should check the versions (if the file was loaded)
+        missing, warnings = [], []
+
+        for attr in ('Name', 'Version'):  # required by PEP 345
+            if attr not in self:
+                missing.append(attr)
+
+        if strict and missing != []:
+            msg = 'missing required metadata: %s' % ', '.join(missing)
+            raise MetadataMissingError(msg)
+
+        for attr in ('Home-page', 'Author'):
+            if attr not in self:
+                missing.append(attr)
+
+        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
+        if self['Metadata-Version'] != '1.2':
+            return missing, warnings
+
+        scheme = get_scheme(self.scheme)
+
+        def are_valid_constraints(value):
+            for v in value:
+                if not scheme.is_valid_matcher(v.split(';')[0]):
+                    return False
+            return True
+
+        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
+                                   (_VERSIONS_FIELDS,
+                                    scheme.is_valid_constraint_list),
+                                   (_VERSION_FIELDS,
+                                    scheme.is_valid_version)):
+            for field in fields:
+                value = self.get(field, None)
+                if value is not None and not controller(value):
+                    warnings.append("Wrong value for '%s': %s" % (field, value))
+
+        return missing, warnings
+
+    def todict(self, skip_missing=False):
+        """Return fields as a dict.
+
+        Field names will be converted to use the underscore-lowercase style
+        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
+        This is as per https://www.python.org/dev/peps/pep-0566/#id17.
+        """
+        self.set_metadata_version()
+
+        fields = _version2fieldlist(self['Metadata-Version'])
+
+        data = {}
+
+        for field_name in fields:
+            if not skip_missing or field_name in self._fields:
+                key = _FIELD2ATTR[field_name]
+                if key != 'project_url':
+                    data[key] = self[field_name]
+                else:
+                    data[key] = [','.join(u) for u in self[field_name]]
+
+        return data
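+
+    # Example (editor's sketch): keys in the result use the PEP 566 style,
+    # e.g. 'Metadata-Version' -> 'metadata_version':
+    #
+    #   >>> d = LegacyMetadata(mapping={'name': 'demo',
+    #   ...                             'version': '0.1'}).todict()
+    #   >>> d['name'], d['metadata_version']
+    #   ('demo', '1.1')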
+
+    def add_requirements(self, requirements):
+        if self['Metadata-Version'] == '1.1':
+            # we can't have 1.1 metadata *and* Setuptools requires
+            for field in ('Obsoletes', 'Requires', 'Provides'):
+                if field in self:
+                    del self[field]
+        self['Requires-Dist'] += requirements
+
+    # Mapping API
+    # TODO could add iter* variants
+
+    def keys(self):
+        return list(_version2fieldlist(self['Metadata-Version']))
+
+    def __iter__(self):
+        for key in self.keys():
+            yield key
+
+    def values(self):
+        return [self[key] for key in self.keys()]
+
+    def items(self):
+        return [(key, self[key]) for key in self.keys()]
+
+    def __repr__(self):
+        return '<%s %s %s>' % (self.__class__.__name__, self.name,
+                               self.version)
+
+
+METADATA_FILENAME = 'pydist.json'
+WHEEL_METADATA_FILENAME = 'metadata.json'
+LEGACY_METADATA_FILENAME = 'METADATA'
+
+
+class Metadata(object):
+    """
+    The metadata of a release. This implementation uses 2.1
+    metadata where possible. If not possible, it wraps a LegacyMetadata
+    instance which handles the key-value metadata format.
+    """
+
+    METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')
+
+    NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
+
+    FIELDNAME_MATCHER = re.compile('^[A-Z]([0-9A-Z-]*[0-9A-Z])?$', re.I)
+
+    VERSION_MATCHER = PEP440_VERSION_RE
+
+    SUMMARY_MATCHER = re.compile('.{1,2047}')
+
+    METADATA_VERSION = '2.0'
+
+    GENERATOR = 'distlib (%s)' % __version__
+
+    MANDATORY_KEYS = {
+        'name': (),
+        'version': (),
+        'summary': ('legacy',),
+    }
+
+    INDEX_KEYS = ('name version license summary description author '
+                  'author_email keywords platform home_page classifiers '
+                  'download_url')
+
+    DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
+                       'dev_requires provides meta_requires obsoleted_by '
+                       'supports_environments')
+
+    SYNTAX_VALIDATORS = {
+        'metadata_version': (METADATA_VERSION_MATCHER, ()),
+        'name': (NAME_MATCHER, ('legacy',)),
+        'version': (VERSION_MATCHER, ('legacy',)),
+        'summary': (SUMMARY_MATCHER, ('legacy',)),
+        'dynamic': (FIELDNAME_MATCHER, ('legacy',)),
+    }
+
+    __slots__ = ('_legacy', '_data', 'scheme')
+
+    def __init__(self, path=None, fileobj=None, mapping=None,
+                 scheme='default'):
+        if [path, fileobj, mapping].count(None) < 2:
+            raise TypeError('path, fileobj and mapping are exclusive')
+        self._legacy = None
+        self._data = None
+        self.scheme = scheme
+        #import pdb; pdb.set_trace()
+        if mapping is not None:
+            try:
+                self._validate_mapping(mapping, scheme)
+                self._data = mapping
+            except MetadataUnrecognizedVersionError:
+                self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
+                self.validate()
+        else:
+            data = None
+            if path:
+                with open(path, 'rb') as f:
+                    data = f.read()
+            elif fileobj:
+                data = fileobj.read()
+            if data is None:
+                # Initialised with no args - to be added
+                self._data = {
+                    'metadata_version': self.METADATA_VERSION,
+                    'generator': self.GENERATOR,
+                }
+            else:
+                if not isinstance(data, text_type):
+                    data = data.decode('utf-8')
+                try:
+                    self._data = json.loads(data)
+                    self._validate_mapping(self._data, scheme)
+                except ValueError:
+                    # Note: MetadataUnrecognizedVersionError does not
+                    # inherit from ValueError (it's a DistlibException,
+                    # which should not inherit from ValueError).
+                    # The ValueError comes from the json.load - if that
+                    # succeeds and we get a validation error, we want
+                    # that to propagate
+                    self._legacy = LegacyMetadata(fileobj=StringIO(data),
+                                                  scheme=scheme)
+                    self.validate()
+
+    common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))
+
+    none_list = (None, list)
+    none_dict = (None, dict)
+
+    mapped_keys = {
+        'run_requires': ('Requires-Dist', list),
+        'build_requires': ('Setup-Requires-Dist', list),
+        'dev_requires': none_list,
+        'test_requires': none_list,
+        'meta_requires': none_list,
+        'extras': ('Provides-Extra', list),
+        'modules': none_list,
+        'namespaces': none_list,
+        'exports': none_dict,
+        'commands': none_dict,
+        'classifiers': ('Classifier', list),
+        'source_url': ('Download-URL', None),
+        'metadata_version': ('Metadata-Version', None),
+    }
+
+    del none_list, none_dict
+
+    def __getattribute__(self, key):
+        common = object.__getattribute__(self, 'common_keys')
+        mapped = object.__getattribute__(self, 'mapped_keys')
+        if key in mapped:
+            lk, maker = mapped[key]
+            if self._legacy:
+                if lk is None:
+                    result = None if maker is None else maker()
+                else:
+                    result = self._legacy.get(lk)
+            else:
+                value = None if maker is None else maker()
+                if key not in ('commands', 'exports', 'modules', 'namespaces',
+                               'classifiers'):
+                    result = self._data.get(key, value)
+                else:
+                    # special cases for PEP 459
+                    sentinel = object()
+                    result = sentinel
+                    d = self._data.get('extensions')
+                    if d:
+                        if key == 'commands':
+                            result = d.get('python.commands', value)
+                        elif key == 'classifiers':
+                            d = d.get('python.details')
+                            if d:
+                                result = d.get(key, value)
+                        else:
+                            d = d.get('python.exports')
+                            if not d:
+                                d = self._data.get('python.exports')
+                            if d:
+                                result = d.get(key, value)
+                    if result is sentinel:
+                        result = value
+        elif key not in common:
+            result = object.__getattribute__(self, key)
+        elif self._legacy:
+            result = self._legacy.get(key)
+        else:
+            result = self._data.get(key)
+        return result
+
+    def _validate_value(self, key, value, scheme=None):
+        if key in self.SYNTAX_VALIDATORS:
+            pattern, exclusions = self.SYNTAX_VALIDATORS[key]
+            if (scheme or self.scheme) not in exclusions:
+                m = pattern.match(value)
+                if not m:
+                    raise MetadataInvalidError(
+                        "'%s' is an invalid value for the '%s' property" %
+                        (value, key))
+
+    def __setattr__(self, key, value):
+        self._validate_value(key, value)
+        common = object.__getattribute__(self, 'common_keys')
+        mapped = object.__getattribute__(self, 'mapped_keys')
+        if key in mapped:
+            lk, _ = mapped[key]
+            if self._legacy:
+                if lk is None:
+                    raise NotImplementedError
+                self._legacy[lk] = value
+            elif key not in ('commands', 'exports', 'modules', 'namespaces',
+                             'classifiers'):
+                self._data[key] = value
+            else:
+                # special cases for PEP 459
+                d = self._data.setdefault('extensions', {})
+                if key == 'commands':
+                    d['python.commands'] = value
+                elif key == 'classifiers':
+                    d = d.setdefault('python.details', {})
+                    d[key] = value
+                else:
+                    d = d.setdefault('python.exports', {})
+                    d[key] = value
+        elif key not in common:
+            object.__setattr__(self, key, value)
+        else:
+            if key == 'keywords':
+                if isinstance(value, string_types):
+                    value = value.strip()
+                    if value:
+                        value = value.split()
+                    else:
+                        value = []
+            if self._legacy:
+                self._legacy[key] = value
+            else:
+                self._data[key] = value
+
+    @property
+    def name_and_version(self):
+        return _get_name_and_version(self.name, self.version, True)
+
+    @property
+    def provides(self):
+        if self._legacy:
+            result = self._legacy['Provides-Dist']
+        else:
+            result = self._data.setdefault('provides', [])
+        s = '%s (%s)' % (self.name, self.version)
+        if s not in result:
+            result.append(s)
+        return result
+
+    @provides.setter
+    def provides(self, value):
+        if self._legacy:
+            self._legacy['Provides-Dist'] = value
+        else:
+            self._data['provides'] = value
+
+    def get_requirements(self, reqts, extras=None, env=None):
+        """
+        Base method to get dependencies, given a set of extras
+        to satisfy and an optional environment context.
+        :param reqts: A list of sometimes-wanted dependencies,
+                      perhaps dependent on extras and environment.
+        :param extras: A list of optional components being requested.
+        :param env: An optional environment for marker evaluation.
+        """
+        if self._legacy:
+            result = reqts
+        else:
+            result = []
+            extras = get_extras(extras or [], self.extras)
+            for d in reqts:
+                if 'extra' not in d and 'environment' not in d:
+                    # unconditional
+                    include = True
+                else:
+                    if 'extra' not in d:
+                        # Not extra-dependent - only environment-dependent
+                        include = True
+                    else:
+                        include = d.get('extra') in extras
+                    if include:
+                        # Not excluded because of extras, check environment
+                        marker = d.get('environment')
+                        if marker:
+                            include = interpret(marker, env)
+                if include:
+                    result.extend(d['requires'])
+            for key in ('build', 'dev', 'test'):
+                e = ':%s:' % key
+                if e in extras:
+                    extras.remove(e)
+                    # A recursive call, but it should terminate since 'test'
+                    # has been removed from the extras
+                    reqts = self._data.get('%s_requires' % key, [])
+                    result.extend(self.get_requirements(reqts, extras=extras,
+                                                        env=env))
+        return result
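+
+    # Example (editor's sketch, names illustrative): with JSON-style metadata,
+    # entries gated on an extra or an environment marker are filtered here,
+    # assuming 'test' is a declared extra:
+    #
+    #   reqts = [
+    #       {'requires': ['requests']},                     # unconditional
+    #       {'extra': 'test', 'requires': ['pytest']},      # extra-gated
+    #       {'environment': 'sys_platform == "win32"',
+    #        'requires': ['pywin32']},                      # marker-gated
+    #   ]
+    #   md.get_requirements(reqts, extras=['test'])
+    #   # -> ['requests', 'pytest'], plus 'pywin32' only on Windows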
+
+    @property
+    def dictionary(self):
+        if self._legacy:
+            return self._from_legacy()
+        return self._data
+
+    @property
+    def dependencies(self):
+        if self._legacy:
+            raise NotImplementedError
+        else:
+            return extract_by_key(self._data, self.DEPENDENCY_KEYS)
+
+    @dependencies.setter
+    def dependencies(self, value):
+        if self._legacy:
+            raise NotImplementedError
+        else:
+            self._data.update(value)
+
+    def _validate_mapping(self, mapping, scheme):
+        if mapping.get('metadata_version') != self.METADATA_VERSION:
+            raise MetadataUnrecognizedVersionError()
+        missing = []
+        for key, exclusions in self.MANDATORY_KEYS.items():
+            if key not in mapping:
+                if scheme not in exclusions:
+                    missing.append(key)
+        if missing:
+            msg = 'Missing metadata items: %s' % ', '.join(missing)
+            raise MetadataMissingError(msg)
+        for k, v in mapping.items():
+            self._validate_value(k, v, scheme)
+
+    def validate(self):
+        if self._legacy:
+            missing, warnings = self._legacy.check(True)
+            if missing or warnings:
+                logger.warning('Metadata: missing: %s, warnings: %s',
+                               missing, warnings)
+        else:
+            self._validate_mapping(self._data, self.scheme)
+
+    def todict(self):
+        if self._legacy:
+            return self._legacy.todict(True)
+        else:
+            result = extract_by_key(self._data, self.INDEX_KEYS)
+            return result
+
+    def _from_legacy(self):
+        assert self._legacy and not self._data
+        result = {
+            'metadata_version': self.METADATA_VERSION,
+            'generator': self.GENERATOR,
+        }
+        lmd = self._legacy.todict(True)     # skip missing ones
+        for k in ('name', 'version', 'license', 'summary', 'description',
+                  'classifier'):
+            if k in lmd:
+                if k == 'classifier':
+                    nk = 'classifiers'
+                else:
+                    nk = k
+                result[nk] = lmd[k]
+        kw = lmd.get('Keywords', [])
+        if kw == ['']:
+            kw = []
+        result['keywords'] = kw
+        keys = (('requires_dist', 'run_requires'),
+                ('setup_requires_dist', 'build_requires'))
+        for ok, nk in keys:
+            if ok in lmd and lmd[ok]:
+                result[nk] = [{'requires': lmd[ok]}]
+        result['provides'] = self.provides
+        # These remain unused placeholders: contact details are not mapped
+        # back from the legacy format here.
+        author = {}
+        maintainer = {}
+        return result
+
+    LEGACY_MAPPING = {
+        'name': 'Name',
+        'version': 'Version',
+        ('extensions', 'python.details', 'license'): 'License',
+        'summary': 'Summary',
+        'description': 'Description',
+        ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page',
+        ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author',
+        ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email',
+        'source_url': 'Download-URL',
+        ('extensions', 'python.details', 'classifiers'): 'Classifier',
+    }
+
+    def _to_legacy(self):
+        def process_entries(entries):
+            reqts = set()
+            for e in entries:
+                extra = e.get('extra')
+                env = e.get('environment')
+                rlist = e['requires']
+                for r in rlist:
+                    if not env and not extra:
+                        reqts.add(r)
+                    else:
+                        marker = ''
+                        if extra:
+                            marker = 'extra == "%s"' % extra
+                        if env:
+                            if marker:
+                                marker = '(%s) and %s' % (env, marker)
+                            else:
+                                marker = env
+                        reqts.add(';'.join((r, marker)))
+            return reqts
+
+        assert self._data and not self._legacy
+        result = LegacyMetadata()
+        nmd = self._data
+        # import pdb; pdb.set_trace()
+        for nk, ok in self.LEGACY_MAPPING.items():
+            if not isinstance(nk, tuple):
+                if nk in nmd:
+                    result[ok] = nmd[nk]
+            else:
+                d = nmd
+                found = True
+                for k in nk:
+                    try:
+                        d = d[k]
+                    except (KeyError, IndexError):
+                        found = False
+                        break
+                if found:
+                    result[ok] = d
+        r1 = process_entries(self.run_requires + self.meta_requires)
+        r2 = process_entries(self.build_requires + self.dev_requires)
+        if self.extras:
+            result['Provides-Extra'] = sorted(self.extras)
+        result['Requires-Dist'] = sorted(r1)
+        result['Setup-Requires-Dist'] = sorted(r2)
+        # TODO: any other fields wanted
+        return result
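+
+    # Note (editor's): process_entries() above flattens a JSON-style entry
+    # such as {'extra': 'test', 'requires': ['pytest']} back into the
+    # key-value form 'pytest;extra == "test"'.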
+
+    def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
+        if [path, fileobj].count(None) != 1:
+            raise ValueError('Exactly one of path and fileobj is needed')
+        self.validate()
+        if legacy:
+            if self._legacy:
+                legacy_md = self._legacy
+            else:
+                legacy_md = self._to_legacy()
+            if path:
+                legacy_md.write(path, skip_unknown=skip_unknown)
+            else:
+                legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
+        else:
+            if self._legacy:
+                d = self._from_legacy()
+            else:
+                d = self._data
+            if fileobj:
+                json.dump(d, fileobj, ensure_ascii=True, indent=2,
+                          sort_keys=True)
+            else:
+                with codecs.open(path, 'w', 'utf-8') as f:
+                    json.dump(d, f, ensure_ascii=True, indent=2,
+                              sort_keys=True)
+
+    def add_requirements(self, requirements):
+        if self._legacy:
+            self._legacy.add_requirements(requirements)
+        else:
+            run_requires = self._data.setdefault('run_requires', [])
+            always = None
+            for entry in run_requires:
+                if 'environment' not in entry and 'extra' not in entry:
+                    always = entry
+                    break
+            if always is None:
+                always = {'requires': requirements}
+                run_requires.insert(0, always)
+            else:
+                rset = set(always['requires']) | set(requirements)
+                always['requires'] = sorted(rset)
+
+    def __repr__(self):
+        name = self.name or '(no name)'
+        version = self.version or 'no version'
+        return '<%s %s %s (%s)>' % (self.__class__.__name__,
+                                    self.metadata_version, name, version)
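+
+# Example (editor's sketch): round-tripping metadata through both formats.
+# The mapping values and file names are illustrative:
+#
+#   md = Metadata(mapping={'metadata_version': '2.0', 'name': 'demo',
+#                          'version': '0.1', 'summary': 'Demo package'})
+#   md.write(path='PKG-INFO', legacy=True)    # key-value PKG-INFO format
+#   md.write(path='pydist.json')              # JSON format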
diff --git a/venv/lib/python3.8/site-packages/distlib/resources.py b/venv/lib/python3.8/site-packages/distlib/resources.py
new file mode 100644
index 0000000..fef52aa
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/resources.py
@@ -0,0 +1,358 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import bisect
+import io
+import logging
+import os
+import pkgutil
+import sys
+import types
+import zipimport
+
+from . import DistlibException
+from .util import cached_property, get_cache_base, Cache
+
+logger = logging.getLogger(__name__)
+
+
+cache = None    # created when needed
+
+
+class ResourceCache(Cache):
+    def __init__(self, base=None):
+        if base is None:
+            # Use native string to avoid issues on 2.x: see Python #20140.
+            base = os.path.join(get_cache_base(), str('resource-cache'))
+        super(ResourceCache, self).__init__(base)
+
+    def is_stale(self, resource, path):
+        """
+        Is the cache stale for the given resource?
+
+        :param resource: The :class:`Resource` being cached.
+        :param path: The path of the resource in the cache.
+        :return: True if the cache is stale.
+        """
+        # Cache invalidation is a hard problem :-)
+        return True
+
+    def get(self, resource):
+        """
+        Get a resource into the cache,
+
+        :param resource: A :class:`Resource` instance.
+        :return: The pathname of the resource in the cache.
+        """
+        prefix, path = resource.finder.get_cache_info(resource)
+        if prefix is None:
+            result = path
+        else:
+            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
+            dirname = os.path.dirname(result)
+            if not os.path.isdir(dirname):
+                os.makedirs(dirname)
+            if not os.path.exists(result):
+                stale = True
+            else:
+                stale = self.is_stale(resource, path)
+            if stale:
+                # write the bytes of the resource to the cache location
+                with open(result, 'wb') as f:
+                    f.write(resource.bytes)
+        return result
+
+
+class ResourceBase(object):
+    def __init__(self, finder, name):
+        self.finder = finder
+        self.name = name
+
+
+class Resource(ResourceBase):
+    """
+    A class representing an in-package resource, such as a data file. This is
+    not normally instantiated by user code, but rather by a
+    :class:`ResourceFinder` which manages the resource.
+    """
+    is_container = False        # Backwards compatibility
+
+    def as_stream(self):
+        """
+        Get the resource as a stream.
+
+        This is not a property to make it obvious that it returns a new stream
+        each time.
+        """
+        return self.finder.get_stream(self)
+
+    @cached_property
+    def file_path(self):
+        global cache
+        if cache is None:
+            cache = ResourceCache()
+        return cache.get(self)
+
+    @cached_property
+    def bytes(self):
+        return self.finder.get_bytes(self)
+
+    @cached_property
+    def size(self):
+        return self.finder.get_size(self)
+
+
+class ResourceContainer(ResourceBase):
+    is_container = True     # Backwards compatibility
+
+    @cached_property
+    def resources(self):
+        return self.finder.get_resources(self)
+
+
+class ResourceFinder(object):
+    """
+    Resource finder for file system resources.
+    """
+
+    if sys.platform.startswith('java'):
+        skipped_extensions = ('.pyc', '.pyo', '.class')
+    else:
+        skipped_extensions = ('.pyc', '.pyo')
+
+    def __init__(self, module):
+        self.module = module
+        self.loader = getattr(module, '__loader__', None)
+        self.base = os.path.dirname(getattr(module, '__file__', ''))
+
+    def _adjust_path(self, path):
+        return os.path.realpath(path)
+
+    def _make_path(self, resource_name):
+        # Issue #50: need to preserve type of path on Python 2.x
+        # like os.path._get_sep
+        if isinstance(resource_name, bytes):    # should only happen on 2.x
+            sep = b'/'
+        else:
+            sep = '/'
+        parts = resource_name.split(sep)
+        parts.insert(0, self.base)
+        result = os.path.join(*parts)
+        return self._adjust_path(result)
+
+    def _find(self, path):
+        return os.path.exists(path)
+
+    def get_cache_info(self, resource):
+        return None, resource.path
+
+    def find(self, resource_name):
+        path = self._make_path(resource_name)
+        if not self._find(path):
+            result = None
+        else:
+            if self._is_directory(path):
+                result = ResourceContainer(self, resource_name)
+            else:
+                result = Resource(self, resource_name)
+            result.path = path
+        return result
+
+    def get_stream(self, resource):
+        return open(resource.path, 'rb')
+
+    def get_bytes(self, resource):
+        with open(resource.path, 'rb') as f:
+            return f.read()
+
+    def get_size(self, resource):
+        return os.path.getsize(resource.path)
+
+    def get_resources(self, resource):
+        def allowed(f):
+            return (f != '__pycache__' and not
+                    f.endswith(self.skipped_extensions))
+        return set([f for f in os.listdir(resource.path) if allowed(f)])
+
+    def is_container(self, resource):
+        return self._is_directory(resource.path)
+
+    _is_directory = staticmethod(os.path.isdir)
+
+    def iterator(self, resource_name):
+        resource = self.find(resource_name)
+        if resource is not None:
+            todo = [resource]
+            while todo:
+                resource = todo.pop(0)
+                yield resource
+                if resource.is_container:
+                    rname = resource.name
+                    for name in resource.resources:
+                        if not rname:
+                            new_name = name
+                        else:
+                            new_name = '/'.join([rname, name])
+                        child = self.find(new_name)
+                        if child.is_container:
+                            todo.append(child)
+                        else:
+                            yield child
+
+
+class ZipResourceFinder(ResourceFinder):
+    """
+    Resource finder for resources in .zip files.
+    """
+    def __init__(self, module):
+        super(ZipResourceFinder, self).__init__(module)
+        archive = self.loader.archive
+        self.prefix_len = 1 + len(archive)
+        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
+        if hasattr(self.loader, '_files'):
+            self._files = self.loader._files
+        else:
+            self._files = zipimport._zip_directory_cache[archive]
+        self.index = sorted(self._files)
+
+    def _adjust_path(self, path):
+        return path
+
+    def _find(self, path):
+        path = path[self.prefix_len:]
+        if path in self._files:
+            result = True
+        else:
+            if path and path[-1] != os.sep:
+                path = path + os.sep
+            i = bisect.bisect(self.index, path)
+            try:
+                result = self.index[i].startswith(path)
+            except IndexError:
+                result = False
+        if not result:
+            logger.debug('_find failed: %r %r', path, self.loader.prefix)
+        else:
+            logger.debug('_find worked: %r %r', path, self.loader.prefix)
+        return result
+
+    def get_cache_info(self, resource):
+        prefix = self.loader.archive
+        path = resource.path[1 + len(prefix):]
+        return prefix, path
+
+    def get_bytes(self, resource):
+        return self.loader.get_data(resource.path)
+
+    def get_stream(self, resource):
+        return io.BytesIO(self.get_bytes(resource))
+
+    def get_size(self, resource):
+        path = resource.path[self.prefix_len:]
+        return self._files[path][3]
+
+    def get_resources(self, resource):
+        path = resource.path[self.prefix_len:]
+        if path and path[-1] != os.sep:
+            path += os.sep
+        plen = len(path)
+        result = set()
+        i = bisect.bisect(self.index, path)
+        while i < len(self.index):
+            if not self.index[i].startswith(path):
+                break
+            s = self.index[i][plen:]
+            result.add(s.split(os.sep, 1)[0])   # only immediate children
+            i += 1
+        return result
+
+    def _is_directory(self, path):
+        path = path[self.prefix_len:]
+        if path and path[-1] != os.sep:
+            path += os.sep
+        i = bisect.bisect(self.index, path)
+        try:
+            result = self.index[i].startswith(path)
+        except IndexError:
+            result = False
+        return result
+
+
+_finder_registry = {
+    type(None): ResourceFinder,
+    zipimport.zipimporter: ZipResourceFinder
+}
+
+try:
+    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
+    try:
+        import _frozen_importlib_external as _fi
+    except ImportError:
+        import _frozen_importlib as _fi
+    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
+    _finder_registry[_fi.FileFinder] = ResourceFinder
+    # See issue #146
+    _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder
+    del _fi
+except (ImportError, AttributeError):
+    pass
+
+
+def register_finder(loader, finder_maker):
+    _finder_registry[type(loader)] = finder_maker
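+
+# Example (editor's sketch): plugging in a custom finder for a hypothetical
+# loader type; register_finder() keys the registry on type(loader):
+#
+#   class MyResourceFinder(ResourceFinder):
+#       pass  # override get_bytes()/get_stream() etc. as needed
+#
+#   register_finder(my_loader_instance, MyResourceFinder)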
+
+
+_finder_cache = {}
+
+
+def finder(package):
+    """
+    Return a resource finder for a package.
+    :param package: The name of the package.
+    :return: A :class:`ResourceFinder` instance for the package.
+    """
+    if package in _finder_cache:
+        result = _finder_cache[package]
+    else:
+        if package not in sys.modules:
+            __import__(package)
+        module = sys.modules[package]
+        path = getattr(module, '__path__', None)
+        if path is None:
+            raise DistlibException('You cannot get a finder for a module, '
+                                   'only for a package')
+        loader = getattr(module, '__loader__', None)
+        finder_maker = _finder_registry.get(type(loader))
+        if finder_maker is None:
+            raise DistlibException('Unable to locate finder for %r' % package)
+        result = finder_maker(module)
+        _finder_cache[package] = result
+    return result
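+
+# Example (editor's sketch): walking the resources of an importable package,
+# here distlib itself; resource names are relative, '/'-separated paths:
+#
+#   f = finder('distlib')
+#   r = f.find('t32.exe')           # a Resource, or None if absent
+#   for res in f.iterator(''):      # depth-first walk of the package
+#       print(res.name, res.is_container)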
+
+
+_dummy_module = types.ModuleType(str('__dummy__'))
+
+
+def finder_for_path(path):
+    """
+    Return a resource finder for a path, which should represent a container.
+
+    :param path: The path.
+    :return: A :class:`ResourceFinder` instance for the path.
+    """
+    result = None
+    # calls any path hooks, gets importer into cache
+    pkgutil.get_importer(path)
+    loader = sys.path_importer_cache.get(path)
+    finder = _finder_registry.get(type(loader))
+    if finder:
+        module = _dummy_module
+        module.__file__ = os.path.join(path, '')
+        module.__loader__ = loader
+        result = finder(module)
+    return result
diff --git a/venv/lib/python3.8/site-packages/distlib/scripts.py b/venv/lib/python3.8/site-packages/distlib/scripts.py
new file mode 100644
index 0000000..d270624
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/scripts.py
@@ -0,0 +1,437 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2015 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from io import BytesIO
+import logging
+import os
+import re
+import struct
+import sys
+import time
+from zipfile import ZipInfo
+
+from .compat import sysconfig, detect_encoding, ZipFile
+from .resources import finder
+from .util import (FileOperator, get_export_entry, convert_path,
+                   get_executable, get_platform, in_venv)
+
+logger = logging.getLogger(__name__)
+
+_DEFAULT_MANIFEST = '''
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <assemblyIdentity version="1.0.0.0"
+ processorArchitecture="X86"
+ name="%s"
+ type="win32"/>
+
+ <!-- Identify the application security requirements. -->
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+</assembly>'''.strip()
+
+# check if Python is called on the first line with this expression
+FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
+SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
+import re
+import sys
+from %(module)s import %(import_name)s
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(%(func)s())
+'''
+
+
+def enquote_executable(executable):
+    if ' ' in executable:
+        # make sure we quote only the executable in case of env
+        # for example /usr/bin/env "/dir with spaces/bin/jython"
+        # instead of "/usr/bin/env /dir with spaces/bin/jython"
+        # otherwise the whole string would be quoted
+        if executable.startswith('/usr/bin/env '):
+            env, _executable = executable.split(' ', 1)
+            if ' ' in _executable and not _executable.startswith('"'):
+                executable = '%s "%s"' % (env, _executable)
+        else:
+            if not executable.startswith('"'):
+                executable = '"%s"' % executable
+    return executable
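+
+# Example (editor's sketch): only the interpreter path is quoted, and an
+# env-style prefix keeps /usr/bin/env itself outside the quotes:
+#
+#   >>> enquote_executable('/opt/my python/bin/python')
+#   '"/opt/my python/bin/python"'
+#   >>> enquote_executable('/usr/bin/env /opt/my python/bin/python')
+#   '/usr/bin/env "/opt/my python/bin/python"'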
+
+# Keep the old name around (for now), as there is at least one project using it!
+_enquote_executable = enquote_executable
+
+class ScriptMaker(object):
+    """
+    A class to copy or create scripts from source scripts or callable
+    specifications.
+    """
+    script_template = SCRIPT_TEMPLATE
+
+    executable = None  # for shebangs
+
+    def __init__(self, source_dir, target_dir, add_launchers=True,
+                 dry_run=False, fileop=None):
+        self.source_dir = source_dir
+        self.target_dir = target_dir
+        self.add_launchers = add_launchers
+        self.force = False
+        self.clobber = False
+        # It only makes sense to set mode bits on POSIX.
+        self.set_mode = (os.name == 'posix') or (os.name == 'java' and
+                                                 os._name == 'posix')
+        self.variants = set(('', 'X.Y'))
+        self._fileop = fileop or FileOperator(dry_run)
+
+        self._is_nt = os.name == 'nt' or (
+            os.name == 'java' and os._name == 'nt')
+        self.version_info = sys.version_info
+
+    def _get_alternate_executable(self, executable, options):
+        if options.get('gui', False) and self._is_nt:  # pragma: no cover
+            dn, fn = os.path.split(executable)
+            fn = fn.replace('python', 'pythonw')
+            executable = os.path.join(dn, fn)
+        return executable
+
+    if sys.platform.startswith('java'):  # pragma: no cover
+        def _is_shell(self, executable):
+            """
+            Determine if the specified executable is a script
+            (contains a #! line)
+            """
+            try:
+                with open(executable) as fp:
+                    return fp.read(2) == '#!'
+            except (OSError, IOError):
+                logger.warning('Failed to open %s', executable)
+                return False
+
+        def _fix_jython_executable(self, executable):
+            if self._is_shell(executable):
+                # The workaround for Jython is not needed on Linux systems.
+                import java
+
+                if java.lang.System.getProperty('os.name') == 'Linux':
+                    return executable
+            elif executable.lower().endswith('jython.exe'):
+                # Use wrapper exe for Jython on Windows
+                return executable
+            return '/usr/bin/env %s' % executable
+
+    def _build_shebang(self, executable, post_interp):
+        """
+        Build a shebang line. In the simple case (on Windows, or a shebang
+        line which is not too long and contains no spaces) use a simple
+        formulation for the shebang. Otherwise, use /bin/sh as the
+        executable, with a contrived shebang which allows the script to run
+        either under Python or sh, using
+        suitable quoting. Thanks to Harald Nordgren for his input.
+
+        See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
+                  https://hg.mozilla.org/mozilla-central/file/tip/mach
+        """
+        if os.name != 'posix':
+            simple_shebang = True
+        else:
+            # Add 3 for '#!' prefix and newline suffix.
+            shebang_length = len(executable) + len(post_interp) + 3
+            if sys.platform == 'darwin':
+                max_shebang_length = 512
+            else:
+                max_shebang_length = 127
+            simple_shebang = ((b' ' not in executable) and
+                              (shebang_length <= max_shebang_length))
+
+        if simple_shebang:
+            result = b'#!' + executable + post_interp + b'\n'
+        else:
+            result = b'#!/bin/sh\n'
+            result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
+            result += b"' '''"
+        return result
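+
+    # Example (editor's sketch): when a POSIX shebang would be too long for
+    # the kernel (127 bytes on most Linux systems) or the interpreter path
+    # contains a space, the fallback is a /bin/sh trampoline that re-execs
+    # the real interpreter and is simultaneously a valid Python string:
+    #
+    #   #!/bin/sh
+    #   '''exec' /very/long/path/to/python "$0" "$@"
+    #   ' '''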
+
+    def _get_shebang(self, encoding, post_interp=b'', options=None):
+        enquote = True
+        if self.executable:
+            executable = self.executable
+            enquote = False     # assume this will be taken care of
+        elif not sysconfig.is_python_build():
+            executable = get_executable()
+        elif in_venv():  # pragma: no cover
+            executable = os.path.join(sysconfig.get_path('scripts'),
+                            'python%s' % sysconfig.get_config_var('EXE'))
+        else:  # pragma: no cover
+            executable = os.path.join(
+                sysconfig.get_config_var('BINDIR'),
+                'python%s%s' % (sysconfig.get_config_var('VERSION'),
+                                sysconfig.get_config_var('EXE')))
+            if not os.path.isfile(executable):
+                # for Python builds from source on Windows, no Python executables with
+                # a version suffix are created, so we use python.exe
+                executable = os.path.join(sysconfig.get_config_var('BINDIR'),
+                                'python%s' % (sysconfig.get_config_var('EXE')))
+        if options:
+            executable = self._get_alternate_executable(executable, options)
+
+        if sys.platform.startswith('java'):  # pragma: no cover
+            executable = self._fix_jython_executable(executable)
+
+        # Normalise case for Windows - COMMENTED OUT
+        # executable = os.path.normcase(executable)
+        # N.B. The normalising operation above has been commented out: See
+        # issue #124. Although paths in Windows are generally case-insensitive,
+        # they aren't always. For example, a path containing a ẞ (which is a
+        # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
+        # LATIN SMALL LETTER SHARP S - U+00DF). The two are not considered by
+        # Windows as equivalent in path names.
+
+        # If the user didn't specify an executable, it may be necessary to
+        # cater for executable paths with spaces (not uncommon on Windows)
+        if enquote:
+            executable = enquote_executable(executable)
+        # Issue #51: don't use fsencode, since we later try to
+        # check that the shebang is decodable using utf-8.
+        executable = executable.encode('utf-8')
+        # in case of IronPython, play safe and enable frames support
+        if (sys.platform == 'cli' and b'-X:Frames' not in post_interp
+            and b'-X:FullFrames' not in post_interp):  # pragma: no cover
+            post_interp += b' -X:Frames'
+        shebang = self._build_shebang(executable, post_interp)
+        # Python parser starts to read a script using UTF-8 until
+        # it gets a #coding:xxx cookie. The shebang has to be the
+        # first line of a file, the #coding:xxx cookie cannot be
+        # written before. So the shebang has to be decodable from
+        # UTF-8.
+        try:
+            shebang.decode('utf-8')
+        except UnicodeDecodeError:  # pragma: no cover
+            raise ValueError(
+                'The shebang (%r) is not decodable from utf-8' % shebang)
+        # If the script is encoded to a custom encoding (use a
+        # #coding:xxx cookie), the shebang has to be decodable from
+        # the script encoding too.
+        if encoding != 'utf-8':
+            try:
+                shebang.decode(encoding)
+            except UnicodeDecodeError:  # pragma: no cover
+                raise ValueError(
+                    'The shebang (%r) is not decodable '
+                    'from the script encoding (%r)' % (shebang, encoding))
+        return shebang
+
+    def _get_script_text(self, entry):
+        return self.script_template % dict(module=entry.prefix,
+                                           import_name=entry.suffix.split('.')[0],
+                                           func=entry.suffix)
+
+    manifest = _DEFAULT_MANIFEST
+
+    def get_manifest(self, exename):
+        base = os.path.basename(exename)
+        return self.manifest % base
+
+    def _write_script(self, names, shebang, script_bytes, filenames, ext):
+        use_launcher = self.add_launchers and self._is_nt
+        linesep = os.linesep.encode('utf-8')
+        if not shebang.endswith(linesep):
+            shebang += linesep
+        if not use_launcher:
+            script_bytes = shebang + script_bytes
+        else:  # pragma: no cover
+            if ext == 'py':
+                launcher = self._get_launcher('t')
+            else:
+                launcher = self._get_launcher('w')
+            stream = BytesIO()
+            with ZipFile(stream, 'w') as zf:
+                source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
+                if source_date_epoch:
+                    date_time = time.gmtime(int(source_date_epoch))[:6]
+                    zinfo = ZipInfo(filename='__main__.py', date_time=date_time)
+                    zf.writestr(zinfo, script_bytes)
+                else:
+                    zf.writestr('__main__.py', script_bytes)
+            zip_data = stream.getvalue()
+            script_bytes = launcher + shebang + zip_data
+        for name in names:
+            outname = os.path.join(self.target_dir, name)
+            if use_launcher:  # pragma: no cover
+                n, e = os.path.splitext(outname)
+                if e.startswith('.py'):
+                    outname = n
+                outname = '%s.exe' % outname
+                try:
+                    self._fileop.write_binary_file(outname, script_bytes)
+                except Exception:
+                    # Failed writing an executable - it might be in use.
+                    logger.warning('Failed to write executable - trying to '
+                                   'use .deleteme logic')
+                    dfname = '%s.deleteme' % outname
+                    if os.path.exists(dfname):
+                        os.remove(dfname)       # Not allowed to fail here
+                    os.rename(outname, dfname)  # nor here
+                    self._fileop.write_binary_file(outname, script_bytes)
+                    logger.debug('Able to replace executable using '
+                                 '.deleteme logic')
+                    try:
+                        os.remove(dfname)
+                    except Exception:
+                        pass    # still in use - ignore error
+            else:
+                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
+                    outname = '%s.%s' % (outname, ext)
+                if os.path.exists(outname) and not self.clobber:
+                    logger.warning('Skipping existing file %s', outname)
+                    continue
+                self._fileop.write_binary_file(outname, script_bytes)
+                if self.set_mode:
+                    self._fileop.set_executable_mode([outname])
+            filenames.append(outname)
+
+    variant_separator = '-'
+
+    def get_script_filenames(self, name):
+        result = set()
+        if '' in self.variants:
+            result.add(name)
+        if 'X' in self.variants:
+            result.add('%s%s' % (name, self.version_info[0]))
+        if 'X.Y' in self.variants:
+            result.add('%s%s%s.%s' % (name, self.variant_separator,
+                                      self.version_info[0], self.version_info[1]))
+        return result
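+
+    # E.g. (illustrative, assuming the default variants {'', 'X.Y'}): on
+    # Python 3.8, get_script_filenames('foo') returns {'foo', 'foo-3.8'};
+    # enabling the 'X' variant would add 'foo3' (note that only the 'X.Y'
+    # variant uses variant_separator).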
+
+    def _make_script(self, entry, filenames, options=None):
+        post_interp = b''
+        if options:
+            args = options.get('interpreter_args', [])
+            if args:
+                args = ' %s' % ' '.join(args)
+                post_interp = args.encode('utf-8')
+        shebang = self._get_shebang('utf-8', post_interp, options=options)
+        script = self._get_script_text(entry).encode('utf-8')
+        scriptnames = self.get_script_filenames(entry.name)
+        if options and options.get('gui', False):
+            ext = 'pyw'
+        else:
+            ext = 'py'
+        self._write_script(scriptnames, shebang, script, filenames, ext)
+
+    def _copy_script(self, script, filenames):
+        adjust = False
+        script = os.path.join(self.source_dir, convert_path(script))
+        outname = os.path.join(self.target_dir, os.path.basename(script))
+        if not self.force and not self._fileop.newer(script, outname):
+            logger.debug('not copying %s (up-to-date)', script)
+            return
+
+        # Always open the file, but ignore failures in dry-run mode --
+        # that way, we'll get accurate feedback if we can read the
+        # script.
+        try:
+            f = open(script, 'rb')
+        except IOError:  # pragma: no cover
+            if not self.dry_run:
+                raise
+            f = None
+        else:
+            first_line = f.readline()
+            if not first_line:  # pragma: no cover
+                logger.warning('%s is an empty file (skipping)', script)
+                return
+
+            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
+            if match:
+                adjust = True
+                post_interp = match.group(1) or b''
+
+        if not adjust:
+            if f:
+                f.close()
+            self._fileop.copy_file(script, outname)
+            if self.set_mode:
+                self._fileop.set_executable_mode([outname])
+            filenames.append(outname)
+        else:
+            logger.info('copying and adjusting %s -> %s', script,
+                        self.target_dir)
+            if not self._fileop.dry_run:
+                encoding, lines = detect_encoding(f.readline)
+                f.seek(0)
+                shebang = self._get_shebang(encoding, post_interp)
+                if b'pythonw' in first_line:  # pragma: no cover
+                    ext = 'pyw'
+                else:
+                    ext = 'py'
+                n = os.path.basename(outname)
+                self._write_script([n], shebang, f.read(), filenames, ext)
+            if f:
+                f.close()
+
+    @property
+    def dry_run(self):
+        return self._fileop.dry_run
+
+    @dry_run.setter
+    def dry_run(self, value):
+        self._fileop.dry_run = value
+
+    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
+        # Executable launcher support.
+        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
+
+        def _get_launcher(self, kind):
+            if struct.calcsize('P') == 8:   # 64-bit
+                bits = '64'
+            else:
+                bits = '32'
+            platform_suffix = '-arm' if get_platform() == 'win-arm64' else ''
+            name = '%s%s%s.exe' % (kind, bits, platform_suffix)
+            # Issue 31: don't hardcode an absolute package name, but
+            # determine it relative to the current package
+            distlib_package = __name__.rsplit('.', 1)[0]
+            resource = finder(distlib_package).find(name)
+            if not resource:
+                msg = ('Unable to find resource %s in package %s' % (name,
+                       distlib_package))
+                raise ValueError(msg)
+            return resource.bytes
+
+    # Public API follows
+
+    def make(self, specification, options=None):
+        """
+        Make a script.
+
+        :param specification: The specification, which is either a valid export
+                              entry specification (to make a script from a
+                              callable) or a filename (to make a script by
+                              copying from a source location).
+        :param options: A dictionary of options controlling script generation.
+        :return: A list of all absolute pathnames written to.
+        """
+        filenames = []
+        entry = get_export_entry(specification)
+        if entry is None:
+            self._copy_script(specification, filenames)
+        else:
+            self._make_script(entry, filenames, options=options)
+        return filenames
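+
+    # Illustrative usage sketch, assuming this class is distlib's ScriptMaker
+    # (the entry point and paths below are hypothetical):
+    #
+    #     maker = ScriptMaker(source_dir='scripts', target_dir='/tmp/bin')
+    #     maker.variants = {''}                 # only the unsuffixed name
+    #     maker.make('foo = mypkg.cli:main')    # -> ['/tmp/bin/foo']
+    #     maker.make('helper.sh')               # copied, #! line rewritten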
+
+    def make_multiple(self, specifications, options=None):
+        """
+        Take a list of specifications and make scripts from them.
+        :param specifications: A list of specifications.
+        :return: A list of all absolute pathnames written to.
+        """
+        filenames = []
+        for specification in specifications:
+            filenames.extend(self.make(specification, options))
+        return filenames
diff --git a/venv/lib/python3.8/site-packages/distlib/t32.exe b/venv/lib/python3.8/site-packages/distlib/t32.exe
new file mode 100644
index 0000000..52154f0
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/t32.exe differ
diff --git a/venv/lib/python3.8/site-packages/distlib/t64-arm.exe b/venv/lib/python3.8/site-packages/distlib/t64-arm.exe
new file mode 100644
index 0000000..e1ab8f8
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/t64-arm.exe differ
diff --git a/venv/lib/python3.8/site-packages/distlib/t64.exe b/venv/lib/python3.8/site-packages/distlib/t64.exe
new file mode 100644
index 0000000..e8bebdb
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/t64.exe differ
diff --git a/venv/lib/python3.8/site-packages/distlib/util.py b/venv/lib/python3.8/site-packages/distlib/util.py
new file mode 100644
index 0000000..dd01849
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/util.py
@@ -0,0 +1,1932 @@
+#
+# Copyright (C) 2012-2021 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import codecs
+from collections import deque
+import contextlib
+import csv
+from glob import iglob as std_iglob
+import io
+import json
+import logging
+import os
+import py_compile
+import re
+import socket
+try:
+    import ssl
+except ImportError:  # pragma: no cover
+    ssl = None
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+
+try:
+    import threading
+except ImportError:  # pragma: no cover
+    import dummy_threading as threading
+import time
+
+from . import DistlibException
+from .compat import (string_types, text_type, shutil, raw_input, StringIO,
+                     cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
+                     splittype, HTTPHandler, BaseConfigurator, valid_ident,
+                     Container, configparser, URLError, ZipFile, fsdecode,
+                     unquote, urlparse)
+
+logger = logging.getLogger(__name__)
+
+#
+# Requirement parsing code as per PEP 508
+#
+
+IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
+VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
+COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
+MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
+OR = re.compile(r'^or\b\s*')
+AND = re.compile(r'^and\b\s*')
+NON_SPACE = re.compile(r'(\S+)\s*')
+STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
+
+
+def parse_marker(marker_string):
+    """
+    Parse a marker string and return a dictionary containing a marker expression.
+
+    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
+    the expression grammar, or strings. A string contained in quotes is to be
+    interpreted as a literal string, and a string not contained in quotes is a
+    variable (such as os_name).
+    """
+    def marker_var(remaining):
+        # either identifier, or literal string
+        m = IDENTIFIER.match(remaining)
+        if m:
+            result = m.groups()[0]
+            remaining = remaining[m.end():]
+        elif not remaining:
+            raise SyntaxError('unexpected end of input')
+        else:
+            q = remaining[0]
+            if q not in '\'"':
+                raise SyntaxError('invalid expression: %s' % remaining)
+            oq = '\'"'.replace(q, '')
+            remaining = remaining[1:]
+            parts = [q]
+            while remaining:
+                # either a string chunk, or oq, or q to terminate
+                if remaining[0] == q:
+                    break
+                elif remaining[0] == oq:
+                    parts.append(oq)
+                    remaining = remaining[1:]
+                else:
+                    m = STRING_CHUNK.match(remaining)
+                    if not m:
+                        raise SyntaxError('error in string literal: %s' % remaining)
+                    parts.append(m.groups()[0])
+                    remaining = remaining[m.end():]
+            else:
+                s = ''.join(parts)
+                raise SyntaxError('unterminated string: %s' % s)
+            parts.append(q)
+            result = ''.join(parts)
+            remaining = remaining[1:].lstrip() # skip past closing quote
+        return result, remaining
+
+    def marker_expr(remaining):
+        if remaining and remaining[0] == '(':
+            result, remaining = marker(remaining[1:].lstrip())
+            if remaining[0] != ')':
+                raise SyntaxError('unterminated parenthesis: %s' % remaining)
+            remaining = remaining[1:].lstrip()
+        else:
+            lhs, remaining = marker_var(remaining)
+            while remaining:
+                m = MARKER_OP.match(remaining)
+                if not m:
+                    break
+                op = m.groups()[0]
+                remaining = remaining[m.end():]
+                rhs, remaining = marker_var(remaining)
+                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
+            result = lhs
+        return result, remaining
+
+    def marker_and(remaining):
+        lhs, remaining = marker_expr(remaining)
+        while remaining:
+            m = AND.match(remaining)
+            if not m:
+                break
+            remaining = remaining[m.end():]
+            rhs, remaining = marker_expr(remaining)
+            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
+        return lhs, remaining
+
+    def marker(remaining):
+        lhs, remaining = marker_and(remaining)
+        while remaining:
+            m = OR.match(remaining)
+            if not m:
+                break
+            remaining = remaining[m.end():]
+            rhs, remaining = marker_and(remaining)
+            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
+        return lhs, remaining
+
+    return marker(marker_string)
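+
+# E.g. (illustrative):
+#
+#     parse_marker('python_version >= "3.6" and os_name == "posix"')
+#
+# returns ({'op': 'and',
+#           'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.6"'},
+#           'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}, '')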
+
+
+def parse_requirement(req):
+    """
+    Parse a requirement passed in as a string. Return a Container
+    whose attributes contain the various parts of the requirement.
+    """
+    remaining = req.strip()
+    if not remaining or remaining.startswith('#'):
+        return None
+    m = IDENTIFIER.match(remaining)
+    if not m:
+        raise SyntaxError('name expected: %s' % remaining)
+    distname = m.groups()[0]
+    remaining = remaining[m.end():]
+    extras = mark_expr = versions = uri = None
+    if remaining and remaining[0] == '[':
+        i = remaining.find(']', 1)
+        if i < 0:
+            raise SyntaxError('unterminated extra: %s' % remaining)
+        s = remaining[1:i]
+        remaining = remaining[i + 1:].lstrip()
+        extras = []
+        while s:
+            m = IDENTIFIER.match(s)
+            if not m:
+                raise SyntaxError('malformed extra: %s' % s)
+            extras.append(m.groups()[0])
+            s = s[m.end():]
+            if not s:
+                break
+            if s[0] != ',':
+                raise SyntaxError('comma expected in extras: %s' % s)
+            s = s[1:].lstrip()
+        if not extras:
+            extras = None
+    if remaining:
+        if remaining[0] == '@':
+            # it's a URI
+            remaining = remaining[1:].lstrip()
+            m = NON_SPACE.match(remaining)
+            if not m:
+                raise SyntaxError('invalid URI: %s' % remaining)
+            uri = m.groups()[0]
+            t = urlparse(uri)
+            # there are issues with Python and URL parsing, so this test
+            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
+            # always parse invalid URLs correctly - it should raise
+            # exceptions for malformed URLs
+            if not (t.scheme and t.netloc):
+                raise SyntaxError('Invalid URL: %s' % uri)
+            remaining = remaining[m.end():].lstrip()
+        else:
+
+            def get_versions(ver_remaining):
+                """
+                Return a list of operator, version tuples if any are
+                specified, else None.
+                """
+                m = COMPARE_OP.match(ver_remaining)
+                versions = None
+                if m:
+                    versions = []
+                    while True:
+                        op = m.groups()[0]
+                        ver_remaining = ver_remaining[m.end():]
+                        m = VERSION_IDENTIFIER.match(ver_remaining)
+                        if not m:
+                            raise SyntaxError('invalid version: %s' % ver_remaining)
+                        v = m.groups()[0]
+                        versions.append((op, v))
+                        ver_remaining = ver_remaining[m.end():]
+                        if not ver_remaining or ver_remaining[0] != ',':
+                            break
+                        ver_remaining = ver_remaining[1:].lstrip()
+                        # Some packages have a trailing comma which would break things
+                        # See issue #148
+                        if not ver_remaining:
+                            break
+                        m = COMPARE_OP.match(ver_remaining)
+                        if not m:
+                            raise SyntaxError('invalid constraint: %s' % ver_remaining)
+                    if not versions:
+                        versions = None
+                return versions, ver_remaining
+
+            if remaining[0] != '(':
+                versions, remaining = get_versions(remaining)
+            else:
+                i = remaining.find(')', 1)
+                if i < 0:
+                    raise SyntaxError('unterminated parenthesis: %s' % remaining)
+                s = remaining[1:i]
+                remaining = remaining[i + 1:].lstrip()
+                # As a special diversion from PEP 508, allow a version number
+                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
+                # is allowed in earlier PEPs)
+                if COMPARE_OP.match(s):
+                    versions, _ = get_versions(s)
+                else:
+                    m = VERSION_IDENTIFIER.match(s)
+                    if not m:
+                        raise SyntaxError('invalid constraint: %s' % s)
+                    v = m.groups()[0]
+                    s = s[m.end():].lstrip()
+                    if s:
+                        raise SyntaxError('invalid constraint: %s' % s)
+                    versions = [('~=', v)]
+
+    if remaining:
+        if remaining[0] != ';':
+            raise SyntaxError('invalid requirement: %s' % remaining)
+        remaining = remaining[1:].lstrip()
+
+        mark_expr, remaining = parse_marker(remaining)
+
+    if remaining and remaining[0] != '#':
+        raise SyntaxError('unexpected trailing data: %s' % remaining)
+
+    if not versions:
+        rs = distname
+    else:
+        rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
+    return Container(name=distname, extras=extras, constraints=versions,
+                     marker=mark_expr, url=uri, requirement=rs)
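+
+# E.g. (illustrative):
+#
+#     r = parse_requirement('requests[security] >= 2.8.1; python_version < "3.9"')
+#     r.name, r.extras, r.constraints
+#     # -> ('requests', ['security'], [('>=', '2.8.1')])
+#     r.requirement
+#     # -> 'requests >= 2.8.1'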
+
+
+def get_resources_dests(resources_root, rules):
+    """Find destinations for resources files"""
+
+    def get_rel_path(root, path):
+        # normalizes and returns a lstripped-/-separated path
+        root = root.replace(os.path.sep, '/')
+        path = path.replace(os.path.sep, '/')
+        assert path.startswith(root)
+        return path[len(root):].lstrip('/')
+
+    destinations = {}
+    for base, suffix, dest in rules:
+        prefix = os.path.join(resources_root, base)
+        for abs_base in iglob(prefix):
+            abs_glob = os.path.join(abs_base, suffix)
+            for abs_path in iglob(abs_glob):
+                resource_file = get_rel_path(resources_root, abs_path)
+                if dest is None:  # remove the entry if it was here
+                    destinations.pop(resource_file, None)
+                else:
+                    rel_path = get_rel_path(abs_base, abs_path)
+                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
+                    destinations[resource_file] = rel_dest + '/' + rel_path
+    return destinations
+
+
+def in_venv():
+    if hasattr(sys, 'real_prefix'):
+        # virtualenv venvs
+        result = True
+    else:
+        # PEP 405 venvs
+        result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
+    return result
+
+
+def get_executable():
+# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
+# changes to the stub launcher mean that sys.executable always points
+# to the stub on OS X
+#    if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
+#                                     in os.environ):
+#        result =  os.environ['__PYVENV_LAUNCHER__']
+#    else:
+#        result = sys.executable
+#    return result
+    # Avoid normcasing: see issue #143
+    # result = os.path.normcase(sys.executable)
+    result = sys.executable
+    if not isinstance(result, text_type):
+        result = fsdecode(result)
+    return result
+
+
+def proceed(prompt, allowed_chars, error_prompt=None, default=None):
+    p = prompt
+    while True:
+        s = raw_input(p)
+        p = prompt
+        if not s and default:
+            s = default
+        if s:
+            c = s[0].lower()
+            if c in allowed_chars:
+                break
+            if error_prompt:
+                p = '%c: %s\n%s' % (c, error_prompt, prompt)
+    return c
+
+
+def extract_by_key(d, keys):
+    if isinstance(keys, string_types):
+        keys = keys.split()
+    result = {}
+    for key in keys:
+        if key in d:
+            result[key] = d[key]
+    return result
+
+def read_exports(stream):
+    if sys.version_info[0] >= 3:
+        # needs to be a text stream
+        stream = codecs.getreader('utf-8')(stream)
+    # Try to load as JSON, falling back on legacy format
+    data = stream.read()
+    stream = StringIO(data)
+    try:
+        jdata = json.load(stream)
+        result = jdata['extensions']['python.exports']['exports']
+        for group, entries in result.items():
+            for k, v in entries.items():
+                s = '%s = %s' % (k, v)
+                entry = get_export_entry(s)
+                assert entry is not None
+                entries[k] = entry
+        return result
+    except Exception:
+        stream.seek(0, 0)
+
+    def read_stream(cp, stream):
+        if hasattr(cp, 'read_file'):
+            cp.read_file(stream)
+        else:
+            cp.readfp(stream)
+
+    cp = configparser.ConfigParser()
+    try:
+        read_stream(cp, stream)
+    except configparser.MissingSectionHeaderError:
+        stream.close()
+        data = textwrap.dedent(data)
+        stream = StringIO(data)
+        read_stream(cp, stream)
+
+    result = {}
+    for key in cp.sections():
+        result[key] = entries = {}
+        for name, value in cp.items(key):
+            s = '%s = %s' % (name, value)
+            entry = get_export_entry(s)
+            assert entry is not None
+            #entry.dist = self
+            entries[name] = entry
+    return result
+
+
+def write_exports(exports, stream):
+    if sys.version_info[0] >= 3:
+        # needs to be a text stream
+        stream = codecs.getwriter('utf-8')(stream)
+    cp = configparser.ConfigParser()
+    for k, v in exports.items():
+        # TODO check k, v for valid values
+        cp.add_section(k)
+        for entry in v.values():
+            if entry.suffix is None:
+                s = entry.prefix
+            else:
+                s = '%s:%s' % (entry.prefix, entry.suffix)
+            if entry.flags:
+                s = '%s [%s]' % (s, ', '.join(entry.flags))
+            cp.set(k, entry.name, s)
+    cp.write(stream)
+
+
+@contextlib.contextmanager
+def tempdir():
+    td = tempfile.mkdtemp()
+    try:
+        yield td
+    finally:
+        shutil.rmtree(td)
+
+@contextlib.contextmanager
+def chdir(d):
+    cwd = os.getcwd()
+    try:
+        os.chdir(d)
+        yield
+    finally:
+        os.chdir(cwd)
+
+
+@contextlib.contextmanager
+def socket_timeout(seconds=15):
+    cto = socket.getdefaulttimeout()
+    try:
+        socket.setdefaulttimeout(seconds)
+        yield
+    finally:
+        socket.setdefaulttimeout(cto)
+
+
+class cached_property(object):
+    def __init__(self, func):
+        self.func = func
+        #for attr in ('__name__', '__module__', '__doc__'):
+        #    setattr(self, attr, getattr(func, attr, None))
+
+    def __get__(self, obj, cls=None):
+        if obj is None:
+            return self
+        value = self.func(obj)
+        object.__setattr__(obj, self.func.__name__, value)
+        #obj.__dict__[self.func.__name__] = value = self.func(obj)
+        return value
+
+def convert_path(pathname):
+    """Return 'pathname' as a name that will work on the native filesystem.
+
+    The path is split on '/' and put back together again using the current
+    directory separator.  Needed because filenames in the setup script are
+    always supplied in Unix style, and have to be converted to the local
+    convention before we can actually use them in the filesystem.  Raises
+    ValueError on non-Unix-ish systems if 'pathname' either starts or
+    ends with a slash.
+    """
+    if os.sep == '/':
+        return pathname
+    if not pathname:
+        return pathname
+    if pathname[0] == '/':
+        raise ValueError("path '%s' cannot be absolute" % pathname)
+    if pathname[-1] == '/':
+        raise ValueError("path '%s' cannot end with '/'" % pathname)
+
+    paths = pathname.split('/')
+    while os.curdir in paths:
+        paths.remove(os.curdir)
+    if not paths:
+        return os.curdir
+    return os.path.join(*paths)
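+
+# E.g. (illustrative): on Windows, convert_path('pkg/data/file.txt') returns
+# 'pkg\\data\\file.txt'; on POSIX systems the path is returned unchanged.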
+
+
+class FileOperator(object):
+    def __init__(self, dry_run=False):
+        self.dry_run = dry_run
+        self.ensured = set()
+        self._init_record()
+
+    def _init_record(self):
+        self.record = False
+        self.files_written = set()
+        self.dirs_created = set()
+
+    def record_as_written(self, path):
+        if self.record:
+            self.files_written.add(path)
+
+    def newer(self, source, target):
+        """Tell if the target is newer than the source.
+
+        Returns true if 'source' exists and is more recently modified than
+        'target', or if 'source' exists and 'target' doesn't.
+
+        Returns false if both exist and 'target' is the same age or younger
+        than 'source'. Raise PackagingFileError if 'source' does not exist.
+
+        Note that this test is not very accurate: files created in the same
+        second will have the same "age".
+        """
+        if not os.path.exists(source):
+            raise DistlibException("file '%r' does not exist" %
+                                   os.path.abspath(source))
+        if not os.path.exists(target):
+            return True
+
+        return os.stat(source).st_mtime > os.stat(target).st_mtime
+
+    def copy_file(self, infile, outfile, check=True):
+        """Copy a file respecting dry-run and force flags.
+        """
+        self.ensure_dir(os.path.dirname(outfile))
+        logger.info('Copying %s to %s', infile, outfile)
+        if not self.dry_run:
+            msg = None
+            if check:
+                if os.path.islink(outfile):
+                    msg = '%s is a symlink' % outfile
+                elif os.path.exists(outfile) and not os.path.isfile(outfile):
+                    msg = '%s is a non-regular file' % outfile
+            if msg:
+                raise ValueError(msg + ' which would be overwritten')
+            shutil.copyfile(infile, outfile)
+        self.record_as_written(outfile)
+
+    def copy_stream(self, instream, outfile, encoding=None):
+        assert not os.path.isdir(outfile)
+        self.ensure_dir(os.path.dirname(outfile))
+        logger.info('Copying stream %s to %s', instream, outfile)
+        if not self.dry_run:
+            if encoding is None:
+                outstream = open(outfile, 'wb')
+            else:
+                outstream = codecs.open(outfile, 'w', encoding=encoding)
+            try:
+                shutil.copyfileobj(instream, outstream)
+            finally:
+                outstream.close()
+        self.record_as_written(outfile)
+
+    def write_binary_file(self, path, data):
+        self.ensure_dir(os.path.dirname(path))
+        if not self.dry_run:
+            if os.path.exists(path):
+                os.remove(path)
+            with open(path, 'wb') as f:
+                f.write(data)
+        self.record_as_written(path)
+
+    def write_text_file(self, path, data, encoding):
+        self.write_binary_file(path, data.encode(encoding))
+
+    def set_mode(self, bits, mask, files):
+        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
+            # Set the executable bits (owner, group, and world) on
+            # all the files specified.
+            for f in files:
+                if self.dry_run:
+                    logger.info("changing mode of %s", f)
+                else:
+                    mode = (os.stat(f).st_mode | bits) & mask
+                    logger.info("changing mode of %s to %o", f, mode)
+                    os.chmod(f, mode)
+
+    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
+
+    def ensure_dir(self, path):
+        path = os.path.abspath(path)
+        if path not in self.ensured and not os.path.exists(path):
+            self.ensured.add(path)
+            d, f = os.path.split(path)
+            self.ensure_dir(d)
+            logger.info('Creating %s' % path)
+            if not self.dry_run:
+                os.mkdir(path)
+            if self.record:
+                self.dirs_created.add(path)
+
+    def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
+        dpath = cache_from_source(path, not optimize)
+        logger.info('Byte-compiling %s to %s', path, dpath)
+        if not self.dry_run:
+            # diagpath must always be bound: py_compile.compile below runs
+            # even when the target is up to date.
+            diagpath = None
+            if force or self.newer(path, dpath):
+                if prefix:
+                    assert path.startswith(prefix)
+                    diagpath = path[len(prefix):]
+            compile_kwargs = {}
+            if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
+                compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
+            py_compile.compile(path, dpath, diagpath, True, **compile_kwargs)     # raise error
+        self.record_as_written(dpath)
+        return dpath
+
+    def ensure_removed(self, path):
+        if os.path.exists(path):
+            if os.path.isdir(path) and not os.path.islink(path):
+                logger.debug('Removing directory tree at %s', path)
+                if not self.dry_run:
+                    shutil.rmtree(path)
+                if self.record:
+                    if path in self.dirs_created:
+                        self.dirs_created.remove(path)
+            else:
+                if os.path.islink(path):
+                    s = 'link'
+                else:
+                    s = 'file'
+                logger.debug('Removing %s %s', s, path)
+                if not self.dry_run:
+                    os.remove(path)
+                if self.record:
+                    if path in self.files_written:
+                        self.files_written.remove(path)
+
+    def is_writable(self, path):
+        result = False
+        while not result:
+            if os.path.exists(path):
+                result = os.access(path, os.W_OK)
+                break
+            parent = os.path.dirname(path)
+            if parent == path:
+                break
+            path = parent
+        return result
+
+    def commit(self):
+        """
+        Commit recorded changes, turn off recording, return
+        changes.
+        """
+        assert self.record
+        result = self.files_written, self.dirs_created
+        self._init_record()
+        return result
+
+    def rollback(self):
+        if not self.dry_run:
+            for f in list(self.files_written):
+                if os.path.exists(f):
+                    os.remove(f)
+            # dirs should all be empty now, except perhaps for
+            # __pycache__ subdirs
+            # reverse so that subdirs appear before their parents
+            dirs = sorted(self.dirs_created, reverse=True)
+            for d in dirs:
+                flist = os.listdir(d)
+                if flist:
+                    assert flist == ['__pycache__']
+                    sd = os.path.join(d, flist[0])
+                    os.rmdir(sd)
+                os.rmdir(d)     # should fail if non-empty
+        self._init_record()
+
+def resolve(module_name, dotted_path):
+    if module_name in sys.modules:
+        mod = sys.modules[module_name]
+    else:
+        mod = __import__(module_name)
+    if dotted_path is None:
+        result = mod
+    else:
+        parts = dotted_path.split('.')
+        result = getattr(mod, parts.pop(0))
+        for p in parts:
+            result = getattr(result, p)
+    return result
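+
+# E.g. (illustrative): resolve('os', 'path.join') returns the os.path.join
+# function, and resolve('os', None) returns the os module itself.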
+
+
+class ExportEntry(object):
+    def __init__(self, name, prefix, suffix, flags):
+        self.name = name
+        self.prefix = prefix
+        self.suffix = suffix
+        self.flags = flags
+
+    @cached_property
+    def value(self):
+        return resolve(self.prefix, self.suffix)
+
+    def __repr__(self):  # pragma: no cover
+        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
+                                                self.suffix, self.flags)
+
+    def __eq__(self, other):
+        if not isinstance(other, ExportEntry):
+            result = False
+        else:
+            result = (self.name == other.name and
+                      self.prefix == other.prefix and
+                      self.suffix == other.suffix and
+                      self.flags == other.flags)
+        return result
+
+    __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
+                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+                      \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+                      ''', re.VERBOSE)
+
+def get_export_entry(specification):
+    m = ENTRY_RE.search(specification)
+    if not m:
+        result = None
+        if '[' in specification or ']' in specification:
+            raise DistlibException("Invalid specification "
+                                   "'%s'" % specification)
+    else:
+        d = m.groupdict()
+        name = d['name']
+        path = d['callable']
+        colons = path.count(':')
+        if colons == 0:
+            prefix, suffix = path, None
+        else:
+            if colons != 1:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            prefix, suffix = path.split(':')
+        flags = d['flags']
+        if flags is None:
+            if '[' in specification or ']' in specification:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            flags = []
+        else:
+            flags = [f.strip() for f in flags.split(',')]
+        result = ExportEntry(name, prefix, suffix, flags)
+    return result
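+
+# E.g. (illustrative, hypothetical module path):
+#
+#     e = get_export_entry('foo = mypkg.cli:main [flag1, flag2]')
+#     (e.name, e.prefix, e.suffix, e.flags)
+#     # -> ('foo', 'mypkg.cli', 'main', ['flag1', 'flag2'])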
+
+
+def get_cache_base(suffix=None):
+    """
+    Return the default base location for distlib caches. If the directory does
+    not exist, it is created. Use the suffix provided for the base directory,
+    and default to '.distlib' if it isn't provided.
+
+    On Windows, if LOCALAPPDATA is defined in the environment, then it is
+    assumed to be a directory, and will be the parent directory of the result.
+    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
+    directory - using os.expanduser('~') - will be the parent directory of
+    the result.
+
+    The result is just the directory '.distlib' in the parent directory as
+    determined above, or with the name specified with ``suffix``.
+    """
+    if suffix is None:
+        suffix = '.distlib'
+    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
+        result = os.path.expandvars('$localappdata')
+    else:
+        # Assume posix, or old Windows
+        result = os.path.expanduser('~')
+    # we use 'isdir' instead of 'exists', because we want to
+    # fail if there's a file with that name
+    if os.path.isdir(result):
+        usable = os.access(result, os.W_OK)
+        if not usable:
+            logger.warning('Directory exists but is not writable: %s', result)
+    else:
+        try:
+            os.makedirs(result)
+            usable = True
+        except OSError:
+            logger.warning('Unable to create %s', result, exc_info=True)
+            usable = False
+    if not usable:
+        result = tempfile.mkdtemp()
+        logger.warning('Default location unusable, using %s', result)
+    return os.path.join(result, suffix)
+
+
+def path_to_cache_dir(path):
+    """
+    Convert an absolute path to a directory name for use in a cache.
+
+    The algorithm used is:
+
+    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
+    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
+    #. ``'.cache'`` is appended.
+    """
+    d, p = os.path.splitdrive(os.path.abspath(path))
+    if d:
+        d = d.replace(':', '---')
+    p = p.replace(os.sep, '--')
+    return d + p + '.cache'
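+
+# E.g. (illustrative): on POSIX, path_to_cache_dir('/home/user/project')
+# returns '--home--user--project.cache'; on Windows, 'C:\\Users\\x' becomes
+# 'C-----Users--x.cache' ('C:' -> 'C---', each separator -> '--').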
+
+
+def ensure_slash(s):
+    if not s.endswith('/'):
+        return s + '/'
+    return s
+
+
+def parse_credentials(netloc):
+    username = password = None
+    if '@' in netloc:
+        prefix, netloc = netloc.rsplit('@', 1)
+        if ':' not in prefix:
+            username = prefix
+        else:
+            username, password = prefix.split(':', 1)
+    if username:
+        username = unquote(username)
+    if password:
+        password = unquote(password)
+    return username, password, netloc
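+
+# E.g. (illustrative): parse_credentials('user:s%40crt@example.com') returns
+# ('user', 's@crt', 'example.com'); the credentials are percent-decoded and
+# split off the host portion.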
+
+
+def get_process_umask():
+    result = os.umask(0o22)
+    os.umask(result)
+    return result
+
+def is_string_sequence(seq):
+    result = True
+    i = None
+    for i, s in enumerate(seq):
+        if not isinstance(s, string_types):
+            result = False
+            break
+    assert i is not None
+    return result
+
+PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
+                                      '([a-z0-9_.+-]+)', re.I)
+PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
+
+
+def split_filename(filename, project_name=None):
+    """
+    Extract name, version, python version from a filename (no extension)
+
+    Return name, version, pyver or None
+    """
+    result = None
+    pyver = None
+    filename = unquote(filename).replace(' ', '-')
+    m = PYTHON_VERSION.search(filename)
+    if m:
+        pyver = m.group(1)
+        filename = filename[:m.start()]
+    if project_name and len(filename) > len(project_name) + 1:
+        m = re.match(re.escape(project_name) + r'\b', filename)
+        if m:
+            n = m.end()
+            result = filename[:n], filename[n + 1:], pyver
+    if result is None:
+        m = PROJECT_NAME_AND_VERSION.match(filename)
+        if m:
+            result = m.group(1), m.group(3), pyver
+    return result
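+
+# E.g. (illustrative): split_filename('foo-1.0.2-py3.8') returns
+# ('foo', '1.0.2', '3.8'), and split_filename('foo-bar-2.1', 'foo-bar')
+# returns ('foo-bar', '2.1', None).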
+
+# Allow spaces in name because of legacy dists like "Twisted Core"
+NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
+                             r'\(\s*(?P<ver>[^\s)]+)\)$')
+
+def parse_name_and_version(p):
+    """
+    A utility method used to get name and version from a string.
+
+    From e.g. a Provides-Dist value.
+
+    :param p: A value in a form 'foo (1.0)'
+    :return: The name and version as a tuple.
+    """
+    m = NAME_VERSION_RE.match(p)
+    if not m:
+        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
+    d = m.groupdict()
+    return d['name'].strip().lower(), d['ver']
+
+def get_extras(requested, available):
+    result = set()
+    requested = set(requested or [])
+    available = set(available or [])
+    if '*' in requested:
+        requested.remove('*')
+        result |= available
+    for r in requested:
+        if r == '-':
+            result.add(r)
+        elif r.startswith('-'):
+            unwanted = r[1:]
+            if unwanted not in available:
+                logger.warning('undeclared extra: %s' % unwanted)
+            if unwanted in result:
+                result.remove(unwanted)
+        else:
+            if r not in available:
+                logger.warning('undeclared extra: %s' % r)
+            result.add(r)
+    return result
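+
+# E.g. (illustrative): get_extras(['*', '-tests'], ['security', 'tests'])
+# returns {'security'}: '*' pulls in every available extra and the '-tests'
+# entry then removes that one again.
+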
+#
+# Extended metadata functionality
+#
+
+def _get_external_data(url):
+    result = {}
+    try:
+        # urlopen might fail if it runs into redirections,
+        # because of Python issue #13696. Fixed in locators
+        # using a custom redirect handler.
+        resp = urlopen(url)
+        headers = resp.info()
+        ct = headers.get('Content-Type')
+        if not ct.startswith('application/json'):
+            logger.debug('Unexpected response for JSON request: %s', ct)
+        else:
+            reader = codecs.getreader('utf-8')(resp)
+            #data = reader.read().decode('utf-8')
+            #result = json.loads(data)
+            result = json.load(reader)
+    except Exception as e:
+        logger.exception('Failed to get external data for %s: %s', url, e)
+    return result
+
+_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'
+
+def get_project_data(name):
+    url = '%s/%s/project.json' % (name[0].upper(), name)
+    url = urljoin(_external_data_base_url, url)
+    result = _get_external_data(url)
+    return result
+
+def get_package_data(name, version):
+    url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
+    url = urljoin(_external_data_base_url, url)
+    return _get_external_data(url)
+
+
+class Cache(object):
+    """
+    A class implementing a cache for resources that need to live in the file system
+    e.g. shared libraries. This class was moved from resources to here because it
+    could be used by other modules, e.g. the wheel module.
+    """
+
+    def __init__(self, base):
+        """
+        Initialise an instance.
+
+        :param base: The base directory where the cache should be located.
+        """
+        # we use 'isdir' instead of 'exists', because we want to
+        # fail if there's a file with that name
+        if not os.path.isdir(base):  # pragma: no cover
+            os.makedirs(base)
+        if (os.stat(base).st_mode & 0o77) != 0:
+            logger.warning('Directory \'%s\' is not private', base)
+        self.base = os.path.abspath(os.path.normpath(base))
+
+    def prefix_to_dir(self, prefix):
+        """
+        Converts a resource prefix to a directory name in the cache.
+        """
+        return path_to_cache_dir(prefix)
+
+    def clear(self):
+        """
+        Clear the cache.
+        """
+        not_removed = []
+        for fn in os.listdir(self.base):
+            fn = os.path.join(self.base, fn)
+            try:
+                if os.path.islink(fn) or os.path.isfile(fn):
+                    os.remove(fn)
+                elif os.path.isdir(fn):
+                    shutil.rmtree(fn)
+            except Exception:
+                not_removed.append(fn)
+        return not_removed
+
+
+class EventMixin(object):
+    """
+    A very simple publish/subscribe system.
+    """
+    def __init__(self):
+        self._subscribers = {}
+
+    def add(self, event, subscriber, append=True):
+        """
+        Add a subscriber for an event.
+
+        :param event: The name of an event.
+        :param subscriber: The subscriber to be added (and called when the
+                           event is published).
+        :param append: Whether to append or prepend the subscriber to an
+                       existing subscriber list for the event.
+        """
+        subs = self._subscribers
+        if event not in subs:
+            subs[event] = deque([subscriber])
+        else:
+            sq = subs[event]
+            if append:
+                sq.append(subscriber)
+            else:
+                sq.appendleft(subscriber)
+
+    def remove(self, event, subscriber):
+        """
+        Remove a subscriber for an event.
+
+        :param event: The name of an event.
+        :param subscriber: The subscriber to be removed.
+        """
+        subs = self._subscribers
+        if event not in subs:
+            raise ValueError('No subscribers: %r' % event)
+        subs[event].remove(subscriber)
+
+    def get_subscribers(self, event):
+        """
+        Return an iterator for the subscribers for an event.
+        :param event: The event to return subscribers for.
+        """
+        return iter(self._subscribers.get(event, ()))
+
+    def publish(self, event, *args, **kwargs):
+        """
+        Publish an event and return a list of values returned by its
+        subscribers.
+
+        :param event: The event to publish.
+        :param args: The positional arguments to pass to the event's
+                     subscribers.
+        :param kwargs: The keyword arguments to pass to the event's
+                       subscribers.
+        """
+        result = []
+        for subscriber in self.get_subscribers(event):
+            try:
+                value = subscriber(event, *args, **kwargs)
+            except Exception:
+                logger.exception('Exception during event publication')
+                value = None
+            result.append(value)
+        logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
+                     event, args, kwargs, result)
+        return result
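+
+    # Illustrative usage sketch (hypothetical event name and subscriber):
+    #
+    #     bus = EventMixin()
+    #     bus.add('built', lambda event, name: 'built %s' % name)
+    #     bus.publish('built', 'foo')   # -> ['built foo']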
+
+#
+# Simple sequencing
+#
+class Sequencer(object):
+    def __init__(self):
+        self._preds = {}
+        self._succs = {}
+        self._nodes = set()     # nodes with no preds/succs
+
+    def add_node(self, node):
+        self._nodes.add(node)
+
+    def remove_node(self, node, edges=False):
+        if node in self._nodes:
+            self._nodes.remove(node)
+        if edges:
+            for p in set(self._preds.get(node, ())):
+                self.remove(p, node)
+            for s in set(self._succs.get(node, ())):
+                self.remove(node, s)
+            # Remove empties
+            for k, v in list(self._preds.items()):
+                if not v:
+                    del self._preds[k]
+            for k, v in list(self._succs.items()):
+                if not v:
+                    del self._succs[k]
+
+    def add(self, pred, succ):
+        assert pred != succ
+        self._preds.setdefault(succ, set()).add(pred)
+        self._succs.setdefault(pred, set()).add(succ)
+
+    def remove(self, pred, succ):
+        assert pred != succ
+        try:
+            preds = self._preds[succ]
+            succs = self._succs[pred]
+        except KeyError:  # pragma: no cover
+            raise ValueError('%r not a successor of anything' % succ)
+        try:
+            preds.remove(pred)
+            succs.remove(succ)
+        except KeyError:  # pragma: no cover
+            raise ValueError('%r not a successor of %r' % (succ, pred))
+
+    def is_step(self, step):
+        return (step in self._preds or step in self._succs or
+                step in self._nodes)
+
+    def get_steps(self, final):
+        if not self.is_step(final):
+            raise ValueError('Unknown: %r' % final)
+        result = []
+        todo = []
+        seen = set()
+        todo.append(final)
+        while todo:
+            step = todo.pop(0)
+            if step in seen:
+                # if a step was already seen,
+                # move it to the end (so it will appear earlier
+                # when reversed on return) ... but not for the
+                # final step, as that would be confusing for
+                # users
+                if step != final:
+                    result.remove(step)
+                    result.append(step)
+            else:
+                seen.add(step)
+                result.append(step)
+                preds = self._preds.get(step, ())
+                todo.extend(preds)
+        return reversed(result)
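+
+    # E.g. (illustrative): for dependency edges a -> b -> c,
+    #
+    #     seq = Sequencer()
+    #     seq.add('a', 'b')
+    #     seq.add('b', 'c')
+    #     list(seq.get_steps('c'))   # -> ['a', 'b', 'c']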
+
+    @property
+    def strong_connections(self):
+        #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
+        index_counter = [0]
+        stack = []
+        lowlinks = {}
+        index = {}
+        result = []
+
+        graph = self._succs
+
+        def strongconnect(node):
+            # set the depth index for this node to the smallest unused index
+            index[node] = index_counter[0]
+            lowlinks[node] = index_counter[0]
+            index_counter[0] += 1
+            stack.append(node)
+
+            # Consider successors
+            try:
+                successors = graph[node]
+            except Exception:
+                successors = []
+            for successor in successors:
+                if successor not in lowlinks:
+                    # Successor has not yet been visited
+                    strongconnect(successor)
+                    lowlinks[node] = min(lowlinks[node],lowlinks[successor])
+                elif successor in stack:
+                    # the successor is in the stack and hence in the current
+                    # strongly connected component (SCC)
+                    lowlinks[node] = min(lowlinks[node],index[successor])
+
+            # If `node` is a root node, pop the stack and generate an SCC
+            if lowlinks[node] == index[node]:
+                connected_component = []
+
+                while True:
+                    successor = stack.pop()
+                    connected_component.append(successor)
+                    if successor == node: break
+                component = tuple(connected_component)
+                # storing the result
+                result.append(component)
+
+        for node in graph:
+            if node not in lowlinks:
+                strongconnect(node)
+
+        return result
+
+    @property
+    def dot(self):
+        result = ['digraph G {']
+        for succ in self._preds:
+            preds = self._preds[succ]
+            for pred in preds:
+                result.append('  %s -> %s;' % (pred, succ))
+        for node in self._nodes:
+            result.append('  %s;' % node)
+        result.append('}')
+        return '\n'.join(result)
+
+#
+# Unarchiving functionality for zip, tar, tgz, tbz, whl
+#
+
+ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
+                      '.tgz', '.tbz', '.whl')
+
+def unarchive(archive_filename, dest_dir, format=None, check=True):
+
+    def check_path(path):
+        if not isinstance(path, text_type):
+            path = path.decode('utf-8')
+        p = os.path.abspath(os.path.join(dest_dir, path))
+        if not p.startswith(dest_dir) or p[plen] != os.sep:
+            raise ValueError('path outside destination: %r' % p)
+
+    dest_dir = os.path.abspath(dest_dir)
+    plen = len(dest_dir)
+    archive = None
+    if format is None:
+        if archive_filename.endswith(('.zip', '.whl')):
+            format = 'zip'
+        elif archive_filename.endswith(('.tar.gz', '.tgz')):
+            format = 'tgz'
+            mode = 'r:gz'
+        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
+            format = 'tbz'
+            mode = 'r:bz2'
+        elif archive_filename.endswith('.tar'):
+            format = 'tar'
+            mode = 'r'
+        else:  # pragma: no cover
+            raise ValueError('Unknown format for %r' % archive_filename)
+    try:
+        if format == 'zip':
+            archive = ZipFile(archive_filename, 'r')
+            if check:
+                names = archive.namelist()
+                for name in names:
+                    check_path(name)
+        else:
+            archive = tarfile.open(archive_filename, mode)
+            if check:
+                names = archive.getnames()
+                for name in names:
+                    check_path(name)
+        if format != 'zip' and sys.version_info[0] < 3:
+            # See Python issue 17153. If the dest path contains Unicode,
+            # tarfile extraction fails on Python 2.x if a member path name
+            # contains non-ASCII characters - it leads to an implicit
+            # bytes -> unicode conversion using ASCII to decode.
+            for tarinfo in archive.getmembers():
+                if not isinstance(tarinfo.name, text_type):
+                    tarinfo.name = tarinfo.name.decode('utf-8')
+        archive.extractall(dest_dir)
+
+    finally:
+        if archive:
+            archive.close()
+
+
+def zip_dir(directory):
+    """zip a directory tree into a BytesIO object"""
+    result = io.BytesIO()
+    dlen = len(directory)
+    with ZipFile(result, "w") as zf:
+        for root, dirs, files in os.walk(directory):
+            for name in files:
+                full = os.path.join(root, name)
+                rel = root[dlen:]
+                dest = os.path.join(rel, name)
+                zf.write(full, dest)
+    return result
+
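+# Usage sketch (illustrative; the directory path is hypothetical):
+#
+#     buf = zip_dir('/path/to/pkg')
+#     with ZipFile(buf) as zf:
+#         print(zf.namelist())
+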
+#
+# Simple progress bar
+#
+
+UNITS = ('', 'K', 'M', 'G', 'T', 'P')
+
+
+class Progress(object):
+    unknown = 'UNKNOWN'
+
+    def __init__(self, minval=0, maxval=100):
+        assert maxval is None or maxval >= minval
+        self.min = self.cur = minval
+        self.max = maxval
+        self.started = None
+        self.elapsed = 0
+        self.done = False
+
+    def update(self, curval):
+        assert self.min <= curval
+        assert self.max is None or curval <= self.max
+        self.cur = curval
+        now = time.time()
+        if self.started is None:
+            self.started = now
+        else:
+            self.elapsed = now - self.started
+
+    def increment(self, incr):
+        assert incr >= 0
+        self.update(self.cur + incr)
+
+    def start(self):
+        self.update(self.min)
+        return self
+
+    def stop(self):
+        if self.max is not None:
+            self.update(self.max)
+        self.done = True
+
+    @property
+    def maximum(self):
+        return self.unknown if self.max is None else self.max
+
+    @property
+    def percentage(self):
+        if self.done:
+            result = '100 %'
+        elif self.max is None:
+            result = ' ?? %'
+        else:
+            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
+            result = '%3d %%' % v
+        return result
+
+    def format_duration(self, duration):
+        if ((duration <= 0) and self.max is None) or self.cur == self.min:
+            result = '??:??:??'
+        #elif duration < 1:
+        #    result = '--:--:--'
+        else:
+            result = time.strftime('%H:%M:%S', time.gmtime(duration))
+        return result
+
+    @property
+    def ETA(self):
+        if self.done:
+            prefix = 'Done'
+            t = self.elapsed
+            #import pdb; pdb.set_trace()
+        else:
+            prefix = 'ETA '
+            if self.max is None:
+                t = -1
+            elif self.elapsed == 0 or (self.cur == self.min):
+                t = 0
+            else:
+                #import pdb; pdb.set_trace()
+                t = float(self.max - self.min)
+                t /= self.cur - self.min
+                t = (t - 1) * self.elapsed
+        return '%s: %s' % (prefix, self.format_duration(t))
+
+    @property
+    def speed(self):
+        if self.elapsed == 0:
+            result = 0.0
+        else:
+            result = (self.cur - self.min) / self.elapsed
+        for unit in UNITS:
+            if result < 1000:
+                break
+            result /= 1000.0
+        return '%d %sB/s' % (result, unit)
+
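+# Usage sketch (illustrative): drive a known-size loop and read the
+# derived properties afterwards.
+#
+#     p = Progress(maxval=1000).start()
+#     for _ in range(10):
+#         p.increment(100)          # e.g. bytes received
+#     p.stop()
+#     p.percentage                  # '100 %'
+#     p.ETA                         # 'Done: 00:00:00' (elapsed time)
+#     p.speed                       # e.g. '12 KB/s'
+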
+#
+# Glob functionality
+#
+
+RICH_GLOB = re.compile(r'\{([^}]*)\}')
+_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
+_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
+
+
+def iglob(path_glob):
+    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
+    if _CHECK_RECURSIVE_GLOB.search(path_glob):
+        msg = """invalid glob %r: recursive glob "**" must be used alone"""
+        raise ValueError(msg % path_glob)
+    if _CHECK_MISMATCH_SET.search(path_glob):
+        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
+        raise ValueError(msg % path_glob)
+    return _iglob(path_glob)
+
+
+def _iglob(path_glob):
+    rich_path_glob = RICH_GLOB.split(path_glob, 1)
+    if len(rich_path_glob) > 1:
+        assert len(rich_path_glob) == 3, rich_path_glob
+        prefix, set, suffix = rich_path_glob
+        for item in set.split(','):
+            for path in _iglob(''.join((prefix, item, suffix))):
+                yield path
+    else:
+        if '**' not in path_glob:
+            for item in std_iglob(path_glob):
+                yield item
+        else:
+            prefix, radical = path_glob.split('**', 1)
+            if prefix == '':
+                prefix = '.'
+            if radical == '':
+                radical = '*'
+            else:
+                # strip a leading separator; both '/' and '\' are
+                # supported after '**'
+                radical = radical.lstrip('/')
+                radical = radical.lstrip('\\')
+            for path, dir, files in os.walk(prefix):
+                path = os.path.normpath(path)
+                for fn in _iglob(os.path.join(path, radical)):
+                    yield fn
+
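+# Usage sketch (illustrative): the two extensions compose, e.g.
+#
+#     list(iglob('src/**/*.py'))        # recursive walk under src/
+#     list(iglob('docs/*.{md,rst}'))    # brace alternation
+#
+# whereas iglob('src/a**') raises ValueError, because '**' must stand
+# alone as a path component.
+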
+if ssl:
+    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
+                         CertificateError)
+
+
+#
+# HTTPSConnection which verifies certificates/matches domains
+#
+
+    class HTTPSConnection(httplib.HTTPSConnection):
+        ca_certs = None # set this to the path to the certs file (.pem)
+        check_domain = True # only used if ca_certs is not None
+
+        # noinspection PyPropertyAccess
+        def connect(self):
+            sock = socket.create_connection((self.host, self.port), self.timeout)
+            if getattr(self, '_tunnel_host', False):
+                self.sock = sock
+                self._tunnel()
+
+            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+            if hasattr(ssl, 'OP_NO_SSLv2'):
+                context.options |= ssl.OP_NO_SSLv2
+            if self.cert_file:
+                context.load_cert_chain(self.cert_file, self.key_file)
+            kwargs = {}
+            if self.ca_certs:
+                context.verify_mode = ssl.CERT_REQUIRED
+                context.load_verify_locations(cafile=self.ca_certs)
+                if getattr(ssl, 'HAS_SNI', False):
+                    kwargs['server_hostname'] = self.host
+
+            self.sock = context.wrap_socket(sock, **kwargs)
+            if self.ca_certs and self.check_domain:
+                try:
+                    match_hostname(self.sock.getpeercert(), self.host)
+                    logger.debug('Host verified: %s', self.host)
+                except CertificateError:  # pragma: no cover
+                    self.sock.shutdown(socket.SHUT_RDWR)
+                    self.sock.close()
+                    raise
+
+    class HTTPSHandler(BaseHTTPSHandler):
+        def __init__(self, ca_certs, check_domain=True):
+            BaseHTTPSHandler.__init__(self)
+            self.ca_certs = ca_certs
+            self.check_domain = check_domain
+
+        def _conn_maker(self, *args, **kwargs):
+            """
+            This is called to create a connection instance. Normally you'd
+            pass a connection class to do_open, but it doesn't actually check for
+            a class, and just expects a callable. As long as we behave just as a
+            constructor would have, we should be OK. If it ever changes so that
+            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
+            which just sets check_domain to False in the class definition, and
+            choose which one to pass to do_open.
+            """
+            result = HTTPSConnection(*args, **kwargs)
+            if self.ca_certs:
+                result.ca_certs = self.ca_certs
+                result.check_domain = self.check_domain
+            return result
+
+        def https_open(self, req):
+            try:
+                return self.do_open(self._conn_maker, req)
+            except URLError as e:
+                if 'certificate verify failed' in str(e.reason):
+                    raise CertificateError('Unable to verify server certificate '
+                                           'for %s' % req.host)
+                else:
+                    raise
+
+    #
+    # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
+    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
+    # HTML containing a http://xyz link when it should be https://xyz),
+    # you can use the following handler class, which does not allow HTTP traffic.
+    #
+    # It works by inheriting from HTTPHandler - so build_opener won't add a
+    # handler for HTTP itself.
+    #
+    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
+        def http_open(self, req):
+            raise URLError('Unexpected HTTP request on what should be a secure '
+                           'connection: %s' % req)
+
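+    # Usage sketch (illustrative; 'certs.pem' is a hypothetical CA bundle,
+    # and the Python 3 import spelling is assumed):
+    #
+    #     from urllib.request import build_opener
+    #     opener = build_opener(HTTPSOnlyHandler('certs.pem'))
+    #     opener.open('https://pypi.org/')   # a plain http:// URL would raise
+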
+#
+# XML-RPC with timeouts
+#
+class Transport(xmlrpclib.Transport):
+    def __init__(self, timeout, use_datetime=0):
+        self.timeout = timeout
+        xmlrpclib.Transport.__init__(self, use_datetime)
+
+    def make_connection(self, host):
+        h, eh, x509 = self.get_host_info(host)
+        if not self._connection or host != self._connection[0]:
+            self._extra_headers = eh
+            self._connection = host, httplib.HTTPConnection(h)
+        return self._connection[1]
+
+if ssl:
+    class SafeTransport(xmlrpclib.SafeTransport):
+        def __init__(self, timeout, use_datetime=0):
+            self.timeout = timeout
+            xmlrpclib.SafeTransport.__init__(self, use_datetime)
+
+        def make_connection(self, host):
+            h, eh, kwargs = self.get_host_info(host)
+            if not kwargs:
+                kwargs = {}
+            kwargs['timeout'] = self.timeout
+            if not self._connection or host != self._connection[0]:
+                self._extra_headers = eh
+                self._connection = host, httplib.HTTPSConnection(h, None,
+                                                                 **kwargs)
+            return self._connection[1]
+
+
+class ServerProxy(xmlrpclib.ServerProxy):
+    def __init__(self, uri, **kwargs):
+        self.timeout = timeout = kwargs.pop('timeout', None)
+        # The above classes only come into play if a timeout
+        # is specified
+        if timeout is not None:
+            # scheme = splittype(uri)  # deprecated as of Python 3.8
+            scheme = urlparse(uri)[0]
+            use_datetime = kwargs.get('use_datetime', 0)
+            if scheme == 'https':
+                tcls = SafeTransport
+            else:
+                tcls = Transport
+            kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
+            self.transport = t
+        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
+
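+# Usage sketch (illustrative; the endpoint URL is hypothetical):
+#
+#     proxy = ServerProxy('https://example.org/xmlrpc', timeout=10.0)
+#     proxy.some_method()    # fails fast instead of hanging forever
+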
+#
+# CSV functionality. This is provided because on 2.x, the csv module can't
+# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
+#
+
+def _csv_open(fn, mode, **kwargs):
+    if sys.version_info[0] < 3:
+        mode += 'b'
+    else:
+        kwargs['newline'] = ''
+        # Python 3 determines encoding from locale. Force 'utf-8'
+        # file encoding to match other forced utf-8 encoding
+        kwargs['encoding'] = 'utf-8'
+    return open(fn, mode, **kwargs)
+
+
+class CSVBase(object):
+    defaults = {
+        'delimiter': str(','),      # The strs are used because we need native
+        'quotechar': str('"'),      # str in the csv API (2.x won't take
+        'lineterminator': str('\n') # Unicode)
+    }
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_info):
+        self.stream.close()
+
+
+class CSVReader(CSVBase):
+    def __init__(self, **kwargs):
+        if 'stream' in kwargs:
+            stream = kwargs['stream']
+            if sys.version_info[0] >= 3:
+                # needs to be a text stream
+                stream = codecs.getreader('utf-8')(stream)
+            self.stream = stream
+        else:
+            self.stream = _csv_open(kwargs['path'], 'r')
+        self.reader = csv.reader(self.stream, **self.defaults)
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        result = next(self.reader)
+        if sys.version_info[0] < 3:
+            for i, item in enumerate(result):
+                if not isinstance(item, text_type):
+                    result[i] = item.decode('utf-8')
+        return result
+
+    __next__ = next
+
+class CSVWriter(CSVBase):
+    def __init__(self, fn, **kwargs):
+        self.stream = _csv_open(fn, 'w')
+        self.writer = csv.writer(self.stream, **self.defaults)
+
+    def writerow(self, row):
+        if sys.version_info[0] < 3:
+            r = []
+            for item in row:
+                if isinstance(item, text_type):
+                    item = item.encode('utf-8')
+                r.append(item)
+            row = r
+        self.writer.writerow(row)
+
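+# Usage sketch (illustrative; 'RECORD' is a hypothetical path):
+#
+#     with CSVWriter('RECORD') as writer:
+#         writer.writerow(['pkg/__init__.py', 'sha256=...', '123'])
+#     with CSVReader(path='RECORD') as reader:
+#         for row in reader:
+#             print(row)     # rows come back as text on 2.x and 3.x
+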
+#
+#   Configurator functionality
+#
+
+class Configurator(BaseConfigurator):
+
+    value_converters = dict(BaseConfigurator.value_converters)
+    value_converters['inc'] = 'inc_convert'
+
+    def __init__(self, config, base=None):
+        super(Configurator, self).__init__(config)
+        self.base = base or os.getcwd()
+
+    def configure_custom(self, config):
+        def convert(o):
+            if isinstance(o, (list, tuple)):
+                result = type(o)([convert(i) for i in o])
+            elif isinstance(o, dict):
+                if '()' in o:
+                    result = self.configure_custom(o)
+                else:
+                    result = {}
+                    for k in o:
+                        result[k] = convert(o[k])
+            else:
+                result = self.convert(o)
+            return result
+
+        c = config.pop('()')
+        if not callable(c):
+            c = self.resolve(c)
+        props = config.pop('.', None)
+        # Check for valid identifiers
+        args = config.pop('[]', ())
+        if args:
+            args = tuple([convert(o) for o in args])
+        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
+        kwargs = dict(items)
+        result = c(*args, **kwargs)
+        if props:
+            for n, v in props.items():
+                setattr(result, n, convert(v))
+        return result
+
+    def __getitem__(self, key):
+        result = self.config[key]
+        if isinstance(result, dict) and '()' in result:
+            self.config[key] = result = self.configure_custom(result)
+        return result
+
+    def inc_convert(self, value):
+        """Default converter for the inc:// protocol."""
+        if not os.path.isabs(value):
+            value = os.path.join(self.base, value)
+        with codecs.open(value, 'r', encoding='utf-8') as f:
+            result = json.load(f)
+        return result
+
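+# Usage sketch (illustrative, assuming BaseConfigurator's logging.config
+# style semantics): '()' names a callable to instantiate, '[]' supplies
+# positional arguments, and remaining keys become keyword arguments.
+#
+#     cfg = Configurator({'answer': {'()': 'fractions.Fraction',
+#                                    '[]': ['3/4']}})
+#     cfg['answer']    # -> Fraction(3, 4), built lazily on first access
+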
+
+class SubprocessMixin(object):
+    """
+    Mixin for running subprocesses and capturing their output
+    """
+    def __init__(self, verbose=False, progress=None):
+        self.verbose = verbose
+        self.progress = progress
+
+    def reader(self, stream, context):
+        """
+        Read lines from a subprocess' output stream and either pass to a progress
+        callable (if specified) or write progress information to sys.stderr.
+        """
+        progress = self.progress
+        verbose = self.verbose
+        while True:
+            s = stream.readline()
+            if not s:
+                break
+            if progress is not None:
+                progress(s, context)
+            else:
+                if not verbose:
+                    sys.stderr.write('.')
+                else:
+                    sys.stderr.write(s.decode('utf-8'))
+                sys.stderr.flush()
+        stream.close()
+
+    def run_command(self, cmd, **kwargs):
+        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE, **kwargs)
+        t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
+        t1.start()
+        t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
+        t2.start()
+        p.wait()
+        t1.join()
+        t2.join()
+        if self.progress is not None:
+            self.progress('done.', 'main')
+        elif self.verbose:
+            sys.stderr.write('done.\n')
+        return p
+
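+# Usage sketch (illustrative):
+#
+#     import sys
+#     runner = SubprocessMixin(verbose=True)
+#     p = runner.run_command([sys.executable, '--version'])
+#     p.returncode     # 0 on success
+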
+
+def normalize_name(name):
+    """Normalize a python package name a la PEP 503"""
+    # https://www.python.org/dev/peps/pep-0503/#normalized-names
+    return re.sub('[-_.]+', '-', name).lower()
+
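+# For example, normalize_name('Foo__Bar.baz') == 'foo-bar-baz': runs of
+# '-', '_' and '.' collapse to a single '-', and the result is lowercased.
+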
+# def _get_pypirc_command():
+    # """
+    # Get the distutils command for interacting with PyPI configurations.
+    # :return: the command.
+    # """
+    # from distutils.core import Distribution
+    # from distutils.config import PyPIRCCommand
+    # d = Distribution()
+    # return PyPIRCCommand(d)
+
+class PyPIRCFile(object):
+
+    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
+    DEFAULT_REALM = 'pypi'
+
+    def __init__(self, fn=None, url=None):
+        if fn is None:
+            fn = os.path.join(os.path.expanduser('~'), '.pypirc')
+        self.filename = fn
+        self.url = url
+
+    def read(self):
+        result = {}
+
+        if os.path.exists(self.filename):
+            repository = self.url or self.DEFAULT_REPOSITORY
+
+            config = configparser.RawConfigParser()
+            config.read(self.filename)
+            sections = config.sections()
+            if 'distutils' in sections:
+                # let's get the list of servers
+                index_servers = config.get('distutils', 'index-servers')
+                _servers = [server.strip() for server in
+                            index_servers.split('\n')
+                            if server.strip() != '']
+                if _servers == []:
+                    # nothing set, let's try to get the default pypi
+                    if 'pypi' in sections:
+                        _servers = ['pypi']
+                else:
+                    for server in _servers:
+                        result = {'server': server}
+                        result['username'] = config.get(server, 'username')
+
+                        # optional params
+                        for key, default in (('repository', self.DEFAULT_REPOSITORY),
+                                             ('realm', self.DEFAULT_REALM),
+                                             ('password', None)):
+                            if config.has_option(server, key):
+                                result[key] = config.get(server, key)
+                            else:
+                                result[key] = default
+
+                        # work around people having "repository" for the "pypi"
+                        # section of their config set to the HTTP (rather than
+                        # HTTPS) URL
+                        if (server == 'pypi' and
+                            repository in (self.DEFAULT_REPOSITORY, 'pypi')):
+                            result['repository'] = self.DEFAULT_REPOSITORY
+                        elif (result['server'] != repository and
+                              result['repository'] != repository):
+                            result = {}
+            elif 'server-login' in sections:
+                # old format
+                server = 'server-login'
+                if config.has_option(server, 'repository'):
+                    repository = config.get(server, 'repository')
+                else:
+                    repository = self.DEFAULT_REPOSITORY
+                result = {
+                    'username': config.get(server, 'username'),
+                    'password': config.get(server, 'password'),
+                    'repository': repository,
+                    'server': server,
+                    'realm': self.DEFAULT_REALM
+                }
+        return result
+
+    def update(self, username, password):
+        # import pdb; pdb.set_trace()
+        config = configparser.RawConfigParser()
+        fn = self.filename
+        config.read(fn)
+        if not config.has_section('pypi'):
+            config.add_section('pypi')
+        config.set('pypi', 'username', username)
+        config.set('pypi', 'password', password)
+        with open(fn, 'w') as f:
+            config.write(f)
+
+def _load_pypirc(index):
+    """
+    Read the PyPI access configuration as supported by distutils.
+    """
+    return PyPIRCFile(url=index.url).read()
+
+def _store_pypirc(index):
+    PyPIRCFile().update(index.username, index.password)
+
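+# Usage sketch (illustrative): 'index' is any object exposing a 'url'
+# attribute (plus 'username'/'password' for _store_pypirc):
+#
+#     creds = _load_pypirc(index)
+#     creds.get('username'), creds.get('password')
+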
+#
+# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
+# tweaks
+#
+
+def get_host_platform():
+    """Return a string that identifies the current platform.  This is used mainly to
+    distinguish platform-specific build directories and platform-specific built
+    distributions.  Typically includes the OS name and version and the
+    architecture (as supplied by 'os.uname()'), although the exact information
+    included depends on the OS; e.g. on Linux, the kernel version isn't
+    particularly important.
+
+    Examples of returned values:
+       linux-i586
+       linux-alpha (?)
+       solaris-2.6-sun4u
+
+    Windows will return one of:
+       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc))
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+
+    """
+    if os.name == 'nt':
+        if 'amd64' in sys.version.lower():
+            return 'win-amd64'
+        if '(arm)' in sys.version.lower():
+            return 'win-arm32'
+        if '(arm64)' in sys.version.lower():
+            return 'win-arm64'
+        return sys.platform
+
+    # Set for cross builds explicitly
+    if "_PYTHON_HOST_PLATFORM" in os.environ:
+        return os.environ["_PYTHON_HOST_PLATFORM"]
+
+    if os.name != 'posix' or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha,
+        # Mac OS is M68k or PPC, etc.
+        return sys.platform
+
+    # Try to distinguish various flavours of Unix
+
+    (osname, host, release, version, machine) = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters, and translate
+    # spaces (for "Power Macintosh")
+    osname = osname.lower().replace('/', '')
+    machine = machine.replace(' ', '_').replace('/', '-')
+
+    if osname[:5] == 'linux':
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return "%s-%s" % (osname, machine)
+
+    elif osname[:5] == 'sunos':
+        if release[0] >= '5':           # SunOS 5 == Solaris 2
+            osname = 'solaris'
+            release = '%d.%s' % (int(release[0]) - 3, release[2:])
+            # We can't use 'platform.architecture()[0]' because of a
+            # bootstrap problem. We use a dict to get an error
+            # if something suspicious happens.
+            bitness = {2147483647:'32bit', 9223372036854775807:'64bit'}
+            machine += '.%s' % bitness[sys.maxsize]
+        # fall through to standard osname-release-machine representation
+    elif osname[:3] == 'aix':
+        from _aix_support import aix_platform
+        return aix_platform()
+    elif osname[:6] == 'cygwin':
+        osname = 'cygwin'
+        rel_re = re.compile(r'[\d.]+', re.ASCII)
+        m = rel_re.match(release)
+        if m:
+            release = m.group()
+    elif osname[:6] == 'darwin':
+        import _osx_support, distutils.sysconfig
+        osname, release, machine = _osx_support.get_platform_osx(
+                                        distutils.sysconfig.get_config_vars(),
+                                        osname, release, machine)
+
+    return '%s-%s-%s' % (osname, release, machine)
+
+
+_TARGET_TO_PLAT = {
+    'x86' : 'win32',
+    'x64' : 'win-amd64',
+    'arm' : 'win-arm32',
+}
+
+
+def get_platform():
+    if os.name != 'nt':
+        return get_host_platform()
+    cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH')
+    if cross_compilation_target not in _TARGET_TO_PLAT:
+        return get_host_platform()
+    return _TARGET_TO_PLAT[cross_compilation_target]
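+# Usage sketch (illustrative): on a typical 64-bit Linux box this returns
+# 'linux-x86_64'; in a Visual Studio cross-compilation shell on Windows,
+# VSCMD_ARG_TGT_ARCH ('x86', 'x64' or 'arm') selects the target instead:
+#
+#     get_platform()    # e.g. 'linux-x86_64' or 'win-amd64'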
diff --git a/venv/lib/python3.8/site-packages/distlib/version.py b/venv/lib/python3.8/site-packages/distlib/version.py
new file mode 100644
index 0000000..c7c8bb6
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/version.py
@@ -0,0 +1,739 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Implementation of a flexible versioning scheme providing support for PEP-440,
+setuptools-compatible and semantic versioning.
+"""
+
+import logging
+import re
+
+from .compat import string_types
+from .util import parse_requirement
+
+__all__ = ['NormalizedVersion', 'NormalizedMatcher',
+           'LegacyVersion', 'LegacyMatcher',
+           'SemanticVersion', 'SemanticMatcher',
+           'UnsupportedVersionError', 'get_scheme']
+
+logger = logging.getLogger(__name__)
+
+
+class UnsupportedVersionError(ValueError):
+    """This is an unsupported version."""
+    pass
+
+
+class Version(object):
+    def __init__(self, s):
+        self._string = s = s.strip()
+        self._parts = parts = self.parse(s)
+        assert isinstance(parts, tuple)
+        assert len(parts) > 0
+
+    def parse(self, s):
+        raise NotImplementedError('please implement in a subclass')
+
+    def _check_compatible(self, other):
+        if type(self) != type(other):
+            raise TypeError('cannot compare %r and %r' % (self, other))
+
+    def __eq__(self, other):
+        self._check_compatible(other)
+        return self._parts == other._parts
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __lt__(self, other):
+        self._check_compatible(other)
+        return self._parts < other._parts
+
+    def __gt__(self, other):
+        return not (self.__lt__(other) or self.__eq__(other))
+
+    def __le__(self, other):
+        return self.__lt__(other) or self.__eq__(other)
+
+    def __ge__(self, other):
+        return self.__gt__(other) or self.__eq__(other)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    def __hash__(self):
+        return hash(self._parts)
+
+    def __repr__(self):
+        return "%s('%s')" % (self.__class__.__name__, self._string)
+
+    def __str__(self):
+        return self._string
+
+    @property
+    def is_prerelease(self):
+        raise NotImplementedError('Please implement in subclasses.')
+
+
+class Matcher(object):
+    version_class = None
+
+    # value is either a callable or the name of a method
+    _operators = {
+        '<': lambda v, c, p: v < c,
+        '>': lambda v, c, p: v > c,
+        '<=': lambda v, c, p: v == c or v < c,
+        '>=': lambda v, c, p: v == c or v > c,
+        '==': lambda v, c, p: v == c,
+        '===': lambda v, c, p: v == c,
+        # by default, compatible => >=.
+        '~=': lambda v, c, p: v == c or v > c,
+        '!=': lambda v, c, p: v != c,
+    }
+
+    # this is a method only to support alternative implementations
+    # via overriding
+    def parse_requirement(self, s):
+        return parse_requirement(s)
+
+    def __init__(self, s):
+        if self.version_class is None:
+            raise ValueError('Please specify a version class')
+        self._string = s = s.strip()
+        r = self.parse_requirement(s)
+        if not r:
+            raise ValueError('Not valid: %r' % s)
+        self.name = r.name
+        self.key = self.name.lower()    # for case-insensitive comparisons
+        clist = []
+        if r.constraints:
+            # import pdb; pdb.set_trace()
+            for op, s in r.constraints:
+                if s.endswith('.*'):
+                    if op not in ('==', '!='):
+                        raise ValueError('\'.*\' not allowed for '
+                                         '%r constraints' % op)
+                    # Could be a partial version (e.g. for '2.*') which
+                    # won't parse as a version, so keep it as a string
+                    vn, prefix = s[:-2], True
+                    # Just to check that vn is a valid version
+                    self.version_class(vn)
+                else:
+                    # Should parse as a version, so we can create an
+                    # instance for the comparison
+                    vn, prefix = self.version_class(s), False
+                clist.append((op, vn, prefix))
+        self._parts = tuple(clist)
+
+    def match(self, version):
+        """
+        Check if the provided version matches the constraints.
+
+        :param version: The version to match against this instance.
+        :type version: String or :class:`Version` instance.
+        """
+        if isinstance(version, string_types):
+            version = self.version_class(version)
+        for operator, constraint, prefix in self._parts:
+            f = self._operators.get(operator)
+            if isinstance(f, string_types):
+                f = getattr(self, f)
+            if not f:
+                msg = ('%r not implemented '
+                       'for %s' % (operator, self.__class__.__name__))
+                raise NotImplementedError(msg)
+            if not f(version, constraint, prefix):
+                return False
+        return True
+
+    @property
+    def exact_version(self):
+        result = None
+        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
+            result = self._parts[0][1]
+        return result
+
+    def _check_compatible(self, other):
+        if type(self) != type(other) or self.name != other.name:
+            raise TypeError('cannot compare %s and %s' % (self, other))
+
+    def __eq__(self, other):
+        self._check_compatible(other)
+        return self.key == other.key and self._parts == other._parts
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    def __hash__(self):
+        return hash(self.key) + hash(self._parts)
+
+    def __repr__(self):
+        return "%s(%r)" % (self.__class__.__name__, self._string)
+
+    def __str__(self):
+        return self._string
+
+
+PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
+                               r'(\.(post)(\d+))?(\.(dev)(\d+))?'
+                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')
+
+
+def _pep_440_key(s):
+    s = s.strip()
+    m = PEP440_VERSION_RE.match(s)
+    if not m:
+        raise UnsupportedVersionError('Not a valid version: %s' % s)
+    groups = m.groups()
+    nums = tuple(int(v) for v in groups[1].split('.'))
+    while len(nums) > 1 and nums[-1] == 0:
+        nums = nums[:-1]
+
+    if not groups[0]:
+        epoch = 0
+    else:
+        epoch = int(groups[0][:-1])
+    pre = groups[4:6]
+    post = groups[7:9]
+    dev = groups[10:12]
+    local = groups[13]
+    if pre == (None, None):
+        pre = ()
+    else:
+        pre = pre[0], int(pre[1])
+    if post == (None, None):
+        post = ()
+    else:
+        post = post[0], int(post[1])
+    if dev == (None, None):
+        dev = ()
+    else:
+        dev = dev[0], int(dev[1])
+    if local is None:
+        local = ()
+    else:
+        parts = []
+        for part in local.split('.'):
+            # to ensure that numeric compares as > lexicographic, avoid
+            # comparing them directly, but encode a tuple which ensures
+            # correct sorting
+            if part.isdigit():
+                part = (1, int(part))
+            else:
+                part = (0, part)
+            parts.append(part)
+        local = tuple(parts)
+    if not pre:
+        # either before pre-release, or final release and after
+        if not post and dev:
+            # before pre-release
+            pre = ('a', -1)     # to sort before a0
+        else:
+            pre = ('z',)        # to sort after all pre-releases
+    # now look at the state of post and dev.
+    if not post:
+        post = ('_',)   # sort before 'a'
+    if not dev:
+        dev = ('final',)
+
+    #print('%s -> %s' % (s, m.groups()))
+    return epoch, nums, pre, post, dev, local
+
+
+_normalized_key = _pep_440_key
+
+
+class NormalizedVersion(Version):
+    """A rational version.
+
+    Good:
+        1.2         # equivalent to "1.2.0"
+        1.2.0
+        1.2a1
+        1.2.3a2
+        1.2.3b1
+        1.2.3c1
+        1.2.3.4
+        TODO: fill this out
+
+    Bad:
+        1           # minimum two numbers
+        1.2a        # release level must have a release serial
+        1.2.3b
+    """
+    def parse(self, s):
+        result = _normalized_key(s)
+        # _normalized_key loses trailing zeroes in the release
+        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
+        # However, PEP 440 prefix matching needs it: for example,
+        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
+        m = PEP440_VERSION_RE.match(s)      # must succeed
+        groups = m.groups()
+        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
+        return result
+
+    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])
+
+    @property
+    def is_prerelease(self):
+        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
+
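+# Usage sketch (illustrative): PEP 440 ordering falls out of the parsed
+# key, e.g.
+#
+#     NormalizedVersion('1.2.0') == NormalizedVersion('1.2')    # True
+#     NormalizedVersion('1.2a1') < NormalizedVersion('1.2')     # True
+#     NormalizedVersion('1.2.dev3').is_prerelease               # True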
+
+def _match_prefix(x, y):
+    x = str(x)
+    y = str(y)
+    if x == y:
+        return True
+    if not x.startswith(y):
+        return False
+    n = len(y)
+    return x[n] == '.'
+
+
+class NormalizedMatcher(Matcher):
+    version_class = NormalizedVersion
+
+    # value is either a callable or the name of a method
+    _operators = {
+        '~=': '_match_compatible',
+        '<': '_match_lt',
+        '>': '_match_gt',
+        '<=': '_match_le',
+        '>=': '_match_ge',
+        '==': '_match_eq',
+        '===': '_match_arbitrary',
+        '!=': '_match_ne',
+    }
+
+    def _adjust_local(self, version, constraint, prefix):
+        if prefix:
+            strip_local = '+' not in constraint and version._parts[-1]
+        else:
+            # both constraint and version are
+            # NormalizedVersion instances.
+            # If constraint does not have a local component,
+            # ensure the version doesn't, either.
+            strip_local = not constraint._parts[-1] and version._parts[-1]
+        if strip_local:
+            s = version._string.split('+', 1)[0]
+            version = self.version_class(s)
+        return version, constraint
+
+    def _match_lt(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version >= constraint:
+            return False
+        release_clause = constraint._release_clause
+        pfx = '.'.join([str(i) for i in release_clause])
+        return not _match_prefix(version, pfx)
+
+    def _match_gt(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version <= constraint:
+            return False
+        release_clause = constraint._release_clause
+        pfx = '.'.join([str(i) for i in release_clause])
+        return not _match_prefix(version, pfx)
+
+    def _match_le(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        return version <= constraint
+
+    def _match_ge(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        return version >= constraint
+
+    def _match_eq(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if not prefix:
+            result = (version == constraint)
+        else:
+            result = _match_prefix(version, constraint)
+        return result
+
+    def _match_arbitrary(self, version, constraint, prefix):
+        return str(version) == str(constraint)
+
+    def _match_ne(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if not prefix:
+            result = (version != constraint)
+        else:
+            result = not _match_prefix(version, constraint)
+        return result
+
+    def _match_compatible(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version == constraint:
+            return True
+        if version < constraint:
+            return False
+#        if not prefix:
+#            return True
+        release_clause = constraint._release_clause
+        if len(release_clause) > 1:
+            release_clause = release_clause[:-1]
+        pfx = '.'.join([str(i) for i in release_clause])
+        return _match_prefix(version, pfx)
+
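+# Usage sketch (illustrative): '~=' implements PEP 440's compatible
+# release clause, e.g.
+#
+#     m = NormalizedMatcher('foo (~= 1.4.2)')
+#     m.match('1.4.7')    # True  -- same 1.4 release series
+#     m.match('1.5.0')    # False
+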
+_REPLACEMENTS = (
+    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
+    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
+    (re.compile('^[.-]'), ''),                      # remove leading puncts
+    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
+    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
+    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading r(ev)
+    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
+    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
+    (re.compile(r'\b(pre-alpha|prealpha)\b'),
+                'pre.alpha'),                       # standardise
+    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
+)
+
+_SUFFIX_REPLACEMENTS = (
+    (re.compile('^[:~._+-]+'), ''),                   # remove leading puncts
+    (re.compile('[,*")([\\]]'), ''),                  # remove unwanted chars
+    (re.compile('[~:+_ -]'), '.'),                    # replace illegal chars
+    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
+    (re.compile(r'\.$'), ''),                       # trailing '.'
+)
+
+_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
+
+
+def _suggest_semantic_version(s):
+    """
+    Try to suggest a semantic form for a version for which
+    _suggest_normalized_version couldn't come up with anything.
+    """
+    result = s.strip().lower()
+    for pat, repl in _REPLACEMENTS:
+        result = pat.sub(repl, result)
+    if not result:
+        result = '0.0.0'
+
+    # Now look for numeric prefix, and separate it out from
+    # the rest.
+    #import pdb; pdb.set_trace()
+    m = _NUMERIC_PREFIX.match(result)
+    if not m:
+        prefix = '0.0.0'
+        suffix = result
+    else:
+        prefix = m.groups()[0].split('.')
+        prefix = [int(i) for i in prefix]
+        while len(prefix) < 3:
+            prefix.append(0)
+        if len(prefix) == 3:
+            suffix = result[m.end():]
+        else:
+            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
+            prefix = prefix[:3]
+        prefix = '.'.join([str(i) for i in prefix])
+        suffix = suffix.strip()
+    if suffix:
+        #import pdb; pdb.set_trace()
+        # massage the suffix.
+        for pat, repl in _SUFFIX_REPLACEMENTS:
+            suffix = pat.sub(repl, suffix)
+
+    if not suffix:
+        result = prefix
+    else:
+        sep = '-' if 'dev' in suffix else '+'
+        result = prefix + sep + suffix
+    if not is_semver(result):
+        result = None
+    return result
+
+
+def _suggest_normalized_version(s):
+    """Suggest a normalized version close to the given version string.
+
+    If you have a version string that isn't rational (i.e. NormalizedVersion
+    doesn't like it) then you might be able to get an equivalent (or close)
+    rational version from this function.
+
+    This does a number of simple normalizations to the given string, based
+    on observation of versions currently in use on PyPI. Given a dump of
+    those versions during PyCon 2009, of the 4287 versions in the dump:
+    - 2312 (53.93%) match NormalizedVersion without change
+    - 3474 (81.04%) match when using this suggestion method
+
+    @param s {str} An irrational version string.
+    @returns A rational version string, or None, if couldn't determine one.
+    """
+    try:
+        _normalized_key(s)
+        return s   # already rational
+    except UnsupportedVersionError:
+        pass
+
+    rs = s.lower()
+
+    # part of this could use maketrans
+    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
+                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
+                       ('-pre', 'c'),
+                       ('-release', ''), ('.release', ''), ('-stable', ''),
+                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
+                       ('final', '')):
+        rs = rs.replace(orig, repl)
+
+    # if something ends with dev or pre, we add a 0
+    rs = re.sub(r"pre$", r"pre0", rs)
+    rs = re.sub(r"dev$", r"dev0", rs)
+
+    # if we have something like "b-2" or "a.2" at the end of the
+    # version, that is probably beta, alpha, etc
+    # let's remove the dash or dot
+    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
+
+    # 1.0-dev-r371 -> 1.0.dev371
+    # 0.1-dev-r79 -> 0.1.dev79
+    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
+
+    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
+    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
+
+    # Clean: v0.3, v1.0
+    if rs.startswith('v'):
+        rs = rs[1:]
+
+    # Clean leading '0's on numbers.
+    #TODO: unintended side-effect on, e.g., "2003.05.09"
+    # PyPI stats: 77 (~2%) better
+    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
+
+    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
+    # zero.
+    # PyPI stats: 245 (7.56%) better
+    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
+
+    # the 'dev-rNNN' tag is a dev tag
+    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
+
+    # clean the - when used as a pre delimiter
+    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
+
+    # a terminal "dev" or "devel" can be changed into ".dev0"
+    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
+
+    # a terminal "dev" can be changed into ".dev0"
+    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
+
+    # a terminal "final" or "stable" can be removed
+    rs = re.sub(r"(final|stable)$", "", rs)
+
+    # The 'r' and the '-' tags are post release tags
+    #   0.4a1.r10       ->  0.4a1.post10
+    #   0.9.33-17222    ->  0.9.33.post17222
+    #   0.9.33-r17222   ->  0.9.33.post17222
+    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
+
+    # Clean 'r' instead of 'dev' usage:
+    #   0.9.33+r17222   ->  0.9.33.dev17222
+    #   1.0dev123       ->  1.0.dev123
+    #   1.0.git123      ->  1.0.dev123
+    #   1.0.bzr123      ->  1.0.dev123
+    #   0.1a0dev.123    ->  0.1a0.dev123
+    # PyPI stats:  ~150 (~4%) better
+    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
+
+    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
+    #   0.2.pre1        ->  0.2c1
+    #   0.2-c1         ->  0.2c1
+    #   1.0preview123   ->  1.0c123
+    # PyPI stats: ~21 (0.62%) better
+    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
+
+    # Tcl/Tk uses "px" for their post release markers
+    rs = re.sub(r"p(\d+)$", r".post\1", rs)
+
+    try:
+        _normalized_key(rs)
+    except UnsupportedVersionError:
+        rs = None
+    return rs
+
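+# For example (illustrative, exercising the transforms above):
+#
+#     _suggest_normalized_version('1.0-dev-r371')    # -> '1.0.dev371'
+#     _suggest_normalized_version('v0.3')            # -> '0.3'
+#     _suggest_normalized_version('utter junk')      # -> None
+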
+#
+#   Legacy version processing (distribute-compatible)
+#
+
+_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
+_VERSION_REPLACE = {
+    'pre': 'c',
+    'preview': 'c',
+    '-': 'final-',
+    'rc': 'c',
+    'dev': '@',
+    '': None,
+    '.': None,
+}
+
+
+def _legacy_key(s):
+    def get_parts(s):
+        result = []
+        for p in _VERSION_PART.split(s.lower()):
+            p = _VERSION_REPLACE.get(p, p)
+            if p:
+                if '0' <= p[:1] <= '9':
+                    p = p.zfill(8)
+                else:
+                    p = '*' + p
+                result.append(p)
+        result.append('*final')
+        return result
+
+    result = []
+    for p in get_parts(s):
+        if p.startswith('*'):
+            if p < '*final':
+                while result and result[-1] == '*final-':
+                    result.pop()
+            while result and result[-1] == '00000000':
+                result.pop()
+        result.append(p)
+    return tuple(result)
+
+
+class LegacyVersion(Version):
+    def parse(self, s):
+        return _legacy_key(s)
+
+    @property
+    def is_prerelease(self):
+        result = False
+        for x in self._parts:
+            if (isinstance(x, string_types) and x.startswith('*') and
+                x < '*final'):
+                result = True
+                break
+        return result
+
+
+class LegacyMatcher(Matcher):
+    version_class = LegacyVersion
+
+    _operators = dict(Matcher._operators)
+    _operators['~='] = '_match_compatible'
+
+    numeric_re = re.compile(r'^(\d+(\.\d+)*)')
+
+    def _match_compatible(self, version, constraint, prefix):
+        if version < constraint:
+            return False
+        m = self.numeric_re.match(str(constraint))
+        if not m:
+            logger.warning('Cannot compute compatible match for version %s '
+                           'and constraint %s', version, constraint)
+            return True
+        s = m.groups()[0]
+        if '.' in s:
+            s = s.rsplit('.', 1)[0]
+        return _match_prefix(version, s)
+
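+# Usage sketch (illustrative): zero-filling makes numeric parts compare
+# numerically instead of lexicographically, e.g.
+#
+#     LegacyVersion('1.5.10') > LegacyVersion('1.5.2')    # True
+#     LegacyVersion('1.0a1').is_prerelease                # True
+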
+#
+#   Semantic versioning
+#
+
+_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
+                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
+                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
+
+
+def is_semver(s):
+    return _SEMVER_RE.match(s)
+
+
+def _semantic_key(s):
+    def make_tuple(s, absent):
+        if s is None:
+            result = (absent,)
+        else:
+            parts = s[1:].split('.')
+            # We can't compare ints and strings on Python 3, so fudge it
+            # by zero-filling numeric values to simulate a numeric comparison
+            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
+        return result
+
+    m = is_semver(s)
+    if not m:
+        raise UnsupportedVersionError(s)
+    groups = m.groups()
+    major, minor, patch = [int(i) for i in groups[:3]]
+    # choose the '|' and '*' so that versions sort correctly
+    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
+    return (major, minor, patch), pre, build
+
+
+class SemanticVersion(Version):
+    def parse(self, s):
+        return _semantic_key(s)
+
+    @property
+    def is_prerelease(self):
+        return self._parts[1][0] != '|'
+
+
+class SemanticMatcher(Matcher):
+    version_class = SemanticVersion
+
+
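+# Usage sketch (illustrative):
+#
+#     SemanticVersion('1.2.3-rc.1').is_prerelease                 # True
+#     SemanticVersion('1.2.3-rc.1') < SemanticVersion('1.2.3')    # True
+#     SemanticVersion('1.2.3+build.5').is_prerelease              # False
+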
+class VersionScheme(object):
+    def __init__(self, key, matcher, suggester=None):
+        self.key = key
+        self.matcher = matcher
+        self.suggester = suggester
+
+    def is_valid_version(self, s):
+        try:
+            self.matcher.version_class(s)
+            result = True
+        except UnsupportedVersionError:
+            result = False
+        return result
+
+    def is_valid_matcher(self, s):
+        try:
+            self.matcher(s)
+            result = True
+        except UnsupportedVersionError:
+            result = False
+        return result
+
+    def is_valid_constraint_list(self, s):
+        """
+        Used for processing some metadata fields
+        """
+        # See issue #140. Be tolerant of a single trailing comma.
+        if s.endswith(','):
+            s = s[:-1]
+        return self.is_valid_matcher('dummy_name (%s)' % s)
+
+    def suggest(self, s):
+        if self.suggester is None:
+            result = None
+        else:
+            result = self.suggester(s)
+        return result
+
+_SCHEMES = {
+    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
+                                _suggest_normalized_version),
+    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
+    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
+                              _suggest_semantic_version),
+}
+
+_SCHEMES['default'] = _SCHEMES['normalized']
+
+
+def get_scheme(name):
+    if name not in _SCHEMES:
+        raise ValueError('unknown scheme name: %r' % name)
+    return _SCHEMES[name]
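+
+# Usage sketch (illustrative):
+#
+#     scheme = get_scheme('default')               # the 'normalized' scheme
+#     scheme.is_valid_version('1.0.post1')         # True
+#     scheme.is_valid_version('not a version')     # False
+#     scheme.suggest('1.0-final')                  # -> '1.0'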
diff --git a/venv/lib/python3.8/site-packages/distlib/w32.exe b/venv/lib/python3.8/site-packages/distlib/w32.exe
new file mode 100644
index 0000000..4ee2d3a
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/w32.exe differ
diff --git a/venv/lib/python3.8/site-packages/distlib/w64-arm.exe b/venv/lib/python3.8/site-packages/distlib/w64-arm.exe
new file mode 100644
index 0000000..951d581
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/w64-arm.exe differ
diff --git a/venv/lib/python3.8/site-packages/distlib/w64.exe b/venv/lib/python3.8/site-packages/distlib/w64.exe
new file mode 100644
index 0000000..5763076
Binary files /dev/null and b/venv/lib/python3.8/site-packages/distlib/w64.exe differ
diff --git a/venv/lib/python3.8/site-packages/distlib/wheel.py b/venv/lib/python3.8/site-packages/distlib/wheel.py
new file mode 100644
index 0000000..028c2d9
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distlib/wheel.py
@@ -0,0 +1,1082 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2020 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import datetime
+from email import message_from_file
+import hashlib
+import json
+import logging
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from . import __version__, DistlibException
+from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
+from .database import InstalledDistribution
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+                       LEGACY_METADATA_FILENAME)
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+                   cached_property, get_cache_base, read_exports, tempdir,
+                   get_platform)
+from .version import NormalizedVersion, UnsupportedVersionError
+
+logger = logging.getLogger(__name__)
+
+cache = None    # created when needed
+
+if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
+    IMP_PREFIX = 'pp'
+elif sys.platform.startswith('java'):  # pragma: no cover
+    IMP_PREFIX = 'jy'
+elif sys.platform == 'cli':  # pragma: no cover
+    IMP_PREFIX = 'ip'
+else:
+    IMP_PREFIX = 'cp'
+
+VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
+if not VER_SUFFIX:   # pragma: no cover
+    VER_SUFFIX = '%s%s' % sys.version_info[:2]
+PYVER = 'py' + VER_SUFFIX
+IMPVER = IMP_PREFIX + VER_SUFFIX
+
+ARCH = get_platform().replace('-', '_').replace('.', '_')
+
+ABI = sysconfig.get_config_var('SOABI')
+if ABI and ABI.startswith('cpython-'):
+    ABI = ABI.replace('cpython-', 'cp').split('-')[0]
+else:
+    def _derive_abi():
+        parts = ['cp', VER_SUFFIX]
+        if sysconfig.get_config_var('Py_DEBUG'):
+            parts.append('d')
+        if IMP_PREFIX == 'cp':
+            vi = sys.version_info[:2]
+            if vi < (3, 8):
+                wpm = sysconfig.get_config_var('WITH_PYMALLOC')
+                if wpm is None:
+                    wpm = True
+                if wpm:
+                    parts.append('m')
+                if vi < (3, 3):
+                    us = sysconfig.get_config_var('Py_UNICODE_SIZE')
+                    if us == 4 or (us is None and sys.maxunicode == 0x10FFFF):
+                        parts.append('u')
+        return ''.join(parts)
+    ABI = _derive_abi()
+    del _derive_abi
+
+FILENAME_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?
+-(?P<py>\w+\d+(\.\w+\d+)*)
+-(?P<bi>\w+)
+-(?P<ar>\w+(\.\w+)*)
+\.whl$
+''', re.IGNORECASE | re.VERBOSE)
+
+NAME_VERSION_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?$
+''', re.IGNORECASE | re.VERBOSE)
+
+SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
+SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
+SHEBANG_PYTHON = b'#!python'
+SHEBANG_PYTHONW = b'#!pythonw'
+
+if os.sep == '/':
+    to_posix = lambda o: o
+else:
+    to_posix = lambda o: o.replace(os.sep, '/')
+
+if sys.version_info[0] < 3:
+    import imp
+else:
+    imp = None
+    import importlib.machinery
+    import importlib.util
+
+def _get_suffixes():
+    if imp:
+        return [s[0] for s in imp.get_suffixes()]
+    else:
+        return importlib.machinery.EXTENSION_SUFFIXES
+
+def _load_dynamic(name, path):
+    # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
+    if imp:
+        return imp.load_dynamic(name, path)
+    else:
+        spec = importlib.util.spec_from_file_location(name, path)
+        module = importlib.util.module_from_spec(spec)
+        sys.modules[name] = module
+        spec.loader.exec_module(module)
+        return module
+
+class Mounter(object):
+    def __init__(self):
+        self.impure_wheels = {}
+        self.libs = {}
+
+    def add(self, pathname, extensions):
+        self.impure_wheels[pathname] = extensions
+        self.libs.update(extensions)
+
+    def remove(self, pathname):
+        extensions = self.impure_wheels.pop(pathname)
+        for k, v in extensions:
+            if k in self.libs:
+                del self.libs[k]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.libs:
+            result = self
+        else:
+            result = None
+        return result
+
+    def load_module(self, fullname):
+        if fullname in sys.modules:
+            result = sys.modules[fullname]
+        else:
+            if fullname not in self.libs:
+                raise ImportError('unable to find extension for %s' % fullname)
+            result = _load_dynamic(fullname, self.libs[fullname])
+            result.__loader__ = self
+            parts = fullname.rsplit('.', 1)
+            if len(parts) > 1:
+                result.__package__ = parts[0]
+        return result
+
+_hook = Mounter()
+
+
+class Wheel(object):
+    """
+    Class to build and install from Wheel files (PEP 427).
+    """
+
+    wheel_version = (1, 1)
+    hash_kind = 'sha256'
+
+    def __init__(self, filename=None, sign=False, verify=False):
+        """
+        Initialise an instance using a (valid) filename.
+        """
+        self.sign = sign
+        self.should_verify = verify
+        self.buildver = ''
+        self.pyver = [PYVER]
+        self.abi = ['none']
+        self.arch = ['any']
+        self.dirname = os.getcwd()
+        if filename is None:
+            self.name = 'dummy'
+            self.version = '0.1'
+            self._filename = self.filename
+        else:
+            m = NAME_VERSION_RE.match(filename)
+            if m:
+                info = m.groupdict('')
+                self.name = info['nm']
+                # Reinstate the local version separator
+                self.version = info['vn'].replace('_', '-')
+                self.buildver = info['bn']
+                self._filename = self.filename
+            else:
+                dirname, filename = os.path.split(filename)
+                m = FILENAME_RE.match(filename)
+                if not m:
+                    raise DistlibException('Invalid name or '
+                                           'filename: %r' % filename)
+                if dirname:
+                    self.dirname = os.path.abspath(dirname)
+                self._filename = filename
+                info = m.groupdict('')
+                self.name = info['nm']
+                self.version = info['vn']
+                self.buildver = info['bn']
+                self.pyver = info['py'].split('.')
+                self.abi = info['bi'].split('.')
+                self.arch = info['ar'].split('.')
+
+    @property
+    def filename(self):
+        """
+        Build and return a filename from the various components.
+        """
+        if self.buildver:
+            buildver = '-' + self.buildver
+        else:
+            buildver = ''
+        pyver = '.'.join(self.pyver)
+        abi = '.'.join(self.abi)
+        arch = '.'.join(self.arch)
+        # replace - with _ as a local version separator
+        version = self.version.replace('-', '_')
+        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
+                                         pyver, abi, arch)
+
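+    # Usage sketch (illustrative; the wheel filename is hypothetical):
+    #
+    #     w = Wheel('dist/foo-1.0-py3-none-any.whl')
+    #     w.name, w.version    # ('foo', '1.0')
+    #     list(w.tags)         # [('py3', 'none', 'any')]
+    #     w.filename           # 'foo-1.0-py3-none-any.whl'
+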
+    @property
+    def exists(self):
+        path = os.path.join(self.dirname, self.filename)
+        return os.path.isfile(path)
+
+    @property
+    def tags(self):
+        for pyver in self.pyver:
+            for abi in self.abi:
+                for arch in self.arch:
+                    yield pyver, abi, arch
+
+    @cached_property
+    def metadata(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        info_dir = '%s.dist-info' % name_ver
+        wrapper = codecs.getreader('utf-8')
+        with ZipFile(pathname, 'r') as zf:
+            wheel_metadata = self.get_wheel_metadata(zf)
+            wv = wheel_metadata['Wheel-Version'].split('.', 1)
+            file_version = tuple([int(i) for i in wv])
+            # if file_version < (1, 1):
+            #     fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
+            #            LEGACY_METADATA_FILENAME]
+            # else:
+            #     fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
+            fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
+            result = None
+            for fn in fns:
+                try:
+                    metadata_filename = posixpath.join(info_dir, fn)
+                    with zf.open(metadata_filename) as bf:
+                        wf = wrapper(bf)
+                        result = Metadata(fileobj=wf)
+                        if result:
+                            break
+                except KeyError:
+                    pass
+            if not result:
+                raise ValueError('Invalid wheel, because metadata is '
+                                 'missing: looked in %s' % ', '.join(fns))
+        return result
+
+    def get_wheel_metadata(self, zf):
+        name_ver = '%s-%s' % (self.name, self.version)
+        info_dir = '%s.dist-info' % name_ver
+        metadata_filename = posixpath.join(info_dir, 'WHEEL')
+        with zf.open(metadata_filename) as bf:
+            wf = codecs.getreader('utf-8')(bf)
+            message = message_from_file(wf)
+        return dict(message)
+
+    @cached_property
+    def info(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        with ZipFile(pathname, 'r') as zf:
+            result = self.get_wheel_metadata(zf)
+        return result
+
+    def process_shebang(self, data):
+        m = SHEBANG_RE.match(data)
+        if m:
+            end = m.end()
+            shebang, data_after_shebang = data[:end], data[end:]
+            # Preserve any arguments after the interpreter
+            if b'pythonw' in shebang.lower():
+                shebang_python = SHEBANG_PYTHONW
+            else:
+                shebang_python = SHEBANG_PYTHON
+            m = SHEBANG_DETAIL_RE.match(shebang)
+            if m:
+                args = b' ' + m.groups()[-1]
+            else:
+                args = b''
+            shebang = shebang_python + args
+            data = shebang + data_after_shebang
+        else:
+            cr = data.find(b'\r')
+            lf = data.find(b'\n')
+            if cr < 0 or cr > lf:
+                term = b'\n'
+            else:
+                if data[cr:cr + 2] == b'\r\n':
+                    term = b'\r\n'
+                else:
+                    term = b'\r'
+            data = SHEBANG_PYTHON + term + data
+        return data
+
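The shebang rewrite normalises the interpreter to the generic `#!python` (or `#!pythonw`) while keeping any interpreter arguments; a script maker later substitutes the real interpreter path. A quick sketch of the effect, using the constants defined in this module:

```python
from distlib.wheel import Wheel

data = b"#!/usr/bin/python3 -u\nprint('hi')\n"
print(Wheel().process_shebang(data))
# b"#!python -u\nprint('hi')\n"  (the '-u' argument survives; a
# ScriptMaker later replaces the generic '#!python' with a real path)
```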
+    def get_hash(self, data, hash_kind=None):
+        if hash_kind is None:
+            hash_kind = self.hash_kind
+        try:
+            hasher = getattr(hashlib, hash_kind)
+        except AttributeError:
+            raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
+        result = hasher(data).digest()
+        result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
+        return hash_kind, result
+
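The digest format matches what wheel `RECORD` files expect: the raw hash, urlsafe-base64 encoded with the `=` padding stripped. An equivalent standalone sketch:

```python
import base64
import hashlib

data = b"print('hello')\n"
digest = base64.urlsafe_b64encode(
    hashlib.sha256(data).digest()).rstrip(b"=").decode("ascii")
record_field = "sha256=%s" % digest  # the "kind=value" form checked in install()/verify()
print(record_field)
```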
+    def write_record(self, records, record_path, archive_record_path):
+        records = list(records)  # make a copy, as we mutate it
+        records.append((archive_record_path, '', ''))
+        with CSVWriter(record_path) as writer:
+            for row in records:
+                writer.writerow(row)
+
+    def write_records(self, info, libdir, archive_paths):
+        records = []
+        distinfo, info_dir = info
+        hasher = getattr(hashlib, self.hash_kind)
+        for ap, p in archive_paths:
+            with open(p, 'rb') as f:
+                data = f.read()
+            digest = '%s=%s' % self.get_hash(data)
+            size = os.path.getsize(p)
+            records.append((ap, digest, size))
+
+        p = os.path.join(distinfo, 'RECORD')
+        ap = to_posix(os.path.join(info_dir, 'RECORD'))
+        self.write_record(records, p, ap)
+        archive_paths.append((ap, p))
+
+    def build_zip(self, pathname, archive_paths):
+        with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
+            for ap, p in archive_paths:
+                logger.debug('Wrote %s to %s in wheel', p, ap)
+                zf.write(p, ap)
+
+    def build(self, paths, tags=None, wheel_version=None):
+        """
+        Build a wheel from files in specified paths, and use any specified tags
+        when determining the name of the wheel.
+        """
+        if tags is None:
+            tags = {}
+
+        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
+        if libkey == 'platlib':
+            is_pure = 'false'
+            default_pyver = [IMPVER]
+            default_abi = [ABI]
+            default_arch = [ARCH]
+        else:
+            is_pure = 'true'
+            default_pyver = [PYVER]
+            default_abi = ['none']
+            default_arch = ['any']
+
+        self.pyver = tags.get('pyver', default_pyver)
+        self.abi = tags.get('abi', default_abi)
+        self.arch = tags.get('arch', default_arch)
+
+        libdir = paths[libkey]
+
+        name_ver = '%s-%s' % (self.name, self.version)
+        data_dir = '%s.data' % name_ver
+        info_dir = '%s.dist-info' % name_ver
+
+        archive_paths = []
+
+        # First, stuff which is not in site-packages
+        for key in ('data', 'headers', 'scripts'):
+            if key not in paths:
+                continue
+            path = paths[key]
+            if os.path.isdir(path):
+                for root, dirs, files in os.walk(path):
+                    for fn in files:
+                        p = fsdecode(os.path.join(root, fn))
+                        rp = os.path.relpath(p, path)
+                        ap = to_posix(os.path.join(data_dir, key, rp))
+                        archive_paths.append((ap, p))
+                        if key == 'scripts' and not p.endswith('.exe'):
+                            with open(p, 'rb') as f:
+                                data = f.read()
+                            data = self.process_shebang(data)
+                            with open(p, 'wb') as f:
+                                f.write(data)
+
+        # Now, stuff which is in site-packages, other than the
+        # distinfo stuff.
+        path = libdir
+        distinfo = None
+        for root, dirs, files in os.walk(path):
+            if root == path:
+                # At the top level only, save distinfo for later
+                # and skip it for now
+                for i, dn in enumerate(dirs):
+                    dn = fsdecode(dn)
+                    if dn.endswith('.dist-info'):
+                        distinfo = os.path.join(root, dn)
+                        del dirs[i]
+                        break
+                assert distinfo, '.dist-info directory expected, not found'
+
+            for fn in files:
+                # comment out the next suite to leave .pyc files in
+                if fsdecode(fn).endswith(('.pyc', '.pyo')):
+                    continue
+                p = os.path.join(root, fn)
+                rp = to_posix(os.path.relpath(p, path))
+                archive_paths.append((rp, p))
+
+        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
+        files = os.listdir(distinfo)
+        for fn in files:
+            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
+                p = fsdecode(os.path.join(distinfo, fn))
+                ap = to_posix(os.path.join(info_dir, fn))
+                archive_paths.append((ap, p))
+
+        wheel_metadata = [
+            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
+            'Generator: distlib %s' % __version__,
+            'Root-Is-Purelib: %s' % is_pure,
+        ]
+        for pyver, abi, arch in self.tags:
+            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
+        p = os.path.join(distinfo, 'WHEEL')
+        with open(p, 'w') as f:
+            f.write('\n'.join(wheel_metadata))
+        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
+        archive_paths.append((ap, p))
+
+        # sort the entries by archive path. Not needed by any spec, but it
+        # keeps the archive listing and RECORD tidier than they would otherwise
+        # be. Use the number of path segments to keep directory entries together,
+        # and keep the dist-info stuff at the end.
+        def sorter(t):
+            ap = t[0]
+            n = ap.count('/')
+            if '.dist-info' in ap:
+                n += 10000
+            return (n, ap)
+        archive_paths = sorted(archive_paths, key=sorter)
+
+        # Now, at last, RECORD.
+        # Paths in here are archive paths - nothing else makes sense.
+        self.write_records((distinfo, info_dir), libdir, archive_paths)
+        # Now, ready to build the zip file
+        pathname = os.path.join(self.dirname, self.filename)
+        self.build_zip(pathname, archive_paths)
+        return pathname
+
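A usage sketch for `build()`: it needs a library directory that already contains the package files plus a `<name>-<version>.dist-info` directory with metadata (the `assert distinfo` above enforces this). All names below are made up for illustration:

```python
import os
import tempfile
from distlib.wheel import Wheel

root = tempfile.mkdtemp()
lib = os.path.join(root, "lib")
os.makedirs(os.path.join(lib, "mypkg-1.0.dist-info"))
with open(os.path.join(lib, "mypkg.py"), "w") as f:
    f.write('VERSION = "1.0"\n')
with open(os.path.join(lib, "mypkg-1.0.dist-info", "METADATA"), "w") as f:
    f.write("Metadata-Version: 1.1\nName: mypkg\nVersion: 1.0\n")

w = Wheel("mypkg-1.0")
w.dirname = root
print(w.build({"purelib": lib}))  # e.g. <root>/mypkg-1.0-py38-none-any.whl
```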
+    def skip_entry(self, arcname):
+        """
+        Determine whether an archive entry should be skipped when verifying
+        or installing.
+        """
+        # The signature file won't be in RECORD,
+        # and we don't currently do anything with it.
+        # We also skip directories, as they won't be in RECORD
+        # either. See:
+        #
+        # https://github.com/pypa/wheel/issues/294
+        # https://github.com/pypa/wheel/issues/287
+        # https://github.com/pypa/wheel/pull/289
+        #
+        return arcname.endswith(('/', '/RECORD.jws'))
+
+    def install(self, paths, maker, **kwargs):
+        """
+        Install a wheel to the specified paths. If kwarg ``warner`` is
+        specified, it should be a callable, which will be called with two
+        tuples indicating the wheel version of this software and the wheel
+        version in the file, if there is a discrepancy between the versions.
+        This can be used to issue warnings or to raise exceptions.
+        If kwarg ``lib_only`` is True, only the purelib/platlib files are
+        installed, and the headers, scripts, data and dist-info metadata are
+        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
+        bytecode will try to use file-hash based invalidation (PEP-552) on
+        supported interpreter versions (CPython 3.7+).
+
+        The return value is an :class:`InstalledDistribution` instance unless
+        ``lib_only`` is True, in which case the return value is ``None``.
+        """
+
+        dry_run = maker.dry_run
+        warner = kwargs.get('warner')
+        lib_only = kwargs.get('lib_only', False)
+        bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
+
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        data_dir = '%s.data' % name_ver
+        info_dir = '%s.dist-info' % name_ver
+
+        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+        record_name = posixpath.join(info_dir, 'RECORD')
+
+        wrapper = codecs.getreader('utf-8')
+
+        with ZipFile(pathname, 'r') as zf:
+            with zf.open(wheel_metadata_name) as bwf:
+                wf = wrapper(bwf)
+                message = message_from_file(wf)
+            wv = message['Wheel-Version'].split('.', 1)
+            file_version = tuple([int(i) for i in wv])
+            if (file_version != self.wheel_version) and warner:
+                warner(self.wheel_version, file_version)
+
+            if message['Root-Is-Purelib'] == 'true':
+                libdir = paths['purelib']
+            else:
+                libdir = paths['platlib']
+
+            records = {}
+            with zf.open(record_name) as bf:
+                with CSVReader(stream=bf) as reader:
+                    for row in reader:
+                        p = row[0]
+                        records[p] = row
+
+            data_pfx = posixpath.join(data_dir, '')
+            info_pfx = posixpath.join(info_dir, '')
+            script_pfx = posixpath.join(data_dir, 'scripts', '')
+
+            # make a new instance rather than a copy of maker's,
+            # as we mutate it
+            fileop = FileOperator(dry_run=dry_run)
+            fileop.record = True    # so we can rollback if needed
+
+            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!
+
+            outfiles = []   # for RECORD writing
+
+            # for script copying/shebang processing
+            workdir = tempfile.mkdtemp()
+            # set target dir later
+            # we default add_launchers to False, as the
+            # Python Launcher should be used instead
+            maker.source_dir = workdir
+            maker.target_dir = None
+            try:
+                for zinfo in zf.infolist():
+                    arcname = zinfo.filename
+                    if isinstance(arcname, text_type):
+                        u_arcname = arcname
+                    else:
+                        u_arcname = arcname.decode('utf-8')
+                    if self.skip_entry(u_arcname):
+                        continue
+                    row = records[u_arcname]
+                    if row[2] and str(zinfo.file_size) != row[2]:
+                        raise DistlibException('size mismatch for '
+                                               '%s' % u_arcname)
+                    if row[1]:
+                        kind, value = row[1].split('=', 1)
+                        with zf.open(arcname) as bf:
+                            data = bf.read()
+                        _, digest = self.get_hash(data, kind)
+                        if digest != value:
+                            raise DistlibException('digest mismatch for '
+                                                   '%s' % arcname)
+
+                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
+                        logger.debug('lib_only: skipping %s', u_arcname)
+                        continue
+                    is_script = (u_arcname.startswith(script_pfx)
+                                 and not u_arcname.endswith('.exe'))
+
+                    if u_arcname.startswith(data_pfx):
+                        _, where, rp = u_arcname.split('/', 2)
+                        outfile = os.path.join(paths[where], convert_path(rp))
+                    else:
+                        # meant for site-packages.
+                        if u_arcname in (wheel_metadata_name, record_name):
+                            continue
+                        outfile = os.path.join(libdir, convert_path(u_arcname))
+                    if not is_script:
+                        with zf.open(arcname) as bf:
+                            fileop.copy_stream(bf, outfile)
+                        # Issue #147: permission bits aren't preserved. Using
+                        # zf.extract(zinfo, libdir) should have worked, but didn't,
+                        # see https://www.thetopsites.net/article/53834422.shtml
+                        # So ... manually preserve permission bits as given in zinfo
+                        if os.name == 'posix':
+                            # just set the normal permission bits
+                            os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF)
+                        outfiles.append(outfile)
+                        # Double check the digest of the written file
+                        if not dry_run and row[1]:
+                            with open(outfile, 'rb') as bf:
+                                data = bf.read()
+                                _, newdigest = self.get_hash(data, kind)
+                                if newdigest != digest:
+                                    raise DistlibException('digest mismatch '
+                                                           'on write for '
+                                                           '%s' % outfile)
+                        if bc and outfile.endswith('.py'):
+                            try:
+                                pyc = fileop.byte_compile(outfile,
+                                                          hashed_invalidation=bc_hashed_invalidation)
+                                outfiles.append(pyc)
+                            except Exception:
+                                # Don't give up if byte-compilation fails,
+                                # but log it and perhaps warn the user
+                                logger.warning('Byte-compilation failed',
+                                               exc_info=True)
+                    else:
+                        fn = os.path.basename(convert_path(arcname))
+                        workname = os.path.join(workdir, fn)
+                        with zf.open(arcname) as bf:
+                            fileop.copy_stream(bf, workname)
+
+                        dn, fn = os.path.split(outfile)
+                        maker.target_dir = dn
+                        filenames = maker.make(fn)
+                        fileop.set_executable_mode(filenames)
+                        outfiles.extend(filenames)
+
+                if lib_only:
+                    logger.debug('lib_only: returning None')
+                    dist = None
+                else:
+                    # Generate scripts
+
+                    # Try to get pydist.json so we can see if there are
+                    # any commands to generate. If this fails (e.g. because
+                    # of a legacy wheel), log a warning but don't give up.
+                    commands = None
+                    file_version = self.info['Wheel-Version']
+                    if file_version == '1.0':
+                        # Use legacy info
+                        ep = posixpath.join(info_dir, 'entry_points.txt')
+                        try:
+                            with zf.open(ep) as bwf:
+                                epdata = read_exports(bwf)
+                            commands = {}
+                            for key in ('console', 'gui'):
+                                k = '%s_scripts' % key
+                                if k in epdata:
+                                    commands['wrap_%s' % key] = d = {}
+                                    for v in epdata[k].values():
+                                        s = '%s:%s' % (v.prefix, v.suffix)
+                                        if v.flags:
+                                            s += ' [%s]' % ','.join(v.flags)
+                                        d[v.name] = s
+                        except Exception:
+                            logger.warning('Unable to read legacy script '
+                                           'metadata, so cannot generate '
+                                           'scripts')
+                    else:
+                        try:
+                            with zf.open(metadata_name) as bwf:
+                                wf = wrapper(bwf)
+                                commands = json.load(wf).get('extensions')
+                                if commands:
+                                    commands = commands.get('python.commands')
+                        except Exception:
+                            logger.warning('Unable to read JSON metadata, so '
+                                           'cannot generate scripts')
+                    if commands:
+                        console_scripts = commands.get('wrap_console', {})
+                        gui_scripts = commands.get('wrap_gui', {})
+                        if console_scripts or gui_scripts:
+                            script_dir = paths.get('scripts', '')
+                            if not os.path.isdir(script_dir):
+                                raise ValueError('Valid script path not '
+                                                 'specified')
+                            maker.target_dir = script_dir
+                            for k, v in console_scripts.items():
+                                script = '%s = %s' % (k, v)
+                                filenames = maker.make(script)
+                                fileop.set_executable_mode(filenames)
+
+                            if gui_scripts:
+                                options = {'gui': True}
+                                for k, v in gui_scripts.items():
+                                    script = '%s = %s' % (k, v)
+                                    filenames = maker.make(script, options)
+                                    fileop.set_executable_mode(filenames)
+
+                    p = os.path.join(libdir, info_dir)
+                    dist = InstalledDistribution(p)
+
+                    # Write SHARED
+                    paths = dict(paths)     # don't change passed in dict
+                    del paths['purelib']
+                    del paths['platlib']
+                    paths['lib'] = libdir
+                    p = dist.write_shared_locations(paths, dry_run)
+                    if p:
+                        outfiles.append(p)
+
+                    # Write RECORD
+                    dist.write_installed_files(outfiles, paths['prefix'],
+                                               dry_run)
+                return dist
+            except Exception:  # pragma: no cover
+                logger.exception('installation failed.')
+                fileop.rollback()
+                raise
+            finally:
+                shutil.rmtree(workdir)
+
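A hedged usage sketch for `install()`, pairing it with distlib's `ScriptMaker` (which is what `maker` is expected to be); the wheel name and target prefix are hypothetical, so the final call is left commented:

```python
from distlib.scripts import ScriptMaker
from distlib.wheel import Wheel

prefix = "/tmp/target"  # hypothetical install prefix
paths = {
    "prefix": prefix,
    "purelib": prefix + "/lib",
    "platlib": prefix + "/lib",
    "headers": prefix + "/headers",
    "scripts": prefix + "/bin",
    "data": prefix + "/data",
}
maker = ScriptMaker(None, None)  # install() sets source_dir/target_dir itself
# dist = Wheel("mypkg-1.0-py38-none-any.whl").install(paths, maker)
```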
+    def _get_dylib_cache(self):
+        global cache
+        if cache is None:
+            # Use native string to avoid issues on 2.x: see Python #20140.
+            base = os.path.join(get_cache_base(), str('dylib-cache'),
+                                '%s.%s' % sys.version_info[:2])
+            cache = Cache(base)
+        return cache
+
+    def _get_extensions(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        info_dir = '%s.dist-info' % name_ver
+        arcname = posixpath.join(info_dir, 'EXTENSIONS')
+        wrapper = codecs.getreader('utf-8')
+        result = []
+        with ZipFile(pathname, 'r') as zf:
+            try:
+                with zf.open(arcname) as bf:
+                    wf = wrapper(bf)
+                    extensions = json.load(wf)
+                    cache = self._get_dylib_cache()
+                    prefix = cache.prefix_to_dir(pathname)
+                    cache_base = os.path.join(cache.base, prefix)
+                    if not os.path.isdir(cache_base):
+                        os.makedirs(cache_base)
+                    for name, relpath in extensions.items():
+                        dest = os.path.join(cache_base, convert_path(relpath))
+                        if not os.path.exists(dest):
+                            extract = True
+                        else:
+                            file_time = os.stat(dest).st_mtime
+                            file_time = datetime.datetime.fromtimestamp(file_time)
+                            info = zf.getinfo(relpath)
+                            wheel_time = datetime.datetime(*info.date_time)
+                            extract = wheel_time > file_time
+                        if extract:
+                            zf.extract(relpath, cache_base)
+                        result.append((name, dest))
+            except KeyError:
+                pass
+        return result
+
+    def is_compatible(self):
+        """
+        Determine if a wheel is compatible with the running system.
+        """
+        return is_compatible(self)
+
+    def is_mountable(self):
+        """
+        Determine if a wheel is asserted as mountable by its metadata.
+        """
+        return True  # for now; metadata details TBD
+
+    def mount(self, append=False):
+        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+        if not self.is_compatible():
+            msg = 'Wheel %s not compatible with this Python.' % pathname
+            raise DistlibException(msg)
+        if not self.is_mountable():
+            msg = 'Wheel %s is marked as not mountable.' % pathname
+            raise DistlibException(msg)
+        if pathname in sys.path:
+            logger.debug('%s already in path', pathname)
+        else:
+            if append:
+                sys.path.append(pathname)
+            else:
+                sys.path.insert(0, pathname)
+            extensions = self._get_extensions()
+            if extensions:
+                if _hook not in sys.meta_path:
+                    sys.meta_path.append(_hook)
+                _hook.add(pathname, extensions)
+
+    def unmount(self):
+        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+        if pathname not in sys.path:
+            logger.debug('%s not in path', pathname)
+        else:
+            sys.path.remove(pathname)
+            if pathname in _hook.impure_wheels:
+                _hook.remove(pathname)
+            if not _hook.impure_wheels:
+                if _hook in sys.meta_path:
+                    sys.meta_path.remove(_hook)
+
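Mounting makes a compatible wheel importable in place: the `.whl` is added to `sys.path` (zipimport handles the pure-Python modules) and, when the wheel carries an `EXTENSIONS` index, the `Mounter` hook serves C extensions from the dylib cache. A hedged sketch with a hypothetical wheel:

```python
from distlib.wheel import Wheel

w = Wheel("mypkg-1.0-py3-none-any.whl")  # hypothetical; must exist and be compatible
# w.mount()    # prepends the wheel to sys.path; 'import mypkg' then works
# ...
# w.unmount()  # removes the path entry and the Mounter bookkeeping
```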
+    def verify(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        data_dir = '%s.data' % name_ver
+        info_dir = '%s.dist-info' % name_ver
+
+        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+        record_name = posixpath.join(info_dir, 'RECORD')
+
+        wrapper = codecs.getreader('utf-8')
+
+        with ZipFile(pathname, 'r') as zf:
+            with zf.open(wheel_metadata_name) as bwf:
+                wf = wrapper(bwf)
+                message = message_from_file(wf)
+            wv = message['Wheel-Version'].split('.', 1)
+            file_version = tuple([int(i) for i in wv])
+            # TODO version verification
+
+            records = {}
+            with zf.open(record_name) as bf:
+                with CSVReader(stream=bf) as reader:
+                    for row in reader:
+                        p = row[0]
+                        records[p] = row
+
+            for zinfo in zf.infolist():
+                arcname = zinfo.filename
+                if isinstance(arcname, text_type):
+                    u_arcname = arcname
+                else:
+                    u_arcname = arcname.decode('utf-8')
+                # See issue #115: some wheels have '..' in their entries, but
+                # only in the filename part, e.g. __main__..py, so the check
+                # looks for '..' in the directory portions only.
+                p = u_arcname.split('/')
+                if '..' in p:
+                    raise DistlibException('invalid entry in '
+                                           'wheel: %r' % u_arcname)
+
+                if self.skip_entry(u_arcname):
+                    continue
+                row = records[u_arcname]
+                if row[2] and str(zinfo.file_size) != row[2]:
+                    raise DistlibException('size mismatch for '
+                                           '%s' % u_arcname)
+                if row[1]:
+                    kind, value = row[1].split('=', 1)
+                    with zf.open(arcname) as bf:
+                        data = bf.read()
+                    _, digest = self.get_hash(data, kind)
+                    if digest != value:
+                        raise DistlibException('digest mismatch for '
+                                               '%s' % arcname)
+
+    def update(self, modifier, dest_dir=None, **kwargs):
+        """
+        Update the contents of a wheel in a generic way. The modifier should
+        be a callable which expects a dictionary argument: its keys are
+        archive-entry paths, and its values are absolute filesystem paths
+        where the contents of the corresponding archive entries can be found. The
+        modifier is free to change the contents of the files pointed to, add
+        new entries and remove entries, before returning. This method will
+        extract the entire contents of the wheel to a temporary location, call
+        the modifier, and then use the passed (and possibly updated)
+        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
+        wheel is written there -- otherwise, the original wheel is overwritten.
+
+        The modifier should return True if it updated the wheel, else False.
+        This method returns the same value the modifier returns.
+        """
+
+        def get_version(path_map, info_dir):
+            version = path = None
+            key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
+            if key not in path_map:
+                key = '%s/PKG-INFO' % info_dir
+            if key in path_map:
+                path = path_map[key]
+                version = Metadata(path=path).version
+            return version, path
+
+        def update_version(version, path):
+            updated = None
+            try:
+                # validation only; raises UnsupportedVersionError if non-compliant
+                v = NormalizedVersion(version)
+                i = version.find('-')
+                if i < 0:
+                    updated = '%s+1' % version
+                else:
+                    parts = [int(s) for s in version[i + 1:].split('.')]
+                    parts[-1] += 1
+                    updated = '%s+%s' % (version[:i],
+                                         '.'.join(str(i) for i in parts))
+            except UnsupportedVersionError:
+                logger.debug('Cannot update non-compliant (PEP-440) '
+                             'version %r', version)
+            if updated:
+                md = Metadata(path=path)
+                md.version = updated
+                legacy = path.endswith(LEGACY_METADATA_FILENAME)
+                md.write(path=path, legacy=legacy)
+                logger.debug('Version updated from %r to %r', version,
+                             updated)
+
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        info_dir = '%s.dist-info' % name_ver
+        record_name = posixpath.join(info_dir, 'RECORD')
+        with tempdir() as workdir:
+            with ZipFile(pathname, 'r') as zf:
+                path_map = {}
+                for zinfo in zf.infolist():
+                    arcname = zinfo.filename
+                    if isinstance(arcname, text_type):
+                        u_arcname = arcname
+                    else:
+                        u_arcname = arcname.decode('utf-8')
+                    if u_arcname == record_name:
+                        continue
+                    if '..' in u_arcname:
+                        raise DistlibException('invalid entry in '
+                                               'wheel: %r' % u_arcname)
+                    zf.extract(zinfo, workdir)
+                    path = os.path.join(workdir, convert_path(u_arcname))
+                    path_map[u_arcname] = path
+
+            # Remember the version.
+            original_version, _ = get_version(path_map, info_dir)
+            # Files extracted. Call the modifier.
+            modified = modifier(path_map, **kwargs)
+            if modified:
+                # Something changed - need to build a new wheel.
+                current_version, path = get_version(path_map, info_dir)
+                if current_version and (current_version == original_version):
+                    # Add or update local version to signify changes.
+                    update_version(current_version, path)
+                # Decide where the new wheel goes.
+                if dest_dir is None:
+                    fd, newpath = tempfile.mkstemp(suffix='.whl',
+                                                   prefix='wheel-update-',
+                                                   dir=workdir)
+                    os.close(fd)
+                else:
+                    if not os.path.isdir(dest_dir):
+                        raise DistlibException('Not a directory: %r' % dest_dir)
+                    newpath = os.path.join(dest_dir, self.filename)
+                archive_paths = list(path_map.items())
+                distinfo = os.path.join(workdir, info_dir)
+                info = distinfo, info_dir
+                self.write_records(info, workdir, archive_paths)
+                self.build_zip(newpath, archive_paths)
+                if dest_dir is None:
+                    shutil.copyfile(newpath, pathname)
+        return modified
+
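A sketch of an `update()` modifier: it receives the mapping of archive names to extracted paths, edits files in place, and returns True to trigger a rebuild (the local version is bumped automatically when the metadata version was left unchanged). All names below are illustrative:

```python
def add_note(path_map):
    """Append a line to the wheel's METADATA; return True if anything changed."""
    for arcname, path in path_map.items():
        if arcname.endswith(".dist-info/METADATA"):
            with open(path, "a") as f:
                f.write("X-Repacked: yes\n")
            return True
    return False

# Wheel("mypkg-1.0-py3-none-any.whl").update(add_note, dest_dir="dist")
```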
+def _get_glibc_version():
+    import platform
+    ver = platform.libc_ver()
+    result = []
+    if ver[0] == 'glibc':
+        for s in ver[1].split('.'):
+            result.append(int(s) if s.isdigit() else 0)
+        result = tuple(result)
+    return result
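This glibc probe drives the manylinux tags emitted by `compatible_tags()` below; `platform.libc_ver()` is the underlying source:

```python
import platform

print(platform.libc_ver())  # e.g. ('glibc', '2.31') on a typical Linux; ('', '') elsewhere
# -> _get_glibc_version() == (2, 31), which enables the manylinux1/2010/2014
#    tags plus the PEP 600 style 'manylinux_2_31_<arch>' tag.
```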
+
+def compatible_tags():
+    """
+    Return (pyver, abi, arch) tuples compatible with this Python.
+    """
+    versions = [VER_SUFFIX]
+    major = VER_SUFFIX[0]
+    for minor in range(sys.version_info[1] - 1, -1, -1):
+        versions.append(''.join([major, str(minor)]))
+
+    abis = []
+    for suffix in _get_suffixes():
+        if suffix.startswith('.abi'):
+            abis.append(suffix.split('.', 2)[1])
+    abis.sort()
+    if ABI != 'none':
+        abis.insert(0, ABI)
+    abis.append('none')
+    result = []
+
+    arches = [ARCH]
+    if sys.platform == 'darwin':
+        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
+        if m:
+            name, major, minor, arch = m.groups()
+            minor = int(minor)
+            matches = [arch]
+            if arch in ('i386', 'ppc'):
+                matches.append('fat')
+            if arch in ('i386', 'ppc', 'x86_64'):
+                matches.append('fat3')
+            if arch in ('ppc64', 'x86_64'):
+                matches.append('fat64')
+            if arch in ('i386', 'x86_64'):
+                matches.append('intel')
+            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
+                matches.append('universal')
+            while minor >= 0:
+                for match in matches:
+                    s = '%s_%s_%s_%s' % (name, major, minor, match)
+                    if s != ARCH:   # already there
+                        arches.append(s)
+                minor -= 1
+
+    # Most specific - our Python version, ABI and arch
+    for abi in abis:
+        for arch in arches:
+            result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
+            # manylinux
+            if abi != 'none' and sys.platform.startswith('linux'):
+                arch = arch.replace('linux_', '')
+                parts = _get_glibc_version()
+                if len(parts) == 2:
+                    if parts >= (2, 5):
+                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
+                                       'manylinux1_%s' % arch))
+                    if parts >= (2, 12):
+                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
+                                       'manylinux2010_%s' % arch))
+                    if parts >= (2, 17):
+                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
+                                       'manylinux2014_%s' % arch))
+                    result.append((''.join((IMP_PREFIX, versions[0])), abi,
+                                   'manylinux_%s_%s_%s' % (parts[0], parts[1],
+                                                           arch)))
+
+    # where no ABI / arch dependency, but IMP_PREFIX dependency
+    for i, version in enumerate(versions):
+        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
+        if i == 0:
+            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
+
+    # no IMP_PREFIX, ABI or arch dependency
+    for i, version in enumerate(versions):
+        result.append((''.join(('py', version)), 'none', 'any'))
+        if i == 0:
+            result.append((''.join(('py', version[0])), 'none', 'any'))
+
+    return set(result)
+
+
+COMPATIBLE_TAGS = compatible_tags()
+
+del compatible_tags
+
+
+def is_compatible(wheel, tags=None):
+    if not isinstance(wheel, Wheel):
+        wheel = Wheel(wheel)    # assume it's a filename
+    result = False
+    if tags is None:
+        tags = COMPATIBLE_TAGS
+    for ver, abi, arch in tags:
+        if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
+            result = True
+            break
+    return result
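With the tag set computed once at import time, compatibility checks reduce to membership tests. A quick sketch against the module-level helpers:

```python
from distlib.wheel import COMPATIBLE_TAGS, is_compatible

print(is_compatible("mypkg-1.0-py3-none-any.whl"))  # True on any CPython 3.x
print(len(COMPATIBLE_TAGS))                         # size of the precomputed tag set
```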
diff --git a/venv/lib/python3.8/site-packages/distutils-precedence.pth b/venv/lib/python3.8/site-packages/distutils-precedence.pth
new file mode 100644
index 0000000..6de4198
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/distutils-precedence.pth
@@ -0,0 +1 @@
+import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim(); 
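The `site` module executes any `.pth` line that starts with `import` at interpreter startup, which is why this setuptools shim is crammed onto one line. Unfolded, it is equivalent to this more readable form:

```python
# Readable equivalent of the one-line .pth shim above.
import os

if os.environ.get("SETUPTOOLS_USE_DISTUTILS", "stdlib") == "local":
    import _distutils_hack
    _distutils_hack.add_shim()  # make 'import distutils' resolve to setuptools' copy
```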
diff --git a/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/LICENSE b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/LICENSE
new file mode 100644
index 0000000..cf1ab25
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/LICENSE
@@ -0,0 +1,24 @@
+This is free and unencumbered software released into the public domain.
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to 
diff --git a/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/METADATA b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/METADATA
new file mode 100644
index 0000000..8ed1972
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/METADATA
@@ -0,0 +1,48 @@
+Metadata-Version: 2.1
+Name: filelock
+Version: 3.8.0
+Summary: A platform independent file lock.
+Home-page: https://github.com/tox-dev/py-filelock
+Download-URL: https://github.com/tox-dev/py-filelock/archive/main.zip
+Author: Benedikt Schmitt
+Author-email: benedikt@benediktschmitt.de
+License: Unlicense
+Project-URL: Source, https://github.com/tox-dev/py-filelock
+Project-URL: Tracker, https://github.com/tox-dev/py-filelock/issues
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: Public Domain
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Internet
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: System
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: furo (>=2022.6.21) ; extra == 'docs'
+Requires-Dist: sphinx (>=5.1.1) ; extra == 'docs'
+Requires-Dist: sphinx-autodoc-typehints (>=1.19.1) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: covdefaults (>=2.2) ; extra == 'testing'
+Requires-Dist: coverage (>=6.4.2) ; extra == 'testing'
+Requires-Dist: pytest (>=7.1.2) ; extra == 'testing'
+Requires-Dist: pytest-cov (>=3) ; extra == 'testing'
+Requires-Dist: pytest-timeout (>=2.1) ; extra == 'testing'
+
+# py-filelock
+
+[![PyPI](https://img.shields.io/pypi/v/filelock)](https://pypi.org/project/filelock/)
+[![Supported Python
+versions](https://img.shields.io/pypi/pyversions/filelock.svg)](https://pypi.org/project/filelock/)
+[![Documentation
+status](https://readthedocs.org/projects/py-filelock/badge/?version=latest)](https://py-filelock.readthedocs.io/en/latest/?badge=latest)
+[![Code style:
+black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+[![Downloads](https://pepy.tech/badge/filelock/month)](https://pepy.tech/project/filelock/month)
+[![check](https://github.com/tox-dev/py-filelock/actions/workflows/check.yml/badge.svg)](https://github.com/tox-dev/py-filelock/actions/workflows/check.yml)
+
+For more information check out the [official documentation](https://py-filelock.readthedocs.io/en/latest/index.html).
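A minimal usage sketch of the library this metadata describes (the lock path is hypothetical):

```python
from filelock import FileLock, Timeout

lock = FileLock("/tmp/app.lock", timeout=1)  # hypothetical lock file
try:
    with lock:  # blocks for up to 1 second
        print("exclusive section")
except Timeout:
    print("another process holds the lock")
```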
diff --git a/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/RECORD b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/RECORD
new file mode 100644
index 0000000..ca8ff3b
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/RECORD
@@ -0,0 +1,24 @@
+filelock-3.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+filelock-3.8.0.dist-info/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210
+filelock-3.8.0.dist-info/METADATA,sha256=CDoR1fv4WeAylV4aZlRFOVJ0O6JAbH0mHauIEVdT_cg,2311
+filelock-3.8.0.dist-info/RECORD,,
+filelock-3.8.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+filelock-3.8.0.dist-info/top_level.txt,sha256=NDrf9i5BNogz4hEdsr6Hi7Ws3TlSSKY4Q2Y9_-i2GwU,9
+filelock-3.8.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+filelock/__init__.py,sha256=XE-RanOqTbJkFCKJbe3PqzNCkVgLQXqs_Yp1rrUPF58,1246
+filelock/__pycache__/__init__.cpython-38.pyc,,
+filelock/__pycache__/_api.cpython-38.pyc,,
+filelock/__pycache__/_error.cpython-38.pyc,,
+filelock/__pycache__/_soft.cpython-38.pyc,,
+filelock/__pycache__/_unix.cpython-38.pyc,,
+filelock/__pycache__/_util.cpython-38.pyc,,
+filelock/__pycache__/_windows.cpython-38.pyc,,
+filelock/__pycache__/version.cpython-38.pyc,,
+filelock/_api.py,sha256=74rZeupmXu8PFZGBAprN3fE4rS45d0njANACuPOeI0M,8895
+filelock/_error.py,sha256=Gaxp2TfdmgdvYFkllGCBOE37vYIXnHKKW3RYfKH7DYM,399
+filelock/_soft.py,sha256=rSpmt4Oi0Eb4JeKzyWImeqf5MYCJR0Dyc_kUN3kHj7Y,1650
+filelock/_unix.py,sha256=gM4-5mqDtamGp5qwWkaZNSTuv9p7vwBa4diVmI1ZBwQ,1578
+filelock/_util.py,sha256=BZKOAYTQdmcHmF34-Ft4kMdEvk3a8NGtsAhe6kfxZuU,594
+filelock/_windows.py,sha256=wvg-_SfJEDyxDeDVH8wBqxbPquXaycoYXgXJOBmm-G4,1890
+filelock/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+filelock/version.py,sha256=tu_YI6bXBlf0-atO2EfySopGOlrOwqLUiRyOvOhMUtw,176
diff --git a/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/WHEEL b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/WHEEL
new file mode 100644
index 0000000..becc9a6
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/top_level.txt
new file mode 100644
index 0000000..83c2e35
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+filelock
diff --git a/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/zip-safe b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/zip-safe
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock-3.8.0.dist-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/venv/lib/python3.8/site-packages/filelock/__init__.py b/venv/lib/python3.8/site-packages/filelock/__init__.py
new file mode 100644
index 0000000..afcdb70
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock/__init__.py
@@ -0,0 +1,48 @@
+"""
+A platform independent file lock that supports the with-statement.
+
+.. autodata:: filelock.__version__
+   :no-value:
+
+"""
+from __future__ import annotations
+
+import sys
+import warnings
+
+from ._api import AcquireReturnProxy, BaseFileLock
+from ._error import Timeout
+from ._soft import SoftFileLock
+from ._unix import UnixFileLock, has_fcntl
+from ._windows import WindowsFileLock
+from .version import version
+
+#: version of the project as a string
+__version__: str = version
+
+
+if sys.platform == "win32":  # pragma: win32 cover
+    _FileLock: type[BaseFileLock] = WindowsFileLock
+else:  # pragma: win32 no cover
+    if has_fcntl:
+        _FileLock: type[BaseFileLock] = UnixFileLock
+    else:
+        _FileLock = SoftFileLock
+        if warnings is not None:
+            warnings.warn("only soft file lock is available")
+
+#: Alias for the lock, which should be used for the current platform. On Windows, this is an alias for
+# :class:`WindowsFileLock`, on Unix for :class:`UnixFileLock` and otherwise for :class:`SoftFileLock`.
+FileLock: type[BaseFileLock] = _FileLock
+
+
+__all__ = [
+    "__version__",
+    "FileLock",
+    "SoftFileLock",
+    "Timeout",
+    "UnixFileLock",
+    "WindowsFileLock",
+    "BaseFileLock",
+    "AcquireReturnProxy",
+]
diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..32067e9
Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_api.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_api.cpython-38.pyc
new file mode 100644
index 0000000..180943b
Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_api.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_error.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_error.cpython-38.pyc
new file mode 100644
index 0000000..75cc6eb
Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_error.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_soft.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_soft.cpython-38.pyc
new file mode 100644
index 0000000..18135ec
Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_soft.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_unix.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_unix.cpython-38.pyc
new file mode 100644
index 0000000..f77d513
Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_unix.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_util.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_util.cpython-38.pyc
new file mode 100644
index 0000000..a809824
Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_util.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/_windows.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/_windows.cpython-38.pyc
new file mode 100644
index 0000000..0eafaab
Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/_windows.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/filelock/__pycache__/version.cpython-38.pyc b/venv/lib/python3.8/site-packages/filelock/__pycache__/version.cpython-38.pyc
new file mode 100644
index 0000000..ea46883
Binary files /dev/null and b/venv/lib/python3.8/site-packages/filelock/__pycache__/version.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/filelock/_api.py b/venv/lib/python3.8/site-packages/filelock/_api.py
new file mode 100644
index 0000000..9c40003
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock/_api.py
@@ -0,0 +1,246 @@
+from __future__ import annotations
+
+import contextlib
+import logging
+import os
+import time
+import warnings
+from abc import ABC, abstractmethod
+from threading import Lock
+from types import TracebackType
+from typing import Any
+
+from ._error import Timeout
+
+_LOGGER = logging.getLogger("filelock")
+
+
+# This is a helper class which is returned by :meth:`BaseFileLock.acquire` and wraps the lock to make sure __enter__
+# is not called twice when entering the with statement. If we simply returned *self*, the lock would be acquired
+# again in the *__enter__* method of the BaseFileLock, but not released again automatically (issue #37, a memory leak).
+class AcquireReturnProxy:
+    """A context aware object that will release the lock file when exiting."""
+
+    def __init__(self, lock: BaseFileLock) -> None:
+        self.lock = lock
+
+    def __enter__(self) -> BaseFileLock:
+        return self.lock
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,  # noqa: U100
+        exc_value: BaseException | None,  # noqa: U100
+        traceback: TracebackType | None,  # noqa: U100
+    ) -> None:
+        self.lock.release()
+
+
+class BaseFileLock(ABC, contextlib.ContextDecorator):
+    """Abstract base class for a file lock object."""
+
+    def __init__(self, lock_file: str | os.PathLike[Any], timeout: float = -1) -> None:
+        """
+        Create a new lock object.
+
+        :param lock_file: path to the file
+        :param timeout: default timeout when acquiring the lock, in seconds. It will be used as the fallback value in
+        the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it
+        to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
+        """
+        # The path to the lock file.
+        self._lock_file: str = os.fspath(lock_file)
+
+        # The file descriptor for the *_lock_file* as it is returned by the os.open() function.
+        # This file lock is only NOT None, if the object currently holds the lock.
+        self._lock_file_fd: int | None = None
+
+        # The default timeout value.
+        self.timeout: float = timeout
+
+        # We use this lock primarily for the lock counter.
+        self._thread_lock: Lock = Lock()
+
+        # The lock counter is used for implementing the nested locking mechanism. Whenever the lock is acquired, the
+        # counter is increased and the lock is only released, when this value is 0 again.
+        self._lock_counter: int = 0
+
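The counter makes the same lock object reentrant within a process; only the outermost release gives up the OS-level lock. A hedged sketch (hypothetical path):

```python
from filelock import FileLock

lock = FileLock("/tmp/demo.lock")  # hypothetical path
with lock:                 # counter 0 -> 1, OS lock acquired
    with lock:             # counter 1 -> 2, no second OS call
        assert lock.is_locked
    assert lock.is_locked  # inner exit only decrements the counter
assert not lock.is_locked  # outermost exit releases the OS lock
```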
+    @property
+    def lock_file(self) -> str:
+        """:return: path to the lock file"""
+        return self._lock_file
+
+    @property
+    def timeout(self) -> float:
+        """
+        :return: the default timeout value, in seconds
+
+        .. versionadded:: 2.0.0
+        """
+        return self._timeout
+
+    @timeout.setter
+    def timeout(self, value: float | str) -> None:
+        """
+        Change the default timeout value.
+
+        :param value: the new value, in seconds
+        """
+        self._timeout = float(value)
+
+    @abstractmethod
+    def _acquire(self) -> None:
+        """If the file lock could be acquired, self._lock_file_fd holds the file descriptor of the lock file."""
+        raise NotImplementedError
+
+    @abstractmethod
+    def _release(self) -> None:
+        """Releases the lock and sets self._lock_file_fd to None."""
+        raise NotImplementedError
+
+    @property
+    def is_locked(self) -> bool:
+        """
+
+        :return: A boolean indicating if the lock file is holding the lock currently.
+
+        .. versionchanged:: 2.0.0
+
+            This was previously a method and is now a property.
+        """
+        return self._lock_file_fd is not None
+
+    def acquire(
+        self,
+        timeout: float | None = None,
+        poll_interval: float = 0.05,
+        *,
+        poll_intervall: float | None = None,
+        blocking: bool = True,
+    ) -> AcquireReturnProxy:
+        """
+        Try to acquire the file lock.
+
+        :param timeout: maximum wait time for acquiring the lock; ``None`` means use the default :attr:`~timeout`, and
+         if ``timeout < 0``, there is no timeout and this method will block until the lock can be acquired
+        :param poll_interval: interval of trying to acquire the lock file
+        :param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead
+        :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
+         first attempt. Otherwise this method will block until the timeout expires or the lock is acquired.
+        :raises Timeout: if fails to acquire lock within the timeout period
+        :return: a context object that will unlock the file when the context is exited
+
+        .. code-block:: python
+
+            # You can use this method in the context manager (recommended)
+            with lock.acquire():
+                pass
+
+            # Or use an equivalent try-finally construct:
+            lock.acquire()
+            try:
+                pass
+            finally:
+                lock.release()
+
+        .. versionchanged:: 2.0.0
+
+            This method returns now a *proxy* object instead of *self*,
+            so that it can be used in a with statement without side effects.
+
+        """
+        # Use the default timeout, if no timeout is provided.
+        if timeout is None:
+            timeout = self.timeout
+
+        if poll_intervall is not None:
+            msg = "use poll_interval instead of poll_intervall"
+            warnings.warn(msg, DeprecationWarning, stacklevel=2)
+            poll_interval = poll_intervall
+
+        # Increment the number right at the beginning. We can still undo it, if something fails.
+        with self._thread_lock:
+            self._lock_counter += 1
+
+        lock_id = id(self)
+        lock_filename = self._lock_file
+        start_time = time.monotonic()
+        try:
+            while True:
+                with self._thread_lock:
+                    if not self.is_locked:
+                        _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
+                        self._acquire()
+
+                if self.is_locked:
+                    _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
+                    break
+                elif blocking is False:
+                    _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
+                    raise Timeout(self._lock_file)
+                elif 0 <= timeout < time.monotonic() - start_time:
+                    _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
+                    raise Timeout(self._lock_file)
+                else:
+                    msg = "Lock %s not acquired on %s, waiting %s seconds ..."
+                    _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
+                    time.sleep(poll_interval)
+        except BaseException:  # Something did go wrong, so decrement the counter.
+            with self._thread_lock:
+                self._lock_counter = max(0, self._lock_counter - 1)
+            raise
+        return AcquireReturnProxy(lock=self)
+
+    def release(self, force: bool = False) -> None:
+        """
+        Releases the file lock. Note that the lock is only fully released once the lock counter reaches 0, and that
+        the lock file itself is not automatically deleted.
+
+        :param force: If true, the lock counter is ignored and the lock is released unconditionally.
+        """
+        with self._thread_lock:
+
+            if self.is_locked:
+                self._lock_counter -= 1
+
+                if self._lock_counter == 0 or force:
+                    lock_id, lock_filename = id(self), self._lock_file
+
+                    _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
+                    self._release()
+                    self._lock_counter = 0
+                    _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
+
+    def __enter__(self) -> BaseFileLock:
+        """
+        Acquire the lock.
+
+        :return: the lock object
+        """
+        self.acquire()
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,  # noqa: U100
+        exc_value: BaseException | None,  # noqa: U100
+        traceback: TracebackType | None,  # noqa: U100
+    ) -> None:
+        """
+        Release the lock.
+
+        :param exc_type: the exception type if raised
+        :param exc_value: the exception value if raised
+        :param traceback: the exception traceback if raised
+        """
+        self.release()
+
+    def __del__(self) -> None:
+        """Called when the lock object is deleted."""
+        self.release(force=True)
+
+
+__all__ = [
+    "BaseFileLock",
+    "AcquireReturnProxy",
+]
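
The `acquire`/`release` pair above is reference-counted per lock object: nested `acquire()` calls on the same instance only bump `_lock_counter`, and only the final `release()` (or `force=True`) actually unlocks the file. A minimal usage sketch, assuming the package-level `FileLock` alias (the platform-appropriate `BaseFileLock` subclass) that upstream filelock exports but which is not shown in this diff:

```python
from filelock import FileLock, Timeout  # FileLock alias assumed from the package __init__

lock = FileLock("demo.txt.lock")

# Recommended: the AcquireReturnProxy releases the lock on context exit.
with lock.acquire(timeout=2, poll_interval=0.1):
    with lock:  # re-entrant on the same object: only increments the counter
        pass

# Non-blocking attempt: raises Timeout immediately if the lock is held elsewhere.
try:
    with lock.acquire(blocking=False):
        print("acquired on the first attempt")
except Timeout as exc:
    print(f"could not lock {exc.lock_file}")
```
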
diff --git a/venv/lib/python3.8/site-packages/filelock/_error.py b/venv/lib/python3.8/site-packages/filelock/_error.py
new file mode 100644
index 0000000..b388521
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock/_error.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+
+class Timeout(TimeoutError):
+    """Raised when the lock could not be acquired in *timeout* seconds."""
+
+    def __init__(self, lock_file: str) -> None:
+        #: The path of the file lock.
+        self.lock_file = lock_file
+
+    def __str__(self) -> str:
+        return f"The file lock '{self.lock_file}' could not be acquired."
+
+
+__all__ = [
+    "Timeout",
+]
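
Since `Timeout` subclasses `TimeoutError`, callers can catch either type, and the stored `lock_file` path makes the failure actionable. A quick sketch against the module exactly as added here:

```python
from filelock._error import Timeout

err = Timeout("/tmp/app.lock")
assert isinstance(err, TimeoutError)  # usable with generic timeout handlers
print(err.lock_file)                  # /tmp/app.lock
print(err)                            # The file lock '/tmp/app.lock' could not be acquired.
```
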
diff --git a/venv/lib/python3.8/site-packages/filelock/_soft.py b/venv/lib/python3.8/site-packages/filelock/_soft.py
new file mode 100644
index 0000000..cb09799
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock/_soft.py
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+import os
+import sys
+from errno import EACCES, EEXIST, ENOENT
+
+from ._api import BaseFileLock
+from ._util import raise_on_exist_ro_file
+
+
+class SoftFileLock(BaseFileLock):
+    """Simply watches the existence of the lock file."""
+
+    def _acquire(self) -> None:
+        raise_on_exist_ro_file(self._lock_file)
+        # the read-only/existence check above runs first because the open below would mask that case as EEXIST
+        mode = (
+            os.O_WRONLY  # open for writing only
+            | os.O_CREAT
+            | os.O_EXCL  # together with above raise EEXIST if the file specified by filename exists
+            | os.O_TRUNC  # truncate the file to zero bytes
+        )
+        try:
+            fd = os.open(self._lock_file, mode)
+        except OSError as exception:
+            if exception.errno == EEXIST:  # expected if cannot lock
+                pass
+            elif exception.errno == ENOENT:  # No such file or directory - parent directory is missing
+                raise
+            elif exception.errno == EACCES and sys.platform != "win32":  # pragma: win32 no cover
+                # Permission denied - parent dir is R/O
+                raise  # note windows does not allow you to make a folder r/o only files
+        else:
+            self._lock_file_fd = fd
+
+    def _release(self) -> None:
+        os.close(self._lock_file_fd)  # type: ignore # the lock file is definitely not None
+        self._lock_file_fd = None
+        try:
+            os.remove(self._lock_file)
+        except OSError:  # the file is already deleted and that's what we want
+            pass
+
+
+__all__ = [
+    "SoftFileLock",
+]
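
`SoftFileLock._acquire` relies on the atomicity of `O_CREAT | O_EXCL`, so the mere existence of the file *is* the lock; unlike the flock-based variant, `_release` therefore must delete it. A short sketch (the `BaseFileLock` constructor signature is assumed from the rest of the package):

```python
import os

from filelock._soft import SoftFileLock

lock = SoftFileLock("data.csv.lock")
with lock:
    # while held, the marker file exists on disk
    assert os.path.exists("data.csv.lock")
# _release() closed the fd and removed the marker file
assert not os.path.exists("data.csv.lock")
```
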
diff --git a/venv/lib/python3.8/site-packages/filelock/_unix.py b/venv/lib/python3.8/site-packages/filelock/_unix.py
new file mode 100644
index 0000000..03b612c
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock/_unix.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+import os
+import sys
+from typing import cast
+
+from ._api import BaseFileLock
+
+#: a flag to indicate if the fcntl API is available
+has_fcntl = False
+if sys.platform == "win32":  # pragma: win32 cover
+
+    class UnixFileLock(BaseFileLock):
+        """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+        def _acquire(self) -> None:
+            raise NotImplementedError
+
+        def _release(self) -> None:
+            raise NotImplementedError
+
+else:  # pragma: win32 no cover
+    try:
+        import fcntl
+    except ImportError:
+        pass
+    else:
+        has_fcntl = True
+
+    class UnixFileLock(BaseFileLock):
+        """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+        def _acquire(self) -> None:
+            open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC
+            fd = os.open(self._lock_file, open_mode)
+            try:
+                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+            except OSError:
+                os.close(fd)
+            else:
+                self._lock_file_fd = fd
+
+        def _release(self) -> None:
+            # Do not remove the lockfile:
+            #   https://github.com/tox-dev/py-filelock/issues/31
+            #   https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
+            fd = cast(int, self._lock_file_fd)
+            self._lock_file_fd = None
+            fcntl.flock(fd, fcntl.LOCK_UN)
+            os.close(fd)
+
+
+__all__ = [
+    "has_fcntl",
+    "UnixFileLock",
+]
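
Because `flock` locks belong to the open file description, a second `UnixFileLock` on the same path, even within one process, opens a new descriptor and fails its `LOCK_EX | LOCK_NB` attempt while the first holds the lock. A sketch of that behavior (constructor defaults assumed from the rest of the package):

```python
from filelock._error import Timeout
from filelock._unix import UnixFileLock, has_fcntl

if has_fcntl:
    first = UnixFileLock("shared.lock")
    second = UnixFileLock("shared.lock")
    first.acquire()
    try:
        second.acquire(blocking=False)  # new fd, flock raises -> Timeout
    except Timeout:
        print("already flock-ed by another descriptor")
    finally:
        first.release()
```
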
diff --git a/venv/lib/python3.8/site-packages/filelock/_util.py b/venv/lib/python3.8/site-packages/filelock/_util.py
new file mode 100644
index 0000000..238b80f
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock/_util.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+import os
+import stat
+
+
+def raise_on_exist_ro_file(filename: str) -> None:
+    try:
+        file_stat = os.stat(filename)  # use stat to do exists + can write to check without race condition
+    except OSError:
+        return None  # swallow does not exist or other errors
+
+    if file_stat.st_mtime != 0:  # an os.stat result with a zero modification time is invalid - ignore it
+        if not (file_stat.st_mode & stat.S_IWUSR):
+            raise PermissionError(f"Permission denied: {filename!r}")
+
+
+__all__ = [
+    "raise_on_exist_ro_file",
+]
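
`raise_on_exist_ro_file` turns the confusing `EEXIST` that `os.open` would report for an existing read-only lock file into an explicit `PermissionError`. A small, self-contained demonstration:

```python
import os
import stat
import tempfile

from filelock._util import raise_on_exist_ro_file

fd, path = tempfile.mkstemp()
os.close(fd)
os.chmod(path, stat.S_IRUSR)  # drop the owner write bit
try:
    raise_on_exist_ro_file(path)  # existing read-only file -> PermissionError
except PermissionError as exc:
    print(exc)
finally:
    os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
    os.remove(path)
```
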
diff --git a/venv/lib/python3.8/site-packages/filelock/_windows.py b/venv/lib/python3.8/site-packages/filelock/_windows.py
new file mode 100644
index 0000000..60e68cb
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock/_windows.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+import os
+import sys
+from errno import ENOENT
+from typing import cast
+
+from ._api import BaseFileLock
+from ._util import raise_on_exist_ro_file
+
+if sys.platform == "win32":  # pragma: win32 cover
+    import msvcrt
+
+    class WindowsFileLock(BaseFileLock):
+        """Uses the :func:`msvcrt.locking` function to hard lock the lock file on windows systems."""
+
+        def _acquire(self) -> None:
+            raise_on_exist_ro_file(self._lock_file)
+            mode = (
+                os.O_RDWR  # open for read and write
+                | os.O_CREAT  # create file if not exists
+                | os.O_TRUNC  # truncate the file if not empty
+            )
+            try:
+                fd = os.open(self._lock_file, mode)
+            except OSError as exception:
+                if exception.errno == ENOENT:  # No such file or directory
+                    raise
+            else:
+                try:
+                    msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
+                except OSError:
+                    os.close(fd)
+                else:
+                    self._lock_file_fd = fd
+
+        def _release(self) -> None:
+            fd = cast(int, self._lock_file_fd)
+            self._lock_file_fd = None
+            msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
+            os.close(fd)
+
+            try:
+                os.remove(self._lock_file)
+            # Probably another instance of the application has acquired the file lock.
+            except OSError:
+                pass
+
+else:  # pragma: win32 no cover
+
+    class WindowsFileLock(BaseFileLock):
+        """Uses the :func:`msvcrt.locking` function to hard lock the lock file on windows systems."""
+
+        def _acquire(self) -> None:
+            raise NotImplementedError
+
+        def _release(self) -> None:
+            raise NotImplementedError
+
+
+__all__ = [
+    "WindowsFileLock",
+]
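
With both platform modules defining the same class names (one real implementation, one raising `NotImplementedError`), choosing a default backend reduces to a platform check. This is a hedged sketch of such a dispatch; upstream filelock performs a similar selection in its package `__init__`, but that exact logic is not part of this diff:

```python
import sys

from filelock._soft import SoftFileLock
from filelock._unix import UnixFileLock, has_fcntl
from filelock._windows import WindowsFileLock

if sys.platform == "win32":
    FileLock = WindowsFileLock  # msvcrt.locking available
elif has_fcntl:
    FileLock = UnixFileLock     # fcntl.flock available
else:
    FileLock = SoftFileLock     # portable existence-based fallback
```
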
diff --git a/venv/lib/python3.8/site-packages/filelock/py.typed b/venv/lib/python3.8/site-packages/filelock/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.8/site-packages/filelock/version.py b/venv/lib/python3.8/site-packages/filelock/version.py
new file mode 100644
index 0000000..c9de29e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/filelock/version.py
@@ -0,0 +1,5 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+__version__ = version = '3.8.0'
+__version_tuple__ = version_tuple = (3, 8, 0)
diff --git a/venv/lib/python3.8/site-packages/jinja2/__init__.py b/venv/lib/python3.8/site-packages/jinja2/__init__.py
new file mode 100644
index 0000000..e323926
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/__init__.py
@@ -0,0 +1,37 @@
+"""Jinja is a template engine written in pure Python. It provides a
+non-XML syntax that supports inline expressions and an optional
+sandboxed environment.
+"""
+from .bccache import BytecodeCache as BytecodeCache
+from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache
+from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache
+from .environment import Environment as Environment
+from .environment import Template as Template
+from .exceptions import TemplateAssertionError as TemplateAssertionError
+from .exceptions import TemplateError as TemplateError
+from .exceptions import TemplateNotFound as TemplateNotFound
+from .exceptions import TemplateRuntimeError as TemplateRuntimeError
+from .exceptions import TemplatesNotFound as TemplatesNotFound
+from .exceptions import TemplateSyntaxError as TemplateSyntaxError
+from .exceptions import UndefinedError as UndefinedError
+from .loaders import BaseLoader as BaseLoader
+from .loaders import ChoiceLoader as ChoiceLoader
+from .loaders import DictLoader as DictLoader
+from .loaders import FileSystemLoader as FileSystemLoader
+from .loaders import FunctionLoader as FunctionLoader
+from .loaders import ModuleLoader as ModuleLoader
+from .loaders import PackageLoader as PackageLoader
+from .loaders import PrefixLoader as PrefixLoader
+from .runtime import ChainableUndefined as ChainableUndefined
+from .runtime import DebugUndefined as DebugUndefined
+from .runtime import make_logging_undefined as make_logging_undefined
+from .runtime import StrictUndefined as StrictUndefined
+from .runtime import Undefined as Undefined
+from .utils import clear_caches as clear_caches
+from .utils import is_undefined as is_undefined
+from .utils import pass_context as pass_context
+from .utils import pass_environment as pass_environment
+from .utils import pass_eval_context as pass_eval_context
+from .utils import select_autoescape as select_autoescape
+
+__version__ = "3.1.2"
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/__init__.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..2c66a61
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/__init__.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/_identifier.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/_identifier.cpython-38.pyc
new file mode 100644
index 0000000..d83b75e
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/_identifier.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/async_utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/async_utils.cpython-38.pyc
new file mode 100644
index 0000000..05f4553
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/async_utils.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/bccache.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/bccache.cpython-38.pyc
new file mode 100644
index 0000000..beeace8
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/bccache.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/compiler.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/compiler.cpython-38.pyc
new file mode 100644
index 0000000..8b35fb3
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/compiler.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/constants.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/constants.cpython-38.pyc
new file mode 100644
index 0000000..0dc2f01
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/constants.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/debug.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/debug.cpython-38.pyc
new file mode 100644
index 0000000..66d72c5
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/debug.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/defaults.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/defaults.cpython-38.pyc
new file mode 100644
index 0000000..e9d15ee
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/defaults.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/environment.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/environment.cpython-38.pyc
new file mode 100644
index 0000000..b060b75
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/environment.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/exceptions.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/exceptions.cpython-38.pyc
new file mode 100644
index 0000000..d634445
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/exceptions.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/ext.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/ext.cpython-38.pyc
new file mode 100644
index 0000000..c3d9e6b
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/ext.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/filters.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/filters.cpython-38.pyc
new file mode 100644
index 0000000..71375b5
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/filters.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/idtracking.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/idtracking.cpython-38.pyc
new file mode 100644
index 0000000..74ef95d
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/idtracking.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/lexer.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/lexer.cpython-38.pyc
new file mode 100644
index 0000000..5e9897c
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/lexer.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/loaders.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/loaders.cpython-38.pyc
new file mode 100644
index 0000000..8bda6fc
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/loaders.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/meta.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/meta.cpython-38.pyc
new file mode 100644
index 0000000..bbead50
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/meta.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/nativetypes.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/nativetypes.cpython-38.pyc
new file mode 100644
index 0000000..15e654c
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/nativetypes.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/nodes.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/nodes.cpython-38.pyc
new file mode 100644
index 0000000..451277f
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/nodes.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/optimizer.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/optimizer.cpython-38.pyc
new file mode 100644
index 0000000..81071ff
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/optimizer.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/parser.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/parser.cpython-38.pyc
new file mode 100644
index 0000000..117e6c3
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/parser.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/runtime.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/runtime.cpython-38.pyc
new file mode 100644
index 0000000..5ddef59
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/runtime.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/sandbox.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/sandbox.cpython-38.pyc
new file mode 100644
index 0000000..2a89a41
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/sandbox.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/tests.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/tests.cpython-38.pyc
new file mode 100644
index 0000000..913bc08
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/tests.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/utils.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000..81e42a3
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/utils.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/__pycache__/visitor.cpython-38.pyc b/venv/lib/python3.8/site-packages/jinja2/__pycache__/visitor.cpython-38.pyc
new file mode 100644
index 0000000..bfe4b52
Binary files /dev/null and b/venv/lib/python3.8/site-packages/jinja2/__pycache__/visitor.cpython-38.pyc differ
diff --git a/venv/lib/python3.8/site-packages/jinja2/_identifier.py b/venv/lib/python3.8/site-packages/jinja2/_identifier.py
new file mode 100644
index 0000000..928c150
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/_identifier.py
@@ -0,0 +1,6 @@
+import re
+
+# generated by scripts/generate_identifier_pattern.py
+pattern = re.compile(
+    r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+"  # noqa: B950
+)
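
The pattern matches runs of `\w` characters plus combining marks, so the lexer can tokenize accented or composed identifiers that `\w` alone would split. Note it matches identifier *characters*, not full identifier syntax (a leading digit still matches). For example:

```python
from jinja2._identifier import pattern

print(bool(pattern.fullmatch("café")))  # True: accented/combining chars allowed
print(bool(pattern.fullmatch("a b")))   # False: whitespace is not an identifier char
print(bool(pattern.fullmatch("1abc")))  # True: characters match, syntax is not checked
```
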
diff --git a/venv/lib/python3.8/site-packages/jinja2/async_utils.py b/venv/lib/python3.8/site-packages/jinja2/async_utils.py
new file mode 100644
index 0000000..1a4f389
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/async_utils.py
@@ -0,0 +1,84 @@
+import inspect
+import typing as t
+from functools import WRAPPER_ASSIGNMENTS
+from functools import wraps
+
+from .utils import _PassArg
+from .utils import pass_eval_context
+
+V = t.TypeVar("V")
+
+
+def async_variant(normal_func):  # type: ignore
+    def decorator(async_func):  # type: ignore
+        pass_arg = _PassArg.from_obj(normal_func)
+        need_eval_context = pass_arg is None
+
+        if pass_arg is _PassArg.environment:
+
+            def is_async(args: t.Any) -> bool:
+                return t.cast(bool, args[0].is_async)
+
+        else:
+
+            def is_async(args: t.Any) -> bool:
+                return t.cast(bool, args[0].environment.is_async)
+
+        # Take the doc and annotations from the sync function, but the
+        # name from the async function. Pallets-Sphinx-Themes
+        # build_function_directive expects __wrapped__ to point to the
+        # sync function.
+        async_func_attrs = ("__module__", "__name__", "__qualname__")
+        normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs))
+
+        @wraps(normal_func, assigned=normal_func_attrs)
+        @wraps(async_func, assigned=async_func_attrs, updated=())
+        def wrapper(*args, **kwargs):  # type: ignore
+            b = is_async(args)
+
+            if need_eval_context:
+                args = args[1:]
+
+            if b:
+                return async_func(*args, **kwargs)
+
+            return normal_func(*args, **kwargs)
+
+        if need_eval_context:
+            wrapper = pass_eval_context(wrapper)
+
+        wrapper.jinja_async_variant = True
+        return wrapper
+
+    return decorator
+
+
+_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)}
+
+
+async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V":
+    # Avoid a costly call to isawaitable
+    if type(value) in _common_primitives:
+        return t.cast("V", value)
+
+    if inspect.isawaitable(value):
+        return await t.cast("t.Awaitable[V]", value)
+
+    return t.cast("V", value)
+
+
+async def auto_aiter(
+    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+) -> "t.AsyncIterator[V]":
+    if hasattr(iterable, "__aiter__"):
+        async for item in t.cast("t.AsyncIterable[V]", iterable):
+            yield item
+    else:
+        for item in t.cast("t.Iterable[V]", iterable):
+            yield item
+
+
+async def auto_to_list(
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+) -> t.List["V"]:
+    return [x async for x in auto_aiter(value)]
diff --git a/venv/lib/python3.8/site-packages/jinja2/bccache.py b/venv/lib/python3.8/site-packages/jinja2/bccache.py
new file mode 100644
index 0000000..d0ddf56
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/bccache.py
@@ -0,0 +1,406 @@
+"""The optional bytecode cache system. This is useful if you have very
+complex template situations and the compilation of all those templates
+slows down your application too much.
+
+Situations where this is useful are often forking web applications that
+are initialized on the first request.
+"""
+import errno
+import fnmatch
+import marshal
+import os
+import pickle
+import stat
+import sys
+import tempfile
+import typing as t
+from hashlib import sha1
+from io import BytesIO
+from types import CodeType
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+
+    class _MemcachedClient(te.Protocol):
+        def get(self, key: str) -> bytes:
+            ...
+
+        def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None:
+            ...
+
+
+bc_version = 5
+# Magic bytes to identify Jinja bytecode cache files. Contains the
+# Python major and minor version to avoid loading incompatible bytecode
+# if a project upgrades its Python version.
+bc_magic = (
+    b"j2"
+    + pickle.dumps(bc_version, 2)
+    + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
+)
+
+
+class Bucket:
+    """Buckets are used to store the bytecode for one template.  It's created
+    and initialized by the bytecode cache and passed to the loading functions.
+
+    Each bucket is assigned an internal checksum by the cache and uses it
+    to automatically reject outdated cache material.  Individual bytecode
+    cache subclasses don't have to care about cache invalidation.
+    """
+
+    def __init__(self, environment: "Environment", key: str, checksum: str) -> None:
+        self.environment = environment
+        self.key = key
+        self.checksum = checksum
+        self.reset()
+
+    def reset(self) -> None:
+        """Resets the bucket (unloads the bytecode)."""
+        self.code: t.Optional[CodeType] = None
+
+    def load_bytecode(self, f: t.BinaryIO) -> None:
+        """Loads bytecode from a file or file like object."""
+        # make sure the magic header is correct
+        magic = f.read(len(bc_magic))
+        if magic != bc_magic:
+            self.reset()
+            return
+        # the source code of the file changed, we need to reload
+        checksum = pickle.load(f)
+        if self.checksum != checksum:
+            self.reset()
+            return
+        # if marshal_load fails then we need to reload
+        try:
+            self.code = marshal.load(f)
+        except (EOFError, ValueError, TypeError):
+            self.reset()
+            return
+
+    def write_bytecode(self, f: t.IO[bytes]) -> None:
+        """Dump the bytecode into the file or file like object passed."""
+        if self.code is None:
+            raise TypeError("can't write empty bucket")
+        f.write(bc_magic)
+        pickle.dump(self.checksum, f, 2)
+        marshal.dump(self.code, f)
+
+    def bytecode_from_string(self, string: bytes) -> None:
+        """Load bytecode from bytes."""
+        self.load_bytecode(BytesIO(string))
+
+    def bytecode_to_string(self) -> bytes:
+        """Return the bytecode as bytes."""
+        out = BytesIO()
+        self.write_bytecode(out)
+        return out.getvalue()
+
+
+class BytecodeCache:
+    """To implement your own bytecode cache you have to subclass this class
+    and override :meth:`load_bytecode` and :meth:`dump_bytecode`.  Both of
+    these methods are passed a :class:`~jinja2.bccache.Bucket`.
+
+    A very basic bytecode cache that saves the bytecode on the file system::
+
+        from os import path
+
+        class MyCache(BytecodeCache):
+
+            def __init__(self, directory):
+                self.directory = directory
+
+            def load_bytecode(self, bucket):
+                filename = path.join(self.directory, bucket.key)
+                if path.exists(filename):
+                    with open(filename, 'rb') as f:
+                        bucket.load_bytecode(f)
+
+            def dump_bytecode(self, bucket):
+                filename = path.join(self.directory, bucket.key)
+                with open(filename, 'wb') as f:
+                    bucket.write_bytecode(f)
+
+    A more advanced version of a filesystem based bytecode cache is part of
+    Jinja.
+    """
+
+    def load_bytecode(self, bucket: Bucket) -> None:
+        """Subclasses have to override this method to load bytecode into a
+        bucket.  If it is not able to find code in the cache for the
+        bucket, it must not do anything.
+        """
+        raise NotImplementedError()
+
+    def dump_bytecode(self, bucket: Bucket) -> None:
+        """Subclasses have to override this method to write the bytecode
+        from a bucket back to the cache.  If it is unable to do so, it must not
+        fail silently but raise an exception.
+        """
+        raise NotImplementedError()
+
+    def clear(self) -> None:
+        """Clears the cache.  This method is not used by Jinja but should be
+        implemented to allow applications to clear the bytecode cache used
+        by a particular environment.
+        """
+
+    def get_cache_key(
+        self, name: str, filename: t.Optional[str] = None
+    ) -> str:
+        """Returns the unique hash key for this template name."""
+        hash = sha1(name.encode("utf-8"))
+
+        if filename is not None:
+            hash.update(f"|{filename}".encode())
+
+        return hash.hexdigest()
+
+    def get_source_checksum(self, source: str) -> str:
+        """Returns a checksum for the source."""
+        return sha1(source.encode("utf-8")).hexdigest()
+
+    def get_bucket(
+        self,
+        environment: "Environment",
+        name: str,
+        filename: t.Optional[str],
+        source: str,
+    ) -> Bucket:
+        """Return a cache bucket for the given template.  All arguments are
+        mandatory but filename may be `None`.
+        """
+        key = self.get_cache_key(name, filename)
+        checksum = self.get_source_checksum(source)
+        bucket = Bucket(environment, key, checksum)
+        self.load_bytecode(bucket)
+        return bucket
+
+    def set_bucket(self, bucket: Bucket) -> None:
+        """Put the bucket into the cache."""
+        self.dump_bytecode(bucket)
+
+
+class FileSystemBytecodeCache(BytecodeCache):
+    """A bytecode cache that stores bytecode on the filesystem.  It accepts
+    two arguments: The directory where the cache items are stored and a
+    pattern string that is used to build the filename.
+
+    If no directory is specified a default cache directory is selected.  On
+    Windows the user's temp directory is used, on UNIX systems a directory
+    is created for the user in the system temp directory.
+
+    The pattern can be used to have multiple separate caches operate on the
+    same directory.  The default pattern is ``'__jinja2_%s.cache'``.  ``%s``
+    is replaced with the cache key.
+
+    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
+
+    This bytecode cache supports clearing of the cache using the clear method.
+    """
+
+    def __init__(
+        self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
+    ) -> None:
+        if directory is None:
+            directory = self._get_default_cache_dir()
+        self.directory = directory
+        self.pattern = pattern
+
+    def _get_default_cache_dir(self) -> str:
+        def _unsafe_dir() -> "te.NoReturn":
+            raise RuntimeError(
+                "Cannot determine safe temp directory.  You "
+                "need to explicitly provide one."
+            )
+
+        tmpdir = tempfile.gettempdir()
+
+        # On Windows the temporary directory is already user-specific unless
+        # explicitly forced otherwise, so we can just use it.
+        if os.name == "nt":
+            return tmpdir
+        if not hasattr(os, "getuid"):
+            _unsafe_dir()
+
+        dirname = f"_jinja2-cache-{os.getuid()}"
+        actual_dir = os.path.join(tmpdir, dirname)
+
+        try:
+            os.mkdir(actual_dir, stat.S_IRWXU)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+        try:
+            os.chmod(actual_dir, stat.S_IRWXU)
+            actual_dir_stat = os.lstat(actual_dir)
+            if (
+                actual_dir_stat.st_uid != os.getuid()
+                or not stat.S_ISDIR(actual_dir_stat.st_mode)
+                or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+            ):
+                _unsafe_dir()
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+
+        actual_dir_stat = os.lstat(actual_dir)
+        if (
+            actual_dir_stat.st_uid != os.getuid()
+            or not stat.S_ISDIR(actual_dir_stat.st_mode)
+            or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+        ):
+            _unsafe_dir()
+
+        return actual_dir
+
+    def _get_cache_filename(self, bucket: Bucket) -> str:
+        return os.path.join(self.directory, self.pattern % (bucket.key,))
+
+    def load_bytecode(self, bucket: Bucket) -> None:
+        filename = self._get_cache_filename(bucket)
+
+        # Don't test for existence before opening the file, since the
+        # file could disappear after the test before the open.
+        try:
+            f = open(filename, "rb")
+        except (FileNotFoundError, IsADirectoryError, PermissionError):
+            # PermissionError can occur on Windows when an operation is
+            # in progress, such as calling clear().
+            return
+
+        with f:
+            bucket.load_bytecode(f)
+
+    def dump_bytecode(self, bucket: Bucket) -> None:
+        # Write to a temporary file, then rename to the real name after
+        # writing. This avoids another process reading the file before
+        # it is fully written.
+        name = self._get_cache_filename(bucket)
+        f = tempfile.NamedTemporaryFile(
+            mode="wb",
+            dir=os.path.dirname(name),
+            prefix=os.path.basename(name),
+            suffix=".tmp",
+            delete=False,
+        )
+
+        def remove_silent() -> None:
+            try:
+                os.remove(f.name)
+            except OSError:
+                # Another process may have called clear(). On Windows,
+                # another program may be holding the file open.
+                pass
+
+        try:
+            with f:
+                bucket.write_bytecode(f)
+        except BaseException:
+            remove_silent()
+            raise
+
+        try:
+            os.replace(f.name, name)
+        except OSError:
+            # Another process may have called clear(). On Windows,
+            # another program may be holding the file open.
+            remove_silent()
+        except BaseException:
+            remove_silent()
+            raise
+
+    def clear(self) -> None:
+        # imported lazily here because google app-engine doesn't support
+        # write access on the file system and the function does not exist
+        # normally.
+        from os import remove
+
+        files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",))
+        for filename in files:
+            try:
+                remove(os.path.join(self.directory, filename))
+            except OSError:
+                pass
+
+
+class MemcachedBytecodeCache(BytecodeCache):
+    """This class implements a bytecode cache that uses a memcache cache for
+    storing the information.  It does not enforce a specific memcache library
+    (tummy's memcache or cmemcache) but will accept any class that provides
+    the minimal interface required.
+
+    Libraries compatible with this class:
+
+    -   `cachelib <https://github.com/pallets/cachelib>`_
+    -   `python-memcached <https://pypi.org/project/python-memcached/>`_
+
+    (Unfortunately the django cache interface is not compatible because it
+    does not support storing binary data, only text. You can however pass
+    the underlying cache client to the bytecode cache which is available
+    as `django.core.cache.cache._client`.)
+
+    The minimal interface for the client passed to the constructor is this:
+
+    .. class:: MinimalClientInterface
+
+        .. method:: set(key, value[, timeout])
+
+            Stores the bytecode in the cache.  `value` is a string and
+            `timeout` the timeout of the key.  If no timeout is provided,
+            a default timeout or no timeout should be assumed; if it is
+            provided, it is an integer number of seconds the cache item
+            should exist.
+
+        .. method:: get(key)
+
+            Returns the value for the cache key.  If the item does not
+            exist in the cache the return value must be `None`.
+
+    The other arguments to the constructor are the prefix that is prepended
+    to every cache key and the timeout for the bytecode in the cache system.
+    We recommend a high (or no) timeout.
+
+    This bytecode cache does not support clearing of used items in the cache.
+    The clear method is a no-operation function.
+
+    .. versionadded:: 2.7
+       Added support for ignoring memcache errors through the
+       `ignore_memcache_errors` parameter.
+    """
+
+    def __init__(
+        self,
+        client: "_MemcachedClient",
+        prefix: str = "jinja2/bytecode/",
+        timeout: t.Optional[int] = None,
+        ignore_memcache_errors: bool = True,
+    ):
+        self.client = client
+        self.prefix = prefix
+        self.timeout = timeout
+        self.ignore_memcache_errors = ignore_memcache_errors
+
+    def load_bytecode(self, bucket: Bucket) -> None:
+        try:
+            code = self.client.get(self.prefix + bucket.key)
+        except Exception:
+            if not self.ignore_memcache_errors:
+                raise
+        else:
+            bucket.bytecode_from_string(code)
+
+    def dump_bytecode(self, bucket: Bucket) -> None:
+        key = self.prefix + bucket.key
+        value = bucket.bytecode_to_string()
+
+        try:
+            if self.timeout is not None:
+                self.client.set(key, value, self.timeout)
+            else:
+                self.client.set(key, value)
+        except Exception:
+            if not self.ignore_memcache_errors:
+                raise
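
Because each `Environment` also keeps an in-memory template cache, the filesystem bytecode cache pays off across processes or fresh environments: the second environment below loads marshalled bytecode instead of re-compiling. A sketch:

```python
import tempfile

from jinja2 import DictLoader, Environment, FileSystemBytecodeCache

cache_dir = tempfile.mkdtemp()
loader = DictLoader({"page.html": "{{ 40 + 2 }}"})

env1 = Environment(loader=loader, bytecode_cache=FileSystemBytecodeCache(cache_dir))
print(env1.get_template("page.html").render())  # compiles, then writes __jinja2_<key>.cache

env2 = Environment(loader=loader, bytecode_cache=FileSystemBytecodeCache(cache_dir))
print(env2.get_template("page.html").render())  # loads bytecode from the cache instead
```
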
diff --git a/venv/lib/python3.8/site-packages/jinja2/compiler.py b/venv/lib/python3.8/site-packages/jinja2/compiler.py
new file mode 100644
index 0000000..3458095
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/compiler.py
@@ -0,0 +1,1957 @@
+"""Compiles nodes from the parser into Python code."""
+import typing as t
+from contextlib import contextmanager
+from functools import update_wrapper
+from io import StringIO
+from itertools import chain
+from keyword import iskeyword as is_python_keyword
+
+from markupsafe import escape
+from markupsafe import Markup
+
+from . import nodes
+from .exceptions import TemplateAssertionError
+from .idtracking import Symbols
+from .idtracking import VAR_LOAD_ALIAS
+from .idtracking import VAR_LOAD_PARAMETER
+from .idtracking import VAR_LOAD_RESOLVE
+from .idtracking import VAR_LOAD_UNDEFINED
+from .nodes import EvalContext
+from .optimizer import Optimizer
+from .utils import _PassArg
+from .utils import concat
+from .visitor import NodeVisitor
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+operators = {
+    "eq": "==",
+    "ne": "!=",
+    "gt": ">",
+    "gteq": ">=",
+    "lt": "<",
+    "lteq": "<=",
+    "in": "in",
+    "notin": "not in",
+}
+
+
+def optimizeconst(f: F) -> F:
+    def new_func(
+        self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any
+    ) -> t.Any:
+        # Only optimize if the frame is not volatile
+        if self.optimizer is not None and not frame.eval_ctx.volatile:
+            new_node = self.optimizer.visit(node, frame.eval_ctx)
+
+            if new_node != node:
+                return self.visit(new_node, frame)
+
+        return f(self, node, frame, **kwargs)
+
+    return update_wrapper(t.cast(F, new_func), f)
+
+
+def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]:
+    @optimizeconst
+    def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None:
+        if (
+            self.environment.sandboxed
+            and op in self.environment.intercepted_binops  # type: ignore
+        ):
+            self.write(f"environment.call_binop(context, {op!r}, ")
+            self.visit(node.left, frame)
+            self.write(", ")
+            self.visit(node.right, frame)
+        else:
+            self.write("(")
+            self.visit(node.left, frame)
+            self.write(f" {op} ")
+            self.visit(node.right, frame)
+
+        self.write(")")
+
+    return visitor
+
+
+def _make_unop(
+    op: str,
+) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]:
+    @optimizeconst
+    def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None:
+        if (
+            self.environment.sandboxed
+            and op in self.environment.intercepted_unops  # type: ignore
+        ):
+            self.write(f"environment.call_unop(context, {op!r}, ")
+            self.visit(node.node, frame)
+        else:
+            self.write("(" + op)
+            self.visit(node.node, frame)
+
+        self.write(")")
+
+    return visitor
+
+
+def generate(
+    node: nodes.Template,
+    environment: "Environment",
+    name: t.Optional[str],
+    filename: t.Optional[str],
+    stream: t.Optional[t.TextIO] = None,
+    defer_init: bool = False,
+    optimized: bool = True,
+) -> t.Optional[str]:
+    """Generate the python source for a node tree."""
+    if not isinstance(node, nodes.Template):
+        raise TypeError("Can't compile non template nodes")
+
+    generator = environment.code_generator_class(
+        environment, name, filename, stream, defer_init, optimized
+    )
+    generator.visit(node)
+
+    if stream is None:
+        return generator.stream.getvalue()  # type: ignore
+
+    return None
+
+
+def has_safe_repr(value: t.Any) -> bool:
+    """Does the node have a safe representation?"""
+    if value is None or value is NotImplemented or value is Ellipsis:
+        return True
+
+    if type(value) in {bool, int, float, complex, range, str, Markup}:
+        return True
+
+    if type(value) in {tuple, list, set, frozenset}:
+        return all(has_safe_repr(v) for v in value)
+
+    if type(value) is dict:
+        return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items())
+
+    return False
+
+
+def find_undeclared(
+    nodes: t.Iterable[nodes.Node], names: t.Iterable[str]
+) -> t.Set[str]:
+    """Check if the names passed are accessed undeclared.  The return value
+    is a set of all the undeclared names from the sequence of names found.
+    """
+    visitor = UndeclaredNameVisitor(names)
+    try:
+        for node in nodes:
+            visitor.visit(node)
+    except VisitorExit:
+        pass
+    return visitor.undeclared
+
+
+class MacroRef:
+    def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None:
+        self.node = node
+        self.accesses_caller = False
+        self.accesses_kwargs = False
+        self.accesses_varargs = False
+
+
+class Frame:
+    """Holds compile time information for us."""
+
+    def __init__(
+        self,
+        eval_ctx: EvalContext,
+        parent: t.Optional["Frame"] = None,
+        level: t.Optional[int] = None,
+    ) -> None:
+        self.eval_ctx = eval_ctx
+
+        # the parent of this frame
+        self.parent = parent
+
+        if parent is None:
+            self.symbols = Symbols(level=level)
+
+            # in some dynamic inheritance situations the compiler needs to add
+            # write tests around output statements.
+            self.require_output_check = False
+
+            # inside some tags we are using a buffer rather than yield statements.
+            # this for example affects {% filter %} or {% macro %}.  If a frame
+            # is buffered this variable points to the name of the list used as
+            # buffer.
+            self.buffer: t.Optional[str] = None
+
+            # the name of the block we're in, otherwise None.
+            self.block: t.Optional[str] = None
+
+        else:
+            self.symbols = Symbols(parent.symbols, level=level)
+            self.require_output_check = parent.require_output_check
+            self.buffer = parent.buffer
+            self.block = parent.block
+
+        # a toplevel frame is the root + soft frames such as if conditions.
+        self.toplevel = False
+
+        # the root frame is basically just the outermost frame, so no if
+        # conditions.  This information is used to optimize inheritance
+        # situations.
+        self.rootlevel = False
+
+        # variables set inside of loops and blocks should not affect outer frames,
+        # but they still need to be tracked as part of the active context.
+        self.loop_frame = False
+        self.block_frame = False
+
+        # track whether the frame is being used in an if-statement or conditional
+        # expression as it determines which errors should be raised during runtime
+        # or compile time.
+        self.soft_frame = False
+
+    def copy(self) -> "Frame":
+        """Create a copy of the current one."""
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.symbols = self.symbols.copy()
+        return rv
+
+    def inner(self, isolated: bool = False) -> "Frame":
+        """Return an inner frame."""
+        if isolated:
+            return Frame(self.eval_ctx, level=self.symbols.level + 1)
+        return Frame(self.eval_ctx, self)
+
+    def soft(self) -> "Frame":
+        """Return a soft frame.  A soft frame may not be modified as
+        standalone thing as it shares the resources with the frame it
+        was created of, but it's not a rootlevel frame any longer.
+
+        This is only used to implement if-statements and conditional
+        expressions.
+        """
+        rv = self.copy()
+        rv.rootlevel = False
+        rv.soft_frame = True
+        return rv
+
+    __copy__ = copy
+
+
+class VisitorExit(RuntimeError):
+    """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
+
+
+class DependencyFinderVisitor(NodeVisitor):
+    """A visitor that collects filter and test calls."""
+
+    def __init__(self) -> None:
+        self.filters: t.Set[str] = set()
+        self.tests: t.Set[str] = set()
+
+    def visit_Filter(self, node: nodes.Filter) -> None:
+        self.generic_visit(node)
+        self.filters.add(node.name)
+
+    def visit_Test(self, node: nodes.Test) -> None:
+        self.generic_visit(node)
+        self.tests.add(node.name)
+
+    def visit_Block(self, node: nodes.Block) -> None:
+        """Stop visiting at blocks."""
+
+
+class UndeclaredNameVisitor(NodeVisitor):
+    """A visitor that checks if a name is accessed without being
+    declared.  This is different from the frame visitor as it will
+    not stop at closure frames.
+    """
+
+    def __init__(self, names: t.Iterable[str]) -> None:
+        self.names = set(names)
+        self.undeclared: t.Set[str] = set()
+
+    def visit_Name(self, node: nodes.Name) -> None:
+        if node.ctx == "load" and node.name in self.names:
+            self.undeclared.add(node.name)
+            if self.undeclared == self.names:
+                raise VisitorExit()
+        else:
+            self.names.discard(node.name)
+
+    def visit_Block(self, node: nodes.Block) -> None:
+        """Stop visiting a blocks."""
+
+
+class CompilerExit(Exception):
+    """Raised if the compiler encountered a situation where it just
+    doesn't make sense to further process the code.  Any block that
+    raises such an exception is not further processed.
+    """
+
+
+class CodeGenerator(NodeVisitor):
+    def __init__(
+        self,
+        environment: "Environment",
+        name: t.Optional[str],
+        filename: t.Optional[str],
+        stream: t.Optional[t.TextIO] = None,
+        defer_init: bool = False,
+        optimized: bool = True,
+    ) -> None:
+        if stream is None:
+            stream = StringIO()
+        self.environment = environment
+        self.name = name
+        self.filename = filename
+        self.stream = stream
+        self.created_block_context = False
+        self.defer_init = defer_init
+        self.optimizer: t.Optional[Optimizer] = None
+
+        if optimized:
+            self.optimizer = Optimizer(environment)
+
+        # aliases for imports
+        self.import_aliases: t.Dict[str, str] = {}
+
+        # a registry for all blocks.  Because blocks are moved out
+        # into the global python scope they are registered here
+        self.blocks: t.Dict[str, nodes.Block] = {}
+
+        # the number of extends statements so far
+        self.extends_so_far = 0
+
+        # some templates have a rootlevel extends.  In this case we
+        # can safely assume that we're a child template and do some
+        # more optimizations.
+        self.has_known_extends = False
+
+        # the current line number
+        self.code_lineno = 1
+
+        # registry of all filters and tests (global, not block local)
+        self.tests: t.Dict[str, str] = {}
+        self.filters: t.Dict[str, str] = {}
+
+        # the debug information
+        self.debug_info: t.List[t.Tuple[int, int]] = []
+        self._write_debug_info: t.Optional[int] = None
+
+        # the number of new lines before the next write()
+        self._new_lines = 0
+
+        # the line number of the last written statement
+        self._last_line = 0
+
+        # true if nothing was written so far.
+        self._first_write = True
+
+        # used by the `temporary_identifier` method to get new
+        # unique, temporary identifier
+        self._last_identifier = 0
+
+        # the current indentation
+        self._indentation = 0
+
+        # Tracks toplevel assignments
+        self._assign_stack: t.List[t.Set[str]] = []
+
+        # Tracks parameter definition blocks
+        self._param_def_block: t.List[t.Set[str]] = []
+
+        # Tracks the current context.
+        self._context_reference_stack = ["context"]
+
+    @property
+    def optimized(self) -> bool:
+        return self.optimizer is not None
+
+    # -- Various compilation helpers
+
+    def fail(self, msg: str, lineno: int) -> "te.NoReturn":
+        """Fail with a :exc:`TemplateAssertionError`."""
+        raise TemplateAssertionError(msg, lineno, self.name, self.filename)
+
+    def temporary_identifier(self) -> str:
+        """Get a new unique identifier."""
+        self._last_identifier += 1
+        return f"t_{self._last_identifier}"
+
+    def buffer(self, frame: Frame) -> None:
+        """Enable buffering for the frame from that point onwards."""
+        frame.buffer = self.temporary_identifier()
+        self.writeline(f"{frame.buffer} = []")
+
+    def return_buffer_contents(
+        self, frame: Frame, force_unescaped: bool = False
+    ) -> None:
+        """Return the buffer contents of the frame."""
+        if not force_unescaped:
+            if frame.eval_ctx.volatile:
+                self.writeline("if context.eval_ctx.autoescape:")
+                self.indent()
+                self.writeline(f"return Markup(concat({frame.buffer}))")
+                self.outdent()
+                self.writeline("else:")
+                self.indent()
+                self.writeline(f"return concat({frame.buffer})")
+                self.outdent()
+                return
+            elif frame.eval_ctx.autoescape:
+                self.writeline(f"return Markup(concat({frame.buffer}))")
+                return
+        self.writeline(f"return concat({frame.buffer})")
+
+    def indent(self) -> None:
+        """Indent by one."""
+        self._indentation += 1
+
+    def outdent(self, step: int = 1) -> None:
+        """Outdent by step."""
+        self._indentation -= step
+
+    def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None:
+        """Yield or write into the frame buffer."""
+        if frame.buffer is None:
+            self.writeline("yield ", node)
+        else:
+            self.writeline(f"{frame.buffer}.append(", node)
+
+    def end_write(self, frame: Frame) -> None:
+        """End the writing process started by `start_write`."""
+        if frame.buffer is not None:
+            self.write(")")
+
+    def simple_write(
+        self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None
+    ) -> None:
+        """Simple shortcut for start_write + write + end_write."""
+        self.start_write(frame, node)
+        self.write(s)
+        self.end_write(frame)
+
+    def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None:
+        """Visit a list of nodes as block in a frame.  If the current frame
+        is no buffer a dummy ``if 0: yield None`` is written automatically.
+        """
+        try:
+            self.writeline("pass")
+            for node in nodes:
+                self.visit(node, frame)
+        except CompilerExit:
+            pass
+
+    def write(self, x: str) -> None:
+        """Write a string into the output stream."""
+        if self._new_lines:
+            if not self._first_write:
+                self.stream.write("\n" * self._new_lines)
+                self.code_lineno += self._new_lines
+                if self._write_debug_info is not None:
+                    self.debug_info.append((self._write_debug_info, self.code_lineno))
+                    self._write_debug_info = None
+            self._first_write = False
+            self.stream.write("    " * self._indentation)
+            self._new_lines = 0
+        self.stream.write(x)
+
+    def writeline(
+        self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0
+    ) -> None:
+        """Combination of newline and write."""
+        self.newline(node, extra)
+        self.write(x)
+
+    def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None:
+        """Add one or more newlines before the next write."""
+        self._new_lines = max(self._new_lines, 1 + extra)
+        if node is not None and node.lineno != self._last_line:
+            self._write_debug_info = node.lineno
+            self._last_line = node.lineno
+
+    def signature(
+        self,
+        node: t.Union[nodes.Call, nodes.Filter, nodes.Test],
+        frame: Frame,
+        extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> None:
+        """Writes a function call to the stream for the current node.
+        A leading comma is added automatically.  The extra keyword
+        arguments may not include python keywords otherwise a syntax
+        error could occur.  The extra keyword arguments should be given
+        as python dict.
+        """
+        # if any of the given keyword arguments is a python keyword
+        # we have to make sure that no invalid call is created.
+        kwarg_workaround = any(
+            is_python_keyword(t.cast(str, k))
+            for k in chain((x.key for x in node.kwargs), extra_kwargs or ())
+        )
+
+        for arg in node.args:
+            self.write(", ")
+            self.visit(arg, frame)
+
+        if not kwarg_workaround:
+            for kwarg in node.kwargs:
+                self.write(", ")
+                self.visit(kwarg, frame)
+            if extra_kwargs is not None:
+                for key, value in extra_kwargs.items():
+                    self.write(f", {key}={value}")
+        if node.dyn_args:
+            self.write(", *")
+            self.visit(node.dyn_args, frame)
+
+        if kwarg_workaround:
+            if node.dyn_kwargs is not None:
+                self.write(", **dict({")
+            else:
+                self.write(", **{")
+            for kwarg in node.kwargs:
+                self.write(f"{kwarg.key!r}: ")
+                self.visit(kwarg.value, frame)
+                self.write(", ")
+            if extra_kwargs is not None:
+                for key, value in extra_kwargs.items():
+                    self.write(f"{key!r}: {value}, ")
+            if node.dyn_kwargs is not None:
+                self.write("}, **")
+                self.visit(node.dyn_kwargs, frame)
+                self.write(")")
+            else:
+                self.write("}")
+
+        elif node.dyn_kwargs is not None:
+            self.write(", **")
+            self.visit(node.dyn_kwargs, frame)
+
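+    # Example (approximate): for a call such as `{{ foo(1, x=2) }}`,
+    # `signature` appends `, 1, x=2` after the caller has already written
+    # the callable, e.g. `context.call(l_0_foo` becomes
+    # `context.call(l_0_foo, 1, x=2`.  If a keyword argument collides
+    # with a Python keyword (e.g. `class`), the `**{...}` workaround
+    # above is emitted instead of the invalid `class=...`.
+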
+    def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None:
+        """Find all filter and test names used in the template and
+        assign them to variables in the compiled namespace. Checking
+        that the names are registered with the environment is done when
+        compiling the Filter and Test nodes. If the node is in an If or
+        CondExpr node, the check is done at runtime instead.
+
+        .. versionchanged:: 3.0
+            Filters and tests in If and CondExpr nodes are checked at
+            runtime instead of compile time.
+        """
+        visitor = DependencyFinderVisitor()
+
+        for node in nodes:
+            visitor.visit(node)
+
+        for id_map, names, dependency in (
+            (self.filters, visitor.filters, "filters"),
+            (self.tests, visitor.tests, "tests"),
+        ):
+            for name in sorted(names):
+                if name not in id_map:
+                    id_map[name] = self.temporary_identifier()
+
+                # add a runtime check that dependencies used inside of
+                # executed blocks are defined, as this check may have been
+                # skipped at compile time
+                self.writeline("try:")
+                self.indent()
+                self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]")
+                self.outdent()
+                self.writeline("except KeyError:")
+                self.indent()
+                self.writeline("@internalcode")
+                self.writeline(f"def {id_map[name]}(*unused):")
+                self.indent()
+                self.writeline(
+                    f'raise TemplateRuntimeError("No {dependency[:-1]}'
+                    f' named {name!r} found.")'
+                )
+                self.outdent()
+                self.outdent()
+
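+    # Example (approximate generated code) for a template using the
+    # `upper` filter, with `t_1` as the temporary identifier:
+    #     try:
+    #         t_1 = environment.filters['upper']
+    #     except KeyError:
+    #         @internalcode
+    #         def t_1(*unused):
+    #             raise TemplateRuntimeError("No filter named 'upper' found.")
+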
+    def enter_frame(self, frame: Frame) -> None:
+        undefs = []
+        for target, (action, param) in frame.symbols.loads.items():
+            if action == VAR_LOAD_PARAMETER:
+                pass
+            elif action == VAR_LOAD_RESOLVE:
+                self.writeline(f"{target} = {self.get_resolve_func()}({param!r})")
+            elif action == VAR_LOAD_ALIAS:
+                self.writeline(f"{target} = {param}")
+            elif action == VAR_LOAD_UNDEFINED:
+                undefs.append(target)
+            else:
+                raise NotImplementedError("unknown load instruction")
+        if undefs:
+            self.writeline(f"{' = '.join(undefs)} = missing")
+
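+    # Example (approximate): a top-level reference to `{{ name }}` makes
+    # `enter_frame` emit `l_0_name = resolve('name')`, while targets that
+    # are only assigned later in the frame start out as `... = missing`.
+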
+    def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None:
+        if not with_python_scope:
+            undefs = []
+            for target in frame.symbols.loads:
+                undefs.append(target)
+            if undefs:
+                self.writeline(f"{' = '.join(undefs)} = missing")
+
+    def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str:
+        return async_value if self.environment.is_async else sync_value
+
+    def func(self, name: str) -> str:
+        return f"{self.choose_async()}def {name}"
+
+    def macro_body(
+        self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame
+    ) -> t.Tuple[Frame, MacroRef]:
+        """Dump the function def of a macro or call block."""
+        frame = frame.inner()
+        frame.symbols.analyze_node(node)
+        macro_ref = MacroRef(node)
+
+        explicit_caller = None
+        skip_special_params = set()
+        args = []
+
+        for idx, arg in enumerate(node.args):
+            if arg.name == "caller":
+                explicit_caller = idx
+            if arg.name in ("kwargs", "varargs"):
+                skip_special_params.add(arg.name)
+            args.append(frame.symbols.ref(arg.name))
+
+        undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
+
+        if "caller" in undeclared:
+            # In older Jinja versions there was a bug that allowed
+            # "caller" to retain its special behavior even if it was
+            # mentioned in the argument list.  Thankfully this only
+            # really worked when it was the last argument, so we now
+            # check for that explicitly and error out if "caller" appears
+            # anywhere else in the argument list.
+            if explicit_caller is not None:
+                try:
+                    node.defaults[explicit_caller - len(node.args)]
+                except IndexError:
+                    self.fail(
+                        "When defining macros or call blocks the "
+                        'special "caller" argument must be omitted '
+                        "or be given a default.",
+                        node.lineno,
+                    )
+            else:
+                args.append(frame.symbols.declare_parameter("caller"))
+            macro_ref.accesses_caller = True
+        if "kwargs" in undeclared and "kwargs" not in skip_special_params:
+            args.append(frame.symbols.declare_parameter("kwargs"))
+            macro_ref.accesses_kwargs = True
+        if "varargs" in undeclared and "varargs" not in skip_special_params:
+            args.append(frame.symbols.declare_parameter("varargs"))
+            macro_ref.accesses_varargs = True
+
+        # macros are delayed, they never require output checks
+        frame.require_output_check = False
+        frame.symbols.analyze_node(node)
+        self.writeline(f"{self.func('macro')}({', '.join(args)}):", node)
+        self.indent()
+
+        self.buffer(frame)
+        self.enter_frame(frame)
+
+        self.push_parameter_definitions(frame)
+        for idx, arg in enumerate(node.args):
+            ref = frame.symbols.ref(arg.name)
+            self.writeline(f"if {ref} is missing:")
+            self.indent()
+            try:
+                default = node.defaults[idx - len(node.args)]
+            except IndexError:
+                self.writeline(
+                    f'{ref} = undefined("parameter {arg.name!r} was not provided",'
+                    f" name={arg.name!r})"
+                )
+            else:
+                self.writeline(f"{ref} = ")
+                self.visit(default, frame)
+            self.mark_parameter_stored(ref)
+            self.outdent()
+        self.pop_parameter_definitions()
+
+        self.blockvisit(node.body, frame)
+        self.return_buffer_contents(frame, force_unescaped=True)
+        self.leave_frame(frame, with_python_scope=True)
+        self.outdent()
+
+        return frame, macro_ref
+
+    def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None:
+        """Dump the macro definition for the def created by macro_body."""
+        arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
+        name = getattr(macro_ref.node, "name", None)
+        if len(macro_ref.node.args) == 1:
+            arg_tuple += ","
+        self.write(
+            f"Macro(environment, macro, {name!r}, ({arg_tuple}),"
+            f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r},"
+            f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)"
+        )
+
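+    # Example (approximate generated code) for `{% macro greet(name) %}`:
+    #     def macro(l_1_name):
+    #         t_1 = []
+    #         ...
+    #         return concat(t_1)
+    #     Macro(environment, macro, 'greet', ('name',), False, False,
+    #           False, context.eval_ctx.autoescape)
+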
+    def position(self, node: nodes.Node) -> str:
+        """Return a human readable position for the node."""
+        rv = f"line {node.lineno}"
+        if self.name is not None:
+            rv = f"{rv} in {self.name!r}"
+        return rv
+
+    def dump_local_context(self, frame: Frame) -> str:
+        items_kv = ", ".join(
+            f"{name!r}: {target}"
+            for name, target in frame.symbols.dump_stores().items()
+        )
+        return f"{{{items_kv}}}"
+
+    def write_commons(self) -> None:
+        """Writes a common preamble that is used by root and block functions.
+        Primarily this sets up common local helpers and enforces a generator
+        through a dead branch.
+        """
+        self.writeline("resolve = context.resolve_or_missing")
+        self.writeline("undefined = environment.undefined")
+        self.writeline("concat = environment.concat")
+        # always use the standard Undefined class for the implicit else of
+        # conditional expressions
+        self.writeline("cond_expr_undefined = Undefined")
+        self.writeline("if 0: yield None")
+
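+    # The dead `if 0: yield None` branch guarantees that every root and
+    # block function is compiled as a generator, even when the template
+    # body itself never yields (e.g. a template made up entirely of
+    # `{% set %}` statements).
+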
+    def push_parameter_definitions(self, frame: Frame) -> None:
+        """Pushes all parameter targets from the given frame into a local
+        stack that permits tracking of yet to be assigned parameters.  In
+        particular this enables the optimization from `visit_Name` to skip
+        undefined expressions for parameters in macros as macros can reference
+        otherwise unbound parameters.
+        """
+        self._param_def_block.append(frame.symbols.dump_param_targets())
+
+    def pop_parameter_definitions(self) -> None:
+        """Pops the current parameter definitions set."""
+        self._param_def_block.pop()
+
+    def mark_parameter_stored(self, target: str) -> None:
+        """Marks a parameter in the current parameter definitions as stored.
+        This will skip the enforced undefined checks.
+        """
+        if self._param_def_block:
+            self._param_def_block[-1].discard(target)
+
+    def push_context_reference(self, target: str) -> None:
+        self._context_reference_stack.append(target)
+
+    def pop_context_reference(self) -> None:
+        self._context_reference_stack.pop()
+
+    def get_context_ref(self) -> str:
+        return self._context_reference_stack[-1]
+
+    def get_resolve_func(self) -> str:
+        target = self._context_reference_stack[-1]
+        if target == "context":
+            return "resolve"
+        return f"{target}.resolve"
+
+    def derive_context(self, frame: Frame) -> str:
+        return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})"
+
+    def parameter_is_undeclared(self, target: str) -> bool:
+        """Checks if a given target is an undeclared parameter."""
+        if not self._param_def_block:
+            return False
+        return target in self._param_def_block[-1]
+
+    def push_assign_tracking(self) -> None:
+        """Pushes a new layer for assignment tracking."""
+        self._assign_stack.append(set())
+
+    def pop_assign_tracking(self, frame: Frame) -> None:
+        """Pops the topmost level for assignment tracking and updates the
+        context variables if necessary.
+        """
+        vars = self._assign_stack.pop()
+        if (
+            not frame.block_frame
+            and not frame.loop_frame
+            and not frame.toplevel
+            or not vars
+        ):
+            return
+        public_names = [x for x in vars if x[:1] != "_"]
+        if len(vars) == 1:
+            name = next(iter(vars))
+            ref = frame.symbols.ref(name)
+            if frame.loop_frame:
+                self.writeline(f"_loop_vars[{name!r}] = {ref}")
+                return
+            if frame.block_frame:
+                self.writeline(f"_block_vars[{name!r}] = {ref}")
+                return
+            self.writeline(f"context.vars[{name!r}] = {ref}")
+        else:
+            if frame.loop_frame:
+                self.writeline("_loop_vars.update({")
+            elif frame.block_frame:
+                self.writeline("_block_vars.update({")
+            else:
+                self.writeline("context.vars.update({")
+            for idx, name in enumerate(vars):
+                if idx:
+                    self.write(", ")
+                ref = frame.symbols.ref(name)
+                self.write(f"{name!r}: {ref}")
+            self.write("})")
+        if not frame.block_frame and not frame.loop_frame and public_names:
+            if len(public_names) == 1:
+                self.writeline(f"context.exported_vars.add({public_names[0]!r})")
+            else:
+                names_str = ", ".join(map(repr, public_names))
+                self.writeline(f"context.exported_vars.update(({names_str}))")
+
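+    # Example (approximate): a top-level `{% set x = 1 %}` ends with
+    #     context.vars['x'] = l_0_x
+    #     context.exported_vars.add('x')
+    # Names starting with an underscore stay private and are never added
+    # to `exported_vars`.
+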
+    # -- Statement Visitors
+
+    def visit_Template(
+        self, node: nodes.Template, frame: t.Optional[Frame] = None
+    ) -> None:
+        assert frame is None, "no root frame allowed"
+        eval_ctx = EvalContext(self.environment, self.name)
+
+        from .runtime import exported, async_exported
+
+        if self.environment.is_async:
+            exported_names = sorted(exported + async_exported)
+        else:
+            exported_names = sorted(exported)
+
+        self.writeline("from jinja2.runtime import " + ", ".join(exported_names))
+
+        # if we want a deferred initialization we cannot move the
+        # environment into a local name
+        envenv = "" if self.defer_init else ", environment=environment"
+
+        # do we have an extends tag at all?  If not, we can save some
+        # overhead by just not processing any inheritance code.
+        have_extends = node.find(nodes.Extends) is not None
+
+        # find all blocks
+        for block in node.find_all(nodes.Block):
+            if block.name in self.blocks:
+                self.fail(f"block {block.name!r} defined twice", block.lineno)
+            self.blocks[block.name] = block
+
+        # find all imports and import them
+        for import_ in node.find_all(nodes.ImportedName):
+            if import_.importname not in self.import_aliases:
+                imp = import_.importname
+                self.import_aliases[imp] = alias = self.temporary_identifier()
+                if "." in imp:
+                    module, obj = imp.rsplit(".", 1)
+                    self.writeline(f"from {module} import {obj} as {alias}")
+                else:
+                    self.writeline(f"import {imp} as {alias}")
+
+        # add the load name
+        self.writeline(f"name = {self.name!r}")
+
+        # generate the root render function.
+        self.writeline(
+            f"{self.func('root')}(context, missing=missing{envenv}):", extra=1
+        )
+        self.indent()
+        self.write_commons()
+
+        # process the root
+        frame = Frame(eval_ctx)
+        if "self" in find_undeclared(node.body, ("self",)):
+            ref = frame.symbols.declare_parameter("self")
+            self.writeline(f"{ref} = TemplateReference(context)")
+        frame.symbols.analyze_node(node)
+        frame.toplevel = frame.rootlevel = True
+        frame.require_output_check = have_extends and not self.has_known_extends
+        if have_extends:
+            self.writeline("parent_template = None")
+        self.enter_frame(frame)
+        self.pull_dependencies(node.body)
+        self.blockvisit(node.body, frame)
+        self.leave_frame(frame, with_python_scope=True)
+        self.outdent()
+
+        # make sure that the parent root is called.
+        if have_extends:
+            if not self.has_known_extends:
+                self.indent()
+                self.writeline("if parent_template is not None:")
+            self.indent()
+            if not self.environment.is_async:
+                self.writeline("yield from parent_template.root_render_func(context)")
+            else:
+                self.writeline(
+                    "async for event in parent_template.root_render_func(context):"
+                )
+                self.indent()
+                self.writeline("yield event")
+                self.outdent()
+            self.outdent(1 + (not self.has_known_extends))
+
+        # at this point we now have the blocks collected and can visit them too.
+        for name, block in self.blocks.items():
+            self.writeline(
+                f"{self.func('block_' + name)}(context, missing=missing{envenv}):",
+                block,
+                1,
+            )
+            self.indent()
+            self.write_commons()
+            # It's important that we do not make this frame a child of the
+            # toplevel template.  This would cause a variety of
+            # interesting issues with identifier tracking.
+            block_frame = Frame(eval_ctx)
+            block_frame.block_frame = True
+            undeclared = find_undeclared(block.body, ("self", "super"))
+            if "self" in undeclared:
+                ref = block_frame.symbols.declare_parameter("self")
+                self.writeline(f"{ref} = TemplateReference(context)")
+            if "super" in undeclared:
+                ref = block_frame.symbols.declare_parameter("super")
+                self.writeline(f"{ref} = context.super({name!r}, block_{name})")
+            block_frame.symbols.analyze_node(block)
+            block_frame.block = name
+            self.writeline("_block_vars = {}")
+            self.enter_frame(block_frame)
+            self.pull_dependencies(block.body)
+            self.blockvisit(block.body, block_frame)
+            self.leave_frame(block_frame, with_python_scope=True)
+            self.outdent()
+
+        blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks)
+        self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1)
+        debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info)
+        self.writeline(f"debug_info = {debug_kv_str!r}")
+
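+    # Example (approximate shape of a compiled template module):
+    #     from jinja2.runtime import LoopContext, Macro, Markup, ...
+    #     name = 'index.html'
+    #     def root(context, missing=missing, environment=environment):
+    #         resolve = context.resolve_or_missing
+    #         undefined = environment.undefined
+    #         if 0: yield None
+    #         yield 'Hello '
+    #     def block_body(context, missing=missing, environment=environment):
+    #         ...
+    #     blocks = {'body': block_body}
+    #     debug_info = '1=8&2=10'
+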
+    def visit_Block(self, node: nodes.Block, frame: Frame) -> None:
+        """Call a block and register it for the template."""
+        level = 0
+        if frame.toplevel:
+            # if we know that we are a child template, there is no need to
+            # check if we are one
+            if self.has_known_extends:
+                return
+            if self.extends_so_far > 0:
+                self.writeline("if parent_template is None:")
+                self.indent()
+                level += 1
+
+        if node.scoped:
+            context = self.derive_context(frame)
+        else:
+            context = self.get_context_ref()
+
+        if node.required:
+            self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node)
+            self.indent()
+            self.writeline(
+                f'raise TemplateRuntimeError("Required block {node.name!r} not found")',
+                node,
+            )
+            self.outdent()
+
+        if not self.environment.is_async and frame.buffer is None:
+            self.writeline(
+                f"yield from context.blocks[{node.name!r}][0]({context})", node
+            )
+        else:
+            self.writeline(
+                f"{self.choose_async()}for event in"
+                f" context.blocks[{node.name!r}][0]({context}):",
+                node,
+            )
+            self.indent()
+            self.simple_write("event", frame)
+            self.outdent()
+
+        self.outdent(level)
+
+    def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None:
+        """Calls the extender."""
+        if not frame.toplevel:
+            self.fail("cannot use extend from a non top-level scope", node.lineno)
+
+        # if no extends statement has been seen so far, we don't have to
+        # add a check for whether the template was already extended
+        # before this one.
+        if self.extends_so_far > 0:
+
+            # if we have a known extends we just add a template runtime
+            # error into the generated code.  We could catch that at compile
+            # time too, but it's better not to confuse users by throwing the
+            # same error at different times just "because we can".
+            if not self.has_known_extends:
+                self.writeline("if parent_template is not None:")
+                self.indent()
+            self.writeline('raise TemplateRuntimeError("extended multiple times")')
+
+            # if we have a known extends already we don't need that code here
+            # as we know that the template execution will end here.
+            if self.has_known_extends:
+                raise CompilerExit()
+            else:
+                self.outdent()
+
+        self.writeline("parent_template = environment.get_template(", node)
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r})")
+        self.writeline("for name, parent_block in parent_template.blocks.items():")
+        self.indent()
+        self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
+        self.outdent()
+
+        # if this extends statement was in the root level we can take
+        # advantage of that information and simplify the generated code
+        # in the top level from this point onwards
+        if frame.rootlevel:
+            self.has_known_extends = True
+
+        # and now we have one more
+        self.extends_so_far += 1
+
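+    # Example (approximate generated code) for `{% extends "base.html" %}`:
+    #     parent_template = environment.get_template('base.html', 'child.html')
+    #     for name, parent_block in parent_template.blocks.items():
+    #         context.blocks.setdefault(name, []).append(parent_block)
+    # Once a root-level extends is seen, `has_known_extends` lets later
+    # output outside of blocks be skipped entirely.
+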
+    def visit_Include(self, node: nodes.Include, frame: Frame) -> None:
+        """Handles includes."""
+        if node.ignore_missing:
+            self.writeline("try:")
+            self.indent()
+
+        func_name = "get_or_select_template"
+        if isinstance(node.template, nodes.Const):
+            if isinstance(node.template.value, str):
+                func_name = "get_template"
+            elif isinstance(node.template.value, (tuple, list)):
+                func_name = "select_template"
+        elif isinstance(node.template, (nodes.Tuple, nodes.List)):
+            func_name = "select_template"
+
+        self.writeline(f"template = environment.{func_name}(", node)
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r})")
+        if node.ignore_missing:
+            self.outdent()
+            self.writeline("except TemplateNotFound:")
+            self.indent()
+            self.writeline("pass")
+            self.outdent()
+            self.writeline("else:")
+            self.indent()
+
+        skip_event_yield = False
+        if node.with_context:
+            self.writeline(
+                f"{self.choose_async()}for event in template.root_render_func("
+                "template.new_context(context.get_all(), True,"
+                f" {self.dump_local_context(frame)})):"
+            )
+        elif self.environment.is_async:
+            self.writeline(
+                "for event in (await template._get_default_module_async())"
+                "._body_stream:"
+            )
+        else:
+            self.writeline("yield from template._get_default_module()._body_stream")
+            skip_event_yield = True
+
+        if not skip_event_yield:
+            self.indent()
+            self.simple_write("event", frame)
+            self.outdent()
+
+        if node.ignore_missing:
+            self.outdent()
+
+    def _import_common(
+        self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame
+    ) -> None:
+        self.write(f"{self.choose_async('await ')}environment.get_template(")
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r}).")
+
+        if node.with_context:
+            f_name = f"make_module{self.choose_async('_async')}"
+            self.write(
+                f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})"
+            )
+        else:
+            self.write(f"_get_default_module{self.choose_async('_async')}(context)")
+
+    def visit_Import(self, node: nodes.Import, frame: Frame) -> None:
+        """Visit regular imports."""
+        self.writeline(f"{frame.symbols.ref(node.target)} = ", node)
+        if frame.toplevel:
+            self.write(f"context.vars[{node.target!r}] = ")
+
+        self._import_common(node, frame)
+
+        if frame.toplevel and not node.target.startswith("_"):
+            self.writeline(f"context.exported_vars.discard({node.target!r})")
+
+    def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None:
+        """Visit named imports."""
+        self.newline(node)
+        self.write("included_template = ")
+        self._import_common(node, frame)
+        var_names = []
+        discarded_names = []
+        for name in node.names:
+            if isinstance(name, tuple):
+                name, alias = name
+            else:
+                alias = name
+            self.writeline(
+                f"{frame.symbols.ref(alias)} ="
+                f" getattr(included_template, {name!r}, missing)"
+            )
+            self.writeline(f"if {frame.symbols.ref(alias)} is missing:")
+            self.indent()
+            message = (
+                "the template {included_template.__name__!r}"
+                f" (imported on {self.position(node)})"
+                f" does not export the requested name {name!r}"
+            )
+            self.writeline(
+                f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})"
+            )
+            self.outdent()
+            if frame.toplevel:
+                var_names.append(alias)
+                if not alias.startswith("_"):
+                    discarded_names.append(alias)
+
+        if var_names:
+            if len(var_names) == 1:
+                name = var_names[0]
+                self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}")
+            else:
+                names_kv = ", ".join(
+                    f"{name!r}: {frame.symbols.ref(name)}" for name in var_names
+                )
+                self.writeline(f"context.vars.update({{{names_kv}}})")
+        if discarded_names:
+            if len(discarded_names) == 1:
+                self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})")
+            else:
+                names_str = ", ".join(map(repr, discarded_names))
+                self.writeline(
+                    f"context.exported_vars.difference_update(({names_str}))"
+                )
+
+    def visit_For(self, node: nodes.For, frame: Frame) -> None:
+        loop_frame = frame.inner()
+        loop_frame.loop_frame = True
+        test_frame = frame.inner()
+        else_frame = frame.inner()
+
+        # try to figure out if we have an extended loop.  An extended loop
+        # is necessary if the loop is in recursive mode, if the special
+        # loop variable is accessed in the body, or if the body contains a
+        # scoped block.
+        extended_loop = (
+            node.recursive
+            or "loop"
+            in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",))
+            or any(block.scoped for block in node.find_all(nodes.Block))
+        )
+
+        loop_ref = None
+        if extended_loop:
+            loop_ref = loop_frame.symbols.declare_parameter("loop")
+
+        loop_frame.symbols.analyze_node(node, for_branch="body")
+        if node.else_:
+            else_frame.symbols.analyze_node(node, for_branch="else")
+
+        if node.test:
+            loop_filter_func = self.temporary_identifier()
+            test_frame.symbols.analyze_node(node, for_branch="test")
+            self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test)
+            self.indent()
+            self.enter_frame(test_frame)
+            self.writeline(self.choose_async("async for ", "for "))
+            self.visit(node.target, loop_frame)
+            self.write(" in ")
+            self.write(self.choose_async("auto_aiter(fiter)", "fiter"))
+            self.write(":")
+            self.indent()
+            self.writeline("if ", node.test)
+            self.visit(node.test, test_frame)
+            self.write(":")
+            self.indent()
+            self.writeline("yield ")
+            self.visit(node.target, loop_frame)
+            self.outdent(3)
+            self.leave_frame(test_frame, with_python_scope=True)
+
+        # if we don't have a recursive loop we have to find the shadowed
+        # variables at that point.  Because loops can be nested, but the
+        # loop variable is a special one, we have to enforce aliasing for it.
+        if node.recursive:
+            self.writeline(
+                f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node
+            )
+            self.indent()
+            self.buffer(loop_frame)
+
+            # Use the same buffer for the else frame
+            else_frame.buffer = loop_frame.buffer
+
+        # make sure the loop variable is a special one and raise a template
+        # assertion error if the for-loop target tries to write to ``loop``
+        if extended_loop:
+            self.writeline(f"{loop_ref} = missing")
+
+        for name in node.find_all(nodes.Name):
+            if name.ctx == "store" and name.name == "loop":
+                self.fail(
+                    "Can't assign to special loop variable in for-loop target",
+                    name.lineno,
+                )
+
+        if node.else_:
+            iteration_indicator = self.temporary_identifier()
+            self.writeline(f"{iteration_indicator} = 1")
+
+        self.writeline(self.choose_async("async for ", "for "), node)
+        self.visit(node.target, loop_frame)
+        if extended_loop:
+            self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(")
+        else:
+            self.write(" in ")
+
+        if node.test:
+            self.write(f"{loop_filter_func}(")
+        if node.recursive:
+            self.write("reciter")
+        else:
+            if self.environment.is_async and not extended_loop:
+                self.write("auto_aiter(")
+            self.visit(node.iter, frame)
+            if self.environment.is_async and not extended_loop:
+                self.write(")")
+        if node.test:
+            self.write(")")
+
+        if node.recursive:
+            self.write(", undefined, loop_render_func, depth):")
+        else:
+            self.write(", undefined):" if extended_loop else ":")
+
+        self.indent()
+        self.enter_frame(loop_frame)
+
+        self.writeline("_loop_vars = {}")
+        self.blockvisit(node.body, loop_frame)
+        if node.else_:
+            self.writeline(f"{iteration_indicator} = 0")
+        self.outdent()
+        self.leave_frame(
+            loop_frame, with_python_scope=node.recursive and not node.else_
+        )
+
+        if node.else_:
+            self.writeline(f"if {iteration_indicator}:")
+            self.indent()
+            self.enter_frame(else_frame)
+            self.blockvisit(node.else_, else_frame)
+            self.leave_frame(else_frame)
+            self.outdent()
+
+        # if the node was recursive we have to return the buffer contents
+        # and start the iteration code
+        if node.recursive:
+            self.return_buffer_contents(loop_frame)
+            self.outdent()
+            self.start_write(frame, node)
+            self.write(f"{self.choose_async('await ')}loop(")
+            if self.environment.is_async:
+                self.write("auto_aiter(")
+            self.visit(node.iter, frame)
+            if self.environment.is_async:
+                self.write(")")
+            self.write(", loop)")
+            self.end_write(frame)
+
+        # at the end of the iteration, clear any assignments made in the
+        # loop from the top level
+        if self._assign_stack:
+            self._assign_stack[-1].difference_update(loop_frame.symbols.stores)
+
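+    # Example (approximate generated code) for
+    # `{% for item in items %}{{ loop.index }}{% endfor %}`, which is an
+    # extended loop because `loop` is used in the body:
+    #     for l_1_item, l_1_loop in LoopContext(l_0_items, undefined):
+    #         _loop_vars = {}
+    #         yield str(environment.getattr(l_1_loop, 'index'))
+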
+    def visit_If(self, node: nodes.If, frame: Frame) -> None:
+        if_frame = frame.soft()
+        self.writeline("if ", node)
+        self.visit(node.test, if_frame)
+        self.write(":")
+        self.indent()
+        self.blockvisit(node.body, if_frame)
+        self.outdent()
+        for elif_ in node.elif_:
+            self.writeline("elif ", elif_)
+            self.visit(elif_.test, if_frame)
+            self.write(":")
+            self.indent()
+            self.blockvisit(elif_.body, if_frame)
+            self.outdent()
+        if node.else_:
+            self.writeline("else:")
+            self.indent()
+            self.blockvisit(node.else_, if_frame)
+            self.outdent()
+
+    def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None:
+        macro_frame, macro_ref = self.macro_body(node, frame)
+        self.newline()
+        if frame.toplevel:
+            if not node.name.startswith("_"):
+                self.write(f"context.exported_vars.add({node.name!r})")
+            self.writeline(f"context.vars[{node.name!r}] = ")
+        self.write(f"{frame.symbols.ref(node.name)} = ")
+        self.macro_def(macro_ref, macro_frame)
+
+    def visit_CallBlock(self, node: nodes.CallBlock, frame: Frame) -> None:
+        call_frame, macro_ref = self.macro_body(node, frame)
+        self.writeline("caller = ")
+        self.macro_def(macro_ref, call_frame)
+        self.start_write(frame, node)
+        self.visit_Call(node.call, frame, forward_caller=True)
+        self.end_write(frame)
+
+    def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None:
+        filter_frame = frame.inner()
+        filter_frame.symbols.analyze_node(node)
+        self.enter_frame(filter_frame)
+        self.buffer(filter_frame)
+        self.blockvisit(node.body, filter_frame)
+        self.start_write(frame, node)
+        self.visit_Filter(node.filter, filter_frame)
+        self.end_write(frame)
+        self.leave_frame(filter_frame)
+
+    def visit_With(self, node: nodes.With, frame: Frame) -> None:
+        with_frame = frame.inner()
+        with_frame.symbols.analyze_node(node)
+        self.enter_frame(with_frame)
+        for target, expr in zip(node.targets, node.values):
+            self.newline()
+            self.visit(target, with_frame)
+            self.write(" = ")
+            self.visit(expr, frame)
+        self.blockvisit(node.body, with_frame)
+        self.leave_frame(with_frame)
+
+    def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None:
+        self.newline(node)
+        self.visit(node.node, frame)
+
+    class _FinalizeInfo(t.NamedTuple):
+        const: t.Optional[t.Callable[..., str]]
+        src: t.Optional[str]
+
+    @staticmethod
+    def _default_finalize(value: t.Any) -> t.Any:
+        """The default finalize function if the environment isn't
+        configured with one. Or, if the environment has one, this is
+        called on that function's output for constants.
+        """
+        return str(value)
+
+    _finalize: t.Optional[_FinalizeInfo] = None
+
+    def _make_finalize(self) -> _FinalizeInfo:
+        """Build the finalize function to be used on constants and at
+        runtime. Cached so it's only created once for all output nodes.
+
+        Returns a ``namedtuple`` with the following attributes:
+
+        ``const``
+            A function to finalize constant data at compile time.
+
+        ``src``
+            Source code to output around nodes to be evaluated at
+            runtime.
+        """
+        if self._finalize is not None:
+            return self._finalize
+
+        finalize: t.Optional[t.Callable[..., t.Any]]
+        finalize = default = self._default_finalize
+        src = None
+
+        if self.environment.finalize:
+            src = "environment.finalize("
+            env_finalize = self.environment.finalize
+            pass_arg = {
+                _PassArg.context: "context",
+                _PassArg.eval_context: "context.eval_ctx",
+                _PassArg.environment: "environment",
+            }.get(
+                _PassArg.from_obj(env_finalize)  # type: ignore
+            )
+            finalize = None
+
+            if pass_arg is None:
+
+                def finalize(value: t.Any) -> t.Any:
+                    return default(env_finalize(value))
+
+            else:
+                src = f"{src}{pass_arg}, "
+
+                if pass_arg == "environment":
+
+                    def finalize(value: t.Any) -> t.Any:
+                        return default(env_finalize(self.environment, value))
+
+        self._finalize = self._FinalizeInfo(finalize, src)
+        return self._finalize
+
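+    # Example (approximate): with `Environment(finalize=lambda v: v or '')`
+    # constant output can be folded through the function at compile time,
+    # while runtime output nodes are wrapped in `environment.finalize(...)`.
+    # A `@pass_context` finalize disables compile-time folding, since the
+    # context only exists at render time.
+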
+    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
+        """Given a group of constant values converted from ``Output``
+        child nodes, produce a string to write to the template module
+        source.
+        """
+        return repr(concat(group))
+
+    def _output_child_to_const(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> str:
+        """Try to optimize a child of an ``Output`` node by trying to
+        convert it to constant, finalized data at compile time.
+
+        If :exc:`Impossible` is raised, the node is not constant and
+        will be evaluated at runtime. Any other exception will also be
+        evaluated at runtime for easier debugging.
+        """
+        const = node.as_const(frame.eval_ctx)
+
+        if frame.eval_ctx.autoescape:
+            const = escape(const)
+
+        # Template data doesn't go through finalize.
+        if isinstance(node, nodes.TemplateData):
+            return str(const)
+
+        return finalize.const(const)  # type: ignore
+
+    def _output_child_pre(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> None:
+        """Output extra source code before visiting a child of an
+        ``Output`` node.
+        """
+        if frame.eval_ctx.volatile:
+            self.write("(escape if context.eval_ctx.autoescape else str)(")
+        elif frame.eval_ctx.autoescape:
+            self.write("escape(")
+        else:
+            self.write("str(")
+
+        if finalize.src is not None:
+            self.write(finalize.src)
+
+    def _output_child_post(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> None:
+        """Output extra source code after visiting a child of an
+        ``Output`` node.
+        """
+        self.write(")")
+
+        if finalize.src is not None:
+            self.write(")")
+
+    def visit_Output(self, node: nodes.Output, frame: Frame) -> None:
+        # If an extends is active, don't render outside a block.
+        if frame.require_output_check:
+            # A top-level extends is known to exist at compile time.
+            if self.has_known_extends:
+                return
+
+            self.writeline("if parent_template is None:")
+            self.indent()
+
+        finalize = self._make_finalize()
+        body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = []
+
+        # Evaluate constants at compile time if possible. Each item in
+        # body will be either a list of static data or a node to be
+        # evaluated at runtime.
+        for child in node.nodes:
+            try:
+                if not (
+                    # If the finalize function requires runtime context,
+                    # constants can't be evaluated at compile time.
+                    finalize.const
+                    # Unless it's basic template data that won't be
+                    # finalized anyway.
+                    or isinstance(child, nodes.TemplateData)
+                ):
+                    raise nodes.Impossible()
+
+                const = self._output_child_to_const(child, frame, finalize)
+            except (nodes.Impossible, Exception):
+                # The node was not constant and needs to be evaluated at
+                # runtime. Or another error was raised, which is easier
+                # to debug at runtime.
+                body.append(child)
+                continue
+
+            if body and isinstance(body[-1], list):
+                body[-1].append(const)
+            else:
+                body.append([const])
+
+        if frame.buffer is not None:
+            if len(body) == 1:
+                self.writeline(f"{frame.buffer}.append(")
+            else:
+                self.writeline(f"{frame.buffer}.extend((")
+
+            self.indent()
+
+        for item in body:
+            if isinstance(item, list):
+                # A group of constant data to join and output.
+                val = self._output_const_repr(item)
+
+                if frame.buffer is None:
+                    self.writeline("yield " + val)
+                else:
+                    self.writeline(val + ",")
+            else:
+                if frame.buffer is None:
+                    self.writeline("yield ", item)
+                else:
+                    self.newline(item)
+
+                # A node to be evaluated at runtime.
+                self._output_child_pre(item, frame, finalize)
+                self.visit(item, frame)
+                self._output_child_post(item, frame, finalize)
+
+                if frame.buffer is not None:
+                    self.write(",")
+
+        if frame.buffer is not None:
+            self.outdent()
+            self.writeline(")" if len(body) == 1 else "))")
+
+        if frame.require_output_check:
+            self.outdent()
+
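+    # Example (approximate): `Hello {{ name }}!` compiles to roughly
+    #     yield 'Hello '
+    #     yield str((undefined(name='name') if l_0_name is missing else l_0_name))
+    #     yield '!'
+    # with `escape(...)` in place of `str(...)` when autoescaping is
+    # enabled, and adjacent constants joined into a single yield.
+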
+    def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None:
+        self.push_assign_tracking()
+        self.newline(node)
+        self.visit(node.target, frame)
+        self.write(" = ")
+        self.visit(node.node, frame)
+        self.pop_assign_tracking(frame)
+
+    def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None:
+        self.push_assign_tracking()
+        block_frame = frame.inner()
+        # This is a special case.  Since a set block always captures we
+        # will disable output checks.  This way one can use set blocks
+        # toplevel even in extended templates.
+        block_frame.require_output_check = False
+        block_frame.symbols.analyze_node(node)
+        self.enter_frame(block_frame)
+        self.buffer(block_frame)
+        self.blockvisit(node.body, block_frame)
+        self.newline(node)
+        self.visit(node.target, frame)
+        self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
+        if node.filter is not None:
+            self.visit_Filter(node.filter, block_frame)
+        else:
+            self.write(f"concat({block_frame.buffer})")
+        self.write(")")
+        self.pop_assign_tracking(frame)
+        self.leave_frame(block_frame)
+
+    # -- Expression Visitors
+
+    def visit_Name(self, node: nodes.Name, frame: Frame) -> None:
+        if node.ctx == "store" and (
+            frame.toplevel or frame.loop_frame or frame.block_frame
+        ):
+            if self._assign_stack:
+                self._assign_stack[-1].add(node.name)
+        ref = frame.symbols.ref(node.name)
+
+        # If we are looking up a variable we might have to deal with the
+        # case where it's undefined.  We can skip that case if the load
+        # instruction indicates a parameter, since parameters are always
+        # defined.
+        if node.ctx == "load":
+            load = frame.symbols.find_load(ref)
+            if not (
+                load is not None
+                and load[0] == VAR_LOAD_PARAMETER
+                and not self.parameter_is_undeclared(ref)
+            ):
+                self.write(
+                    f"(undefined(name={node.name!r}) if {ref} is missing else {ref})"
+                )
+                return
+
+        self.write(ref)
+
+    def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None:
+        # NSRefs can only be used to store values; since they use the normal
+        # `foo.bar` notation they will be parsed as a normal attribute access
+        # when used anywhere but in a `set` context
+        ref = frame.symbols.ref(node.name)
+        self.writeline(f"if not isinstance({ref}, Namespace):")
+        self.indent()
+        self.writeline(
+            "raise TemplateRuntimeError"
+            '("cannot assign attribute on non-namespace object")'
+        )
+        self.outdent()
+        self.writeline(f"{ref}[{node.attr!r}]")
+
+    def visit_Const(self, node: nodes.Const, frame: Frame) -> None:
+        val = node.as_const(frame.eval_ctx)
+        if isinstance(val, float):
+            self.write(str(val))
+        else:
+            self.write(repr(val))
+
+    def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None:
+        try:
+            self.write(repr(node.as_const(frame.eval_ctx)))
+        except nodes.Impossible:
+            self.write(
+                f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})"
+            )
+
+    def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None:
+        self.write("(")
+        idx = -1
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item, frame)
+        self.write(",)" if idx == 0 else ")")
+
+    def visit_List(self, node: nodes.List, frame: Frame) -> None:
+        self.write("[")
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item, frame)
+        self.write("]")
+
+    def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None:
+        self.write("{")
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item.key, frame)
+            self.write(": ")
+            self.visit(item.value, frame)
+        self.write("}")
+
+    visit_Add = _make_binop("+")
+    visit_Sub = _make_binop("-")
+    visit_Mul = _make_binop("*")
+    visit_Div = _make_binop("/")
+    visit_FloorDiv = _make_binop("//")
+    visit_Pow = _make_binop("**")
+    visit_Mod = _make_binop("%")
+    visit_And = _make_binop("and")
+    visit_Or = _make_binop("or")
+    visit_Pos = _make_unop("+")
+    visit_Neg = _make_unop("-")
+    visit_Not = _make_unop("not ")
+
+    @optimizeconst
+    def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None:
+        if frame.eval_ctx.volatile:
+            func_name = "(markup_join if context.eval_ctx.volatile else str_join)"
+        elif frame.eval_ctx.autoescape:
+            func_name = "markup_join"
+        else:
+            func_name = "str_join"
+        self.write(f"{func_name}((")
+        for arg in node.nodes:
+            self.visit(arg, frame)
+            self.write(", ")
+        self.write("))")
+
+    @optimizeconst
+    def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None:
+        self.write("(")
+        self.visit(node.expr, frame)
+        for op in node.ops:
+            self.visit(op, frame)
+        self.write(")")
+
+    def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None:
+        self.write(f" {operators[node.op]} ")
+        self.visit(node.expr, frame)
+
+    @optimizeconst
+    def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+
+        self.write("environment.getattr(")
+        self.visit(node.node, frame)
+        self.write(f", {node.attr!r})")
+
+        if self.environment.is_async:
+            self.write("))")
+
+    @optimizeconst
+    def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None:
+        # slices bypass the environment getitem method.
+        if isinstance(node.arg, nodes.Slice):
+            self.visit(node.node, frame)
+            self.write("[")
+            self.visit(node.arg, frame)
+            self.write("]")
+        else:
+            if self.environment.is_async:
+                self.write("(await auto_await(")
+
+            self.write("environment.getitem(")
+            self.visit(node.node, frame)
+            self.write(", ")
+            self.visit(node.arg, frame)
+            self.write(")")
+
+            if self.environment.is_async:
+                self.write("))")
+
+    def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None:
+        if node.start is not None:
+            self.visit(node.start, frame)
+        self.write(":")
+        if node.stop is not None:
+            self.visit(node.stop, frame)
+        if node.step is not None:
+            self.write(":")
+            self.visit(node.step, frame)
+
+    @contextmanager
+    def _filter_test_common(
+        self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool
+    ) -> t.Iterator[None]:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+
+        if is_filter:
+            self.write(f"{self.filters[node.name]}(")
+            func = self.environment.filters.get(node.name)
+        else:
+            self.write(f"{self.tests[node.name]}(")
+            func = self.environment.tests.get(node.name)
+
+        # When inside an If or CondExpr frame, allow the filter to be
+        # undefined at compile time and only raise an error if it's
+        # actually called at runtime. See pull_dependencies.
+        if func is None and not frame.soft_frame:
+            type_name = "filter" if is_filter else "test"
+            self.fail(f"No {type_name} named {node.name!r}.", node.lineno)
+
+        pass_arg = {
+            _PassArg.context: "context",
+            _PassArg.eval_context: "context.eval_ctx",
+            _PassArg.environment: "environment",
+        }.get(
+            _PassArg.from_obj(func)  # type: ignore
+        )
+
+        if pass_arg is not None:
+            self.write(f"{pass_arg}, ")
+
+        # Back to the visitor function to handle visiting the target of
+        # the filter or test.
+        yield
+
+        self.signature(node, frame)
+        self.write(")")
+
+        if self.environment.is_async:
+            self.write("))")
+
+    @optimizeconst
+    def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None:
+        with self._filter_test_common(node, frame, True):
+            # if the filter node is None we are inside a filter block
+            # and want to write to the current buffer
+            if node.node is not None:
+                self.visit(node.node, frame)
+            elif frame.eval_ctx.volatile:
+                self.write(
+                    f"(Markup(concat({frame.buffer}))"
+                    f" if context.eval_ctx.autoescape else concat({frame.buffer}))"
+                )
+            elif frame.eval_ctx.autoescape:
+                self.write(f"Markup(concat({frame.buffer}))")
+            else:
+                self.write(f"concat({frame.buffer})")
+
+    @optimizeconst
+    def visit_Test(self, node: nodes.Test, frame: Frame) -> None:
+        with self._filter_test_common(node, frame, False):
+            self.visit(node.node, frame)
+
+    @optimizeconst
+    def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None:
+        frame = frame.soft()
+
+        def write_expr2() -> None:
+            if node.expr2 is not None:
+                self.visit(node.expr2, frame)
+                return
+
+            self.write(
+                f'cond_expr_undefined("the inline if-expression on'
+                f" {self.position(node)} evaluated to false and no else"
+                f' section was defined.")'
+            )
+
+        self.write("(")
+        self.visit(node.expr1, frame)
+        self.write(" if ")
+        self.visit(node.test, frame)
+        self.write(" else ")
+        write_expr2()
+        self.write(")")
+
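+    # Example (approximate): `{{ x if y }}` with no `else` compiles to
+    #     (l_0_x if l_0_y else cond_expr_undefined("the inline
+    #     if-expression on line 1 evaluated to false and no else section
+    #     was defined."))
+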
+    @optimizeconst
+    def visit_Call(
+        self, node: nodes.Call, frame: Frame, forward_caller: bool = False
+    ) -> None:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+        if self.environment.sandboxed:
+            self.write("environment.call(context, ")
+        else:
+            self.write("context.call(")
+        self.visit(node.node, frame)
+        extra_kwargs = {"caller": "caller"} if forward_caller else None
+        loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {}
+        block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {}
+        if extra_kwargs:
+            extra_kwargs.update(loop_kwargs, **block_kwargs)
+        elif loop_kwargs or block_kwargs:
+            extra_kwargs = dict(loop_kwargs, **block_kwargs)
+        self.signature(node, frame, extra_kwargs)
+        self.write(")")
+        if self.environment.is_async:
+            self.write("))")
+
+    def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None:
+        self.write(node.key + "=")
+        self.visit(node.value, frame)
+
+    # -- Unused nodes for extensions
+
+    def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None:
+        self.write("Markup(")
+        self.visit(node.expr, frame)
+        self.write(")")
+
+    def visit_MarkSafeIfAutoescape(
+        self, node: nodes.MarkSafeIfAutoescape, frame: Frame
+    ) -> None:
+        self.write("(Markup if context.eval_ctx.autoescape else identity)(")
+        self.visit(node.expr, frame)
+        self.write(")")
+
+    def visit_EnvironmentAttribute(
+        self, node: nodes.EnvironmentAttribute, frame: Frame
+    ) -> None:
+        self.write("environment." + node.name)
+
+    def visit_ExtensionAttribute(
+        self, node: nodes.ExtensionAttribute, frame: Frame
+    ) -> None:
+        self.write(f"environment.extensions[{node.identifier!r}].{node.name}")
+
+    def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None:
+        self.write(self.import_aliases[node.importname])
+
+    def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None:
+        self.write(node.name)
+
+    def visit_ContextReference(
+        self, node: nodes.ContextReference, frame: Frame
+    ) -> None:
+        self.write("context")
+
+    def visit_DerivedContextReference(
+        self, node: nodes.DerivedContextReference, frame: Frame
+    ) -> None:
+        self.write(self.derive_context(frame))
+
+    def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None:
+        self.writeline("continue", node)
+
+    def visit_Break(self, node: nodes.Break, frame: Frame) -> None:
+        self.writeline("break", node)
+
+    def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None:
+        scope_frame = frame.inner()
+        scope_frame.symbols.analyze_node(node)
+        self.enter_frame(scope_frame)
+        self.blockvisit(node.body, scope_frame)
+        self.leave_frame(scope_frame)
+
+    def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None:
+        ctx = self.temporary_identifier()
+        self.writeline(f"{ctx} = {self.derive_context(frame)}")
+        self.writeline(f"{ctx}.vars = ")
+        self.visit(node.context, frame)
+        self.push_context_reference(ctx)
+
+        scope_frame = frame.inner(isolated=True)
+        scope_frame.symbols.analyze_node(node)
+        self.enter_frame(scope_frame)
+        self.blockvisit(node.body, scope_frame)
+        self.leave_frame(scope_frame)
+        self.pop_context_reference()
+
+    def visit_EvalContextModifier(
+        self, node: nodes.EvalContextModifier, frame: Frame
+    ) -> None:
+        for keyword in node.options:
+            self.writeline(f"context.eval_ctx.{keyword.key} = ")
+            self.visit(keyword.value, frame)
+            try:
+                val = keyword.value.as_const(frame.eval_ctx)
+            except nodes.Impossible:
+                frame.eval_ctx.volatile = True
+            else:
+                setattr(frame.eval_ctx, keyword.key, val)
+
+    def visit_ScopedEvalContextModifier(
+        self, node: nodes.ScopedEvalContextModifier, frame: Frame
+    ) -> None:
+        old_ctx_name = self.temporary_identifier()
+        saved_ctx = frame.eval_ctx.save()
+        self.writeline(f"{old_ctx_name} = context.eval_ctx.save()")
+        self.visit_EvalContextModifier(node, frame)
+        for child in node.body:
+            self.visit(child, frame)
+        frame.eval_ctx.revert(saved_ctx)
+        self.writeline(f"context.eval_ctx.revert({old_ctx_name})")
diff --git a/venv/lib/python3.8/site-packages/jinja2/constants.py b/venv/lib/python3.8/site-packages/jinja2/constants.py
new file mode 100644
index 0000000..41a1c23
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/constants.py
@@ -0,0 +1,20 @@
+#: list of lorem ipsum words used by the lipsum() helper function
+LOREM_IPSUM_WORDS = """\
+a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
+auctor augue bibendum blandit class commodo condimentum congue consectetuer
+consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
+diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
+elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
+faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
+hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
+justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
+luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
+mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
+nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
+penatibus per pharetra phasellus placerat platea porta porttitor posuere
+potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
+ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
+sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
+tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
+ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
+viverra volutpat vulputate"""
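+
+
+# Illustrative usage sketch (not part of upstream Jinja): these words feed
+# ``jinja2.utils.generate_lorem_ipsum``, which templates reach through the
+# default ``lipsum()`` global:
+#
+#     >>> from jinja2 import Environment
+#     >>> html = Environment().from_string("{{ lipsum(n=2) }}").render()
+#     >>> html.count("<p>")  # two HTML paragraphs of placeholder text
+#     2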
diff --git a/venv/lib/python3.8/site-packages/jinja2/debug.py b/venv/lib/python3.8/site-packages/jinja2/debug.py
new file mode 100644
index 0000000..7ed7e92
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/debug.py
@@ -0,0 +1,191 @@
+import sys
+import typing as t
+from types import CodeType
+from types import TracebackType
+
+from .exceptions import TemplateSyntaxError
+from .utils import internal_code
+from .utils import missing
+
+if t.TYPE_CHECKING:
+    from .runtime import Context
+
+
+def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException:
+    """Rewrite the current exception to replace any tracebacks from
+    within compiled template code with tracebacks that look like they
+    came from the template source.
+
+    This must be called within an ``except`` block.
+
+    :param source: For ``TemplateSyntaxError``, the original source if
+        known.
+    :return: The original exception with the rewritten traceback.
+    """
+    _, exc_value, tb = sys.exc_info()
+    exc_value = t.cast(BaseException, exc_value)
+    tb = t.cast(TracebackType, tb)
+
+    if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
+        exc_value.translated = True
+        exc_value.source = source
+        # Remove the old traceback, otherwise the frames from the
+        # compiler still show up.
+        exc_value.with_traceback(None)
+        # Outside of runtime, so the frame isn't executing template
+        # code, but it still needs to point at the template.
+        tb = fake_traceback(
+            exc_value, None, exc_value.filename or "", exc_value.lineno
+        )
+    else:
+        # Skip the frame for the render function.
+        tb = tb.tb_next
+
+    stack = []
+
+    # Build the stack of traceback objects, replacing any in template
+    # code with the source file and line information.
+    while tb is not None:
+        # Skip frames decorated with @internalcode. These are internal
+        # calls that aren't useful in template debugging output.
+        if tb.tb_frame.f_code in internal_code:
+            tb = tb.tb_next
+            continue
+
+        template = tb.tb_frame.f_globals.get("__jinja_template__")
+
+        if template is not None:
+            lineno = template.get_corresponding_lineno(tb.tb_lineno)
+            fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
+            stack.append(fake_tb)
+        else:
+            stack.append(tb)
+
+        tb = tb.tb_next
+
+    tb_next = None
+
+    # Assign tb_next in reverse to avoid circular references.
+    for tb in reversed(stack):
+        tb.tb_next = tb_next
+        tb_next = tb
+
+    return exc_value.with_traceback(tb_next)
+
+
+def fake_traceback(  # type: ignore
+    exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int
+) -> TracebackType:
+    """Produce a new traceback object that looks like it came from the
+    template source instead of the compiled code. The filename, line
+    number, and location name will point to the template, and the local
+    variables will be the current template context.
+
+    :param exc_value: The original exception to be re-raised to create
+        the new traceback.
+    :param tb: The original traceback to get the local variables and
+        code info from.
+    :param filename: The template filename.
+    :param lineno: The line number in the template source.
+    """
+    if tb is not None:
+        # Replace the real locals with the context that would be
+        # available at that point in the template.
+        locals = get_template_locals(tb.tb_frame.f_locals)
+        locals.pop("__jinja_exception__", None)
+    else:
+        locals = {}
+
+    globals = {
+        "__name__": filename,
+        "__file__": filename,
+        "__jinja_exception__": exc_value,
+    }
+    # Raise an exception at the correct line number.
+    code: CodeType = compile(
+        "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec"
+    )
+
+    # Build a new code object that points to the template file and
+    # replaces the location with a block name.
+    location = "template"
+
+    if tb is not None:
+        function = tb.tb_frame.f_code.co_name
+
+        if function == "root":
+            location = "top-level template code"
+        elif function.startswith("block_"):
+            location = f"block {function[6:]!r}"
+
+    if sys.version_info >= (3, 8):
+        code = code.replace(co_name=location)
+    else:
+        code = CodeType(
+            code.co_argcount,
+            code.co_kwonlyargcount,
+            code.co_nlocals,
+            code.co_stacksize,
+            code.co_flags,
+            code.co_code,
+            code.co_consts,
+            code.co_names,
+            code.co_varnames,
+            code.co_filename,
+            location,
+            code.co_firstlineno,
+            code.co_lnotab,
+            code.co_freevars,
+            code.co_cellvars,
+        )
+
+    # Execute the new code, which is guaranteed to raise, and return
+    # the new traceback without this frame.
+    try:
+        exec(code, globals, locals)
+    except BaseException:
+        return sys.exc_info()[2].tb_next  # type: ignore
+
+
+def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]:
+    """Based on the runtime locals, get the context that would be
+    available at that point in the template.
+    """
+    # Start with the current template context.
+    ctx: "t.Optional[Context]" = real_locals.get("context")
+
+    if ctx is not None:
+        data: t.Dict[str, t.Any] = ctx.get_all().copy()
+    else:
+        data = {}
+
+    # Might be in a derived context that only sets local variables
+    # rather than pushing a context. Local variables follow the scheme
+    # l_depth_name. Find the highest-depth local that has a value for
+    # each name.
+    local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {}
+
+    for name, value in real_locals.items():
+        if not name.startswith("l_") or value is missing:
+            # Not a template variable, or no longer relevant.
+            continue
+
+        try:
+            _, depth_str, name = name.split("_", 2)
+            depth = int(depth_str)
+        except ValueError:
+            continue
+
+        cur_depth = local_overrides.get(name, (-1,))[0]
+
+        if cur_depth < depth:
+            local_overrides[name] = (depth, value)
+
+    # Modify the context with any derived context.
+    for name, (_, value) in local_overrides.items():
+        if value is missing:
+            data.pop(name, None)
+        else:
+            data[name] = value
+
+    return data
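+
+
+# Illustrative sketch (not part of upstream Jinja): given the
+# ``l_<depth>_<name>`` scheme above, the deepest local wins when
+# reconstructing the template context:
+#
+#     >>> get_template_locals({"l_0_item": "outer", "l_1_item": "inner"})
+#     {'item': 'inner'}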
diff --git a/venv/lib/python3.8/site-packages/jinja2/defaults.py b/venv/lib/python3.8/site-packages/jinja2/defaults.py
new file mode 100644
index 0000000..638cad3
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/defaults.py
@@ -0,0 +1,48 @@
+import typing as t
+
+from .filters import FILTERS as DEFAULT_FILTERS  # noqa: F401
+from .tests import TESTS as DEFAULT_TESTS  # noqa: F401
+from .utils import Cycler
+from .utils import generate_lorem_ipsum
+from .utils import Joiner
+from .utils import Namespace
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+# defaults for the parser / lexer
+BLOCK_START_STRING = "{%"
+BLOCK_END_STRING = "%}"
+VARIABLE_START_STRING = "{{"
+VARIABLE_END_STRING = "}}"
+COMMENT_START_STRING = "{#"
+COMMENT_END_STRING = "#}"
+LINE_STATEMENT_PREFIX: t.Optional[str] = None
+LINE_COMMENT_PREFIX: t.Optional[str] = None
+TRIM_BLOCKS = False
+LSTRIP_BLOCKS = False
+NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n"
+KEEP_TRAILING_NEWLINE = False
+
+# default filters, tests and namespace
+
+DEFAULT_NAMESPACE = {
+    "range": range,
+    "dict": dict,
+    "lipsum": generate_lorem_ipsum,
+    "cycler": Cycler,
+    "joiner": Joiner,
+    "namespace": Namespace,
+}
+
+# default policies
+DEFAULT_POLICIES: t.Dict[str, t.Any] = {
+    "compiler.ascii_str": True,
+    "urlize.rel": "noopener",
+    "urlize.target": None,
+    "urlize.extra_schemes": None,
+    "truncate.leeway": 5,
+    "json.dumps_function": None,
+    "json.dumps_kwargs": {"sort_keys": True},
+    "ext.i18n.trimmed": False,
+}
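+
+
+# Illustrative sketch (not part of upstream Jinja): the namespace entries
+# become template globals, and the policies dict is copied onto each
+# Environment, where single entries can be overridden:
+#
+#     >>> from jinja2 import Environment
+#     >>> env = Environment()
+#     >>> env.policies["truncate.leeway"] = 0  # make |truncate exact
+#     >>> env.from_string("{{ cycler('a', 'b').next() }}").render()
+#     'a'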
diff --git a/venv/lib/python3.8/site-packages/jinja2/environment.py b/venv/lib/python3.8/site-packages/jinja2/environment.py
new file mode 100644
index 0000000..ea04e8b
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/jinja2/environment.py
@@ -0,0 +1,1667 @@
+"""Classes for managing templates and their runtime and compile time
+options.
+"""
+import os
+import typing
+import typing as t
+import weakref
+from collections import ChainMap
+from functools import lru_cache
+from functools import partial
+from functools import reduce
+from types import CodeType
+
+from markupsafe import Markup
+
+from . import nodes
+from .compiler import CodeGenerator
+from .compiler import generate
+from .defaults import BLOCK_END_STRING
+from .defaults import BLOCK_START_STRING
+from .defaults import COMMENT_END_STRING
+from .defaults import COMMENT_START_STRING
+from .defaults import DEFAULT_FILTERS
+from .defaults import DEFAULT_NAMESPACE
+from .defaults import DEFAULT_POLICIES
+from .defaults import DEFAULT_TESTS
+from .defaults import KEEP_TRAILING_NEWLINE
+from .defaults import LINE_COMMENT_PREFIX
+from .defaults import LINE_STATEMENT_PREFIX
+from .defaults import LSTRIP_BLOCKS
+from .defaults import NEWLINE_SEQUENCE
+from .defaults import TRIM_BLOCKS
+from .defaults import VARIABLE_END_STRING
+from .defaults import VARIABLE_START_STRING
+from .exceptions import TemplateNotFound
+from .exceptions import TemplateRuntimeError
+from .exceptions import TemplatesNotFound
+from .exceptions import TemplateSyntaxError
+from .exceptions import UndefinedError
+from .lexer import get_lexer
+from .lexer import Lexer
+from .lexer import TokenStream
+from .nodes import EvalContext
+from .parser import Parser
+from .runtime import Context
+from .runtime import new_context
+from .runtime import Undefined
+from .utils import _PassArg
+from .utils import concat
+from .utils import consume
+from .utils import import_string
+from .utils import internalcode
+from .utils import LRUCache
+from .utils import missing
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .bccache import BytecodeCache
+    from .ext import Extension
+    from .loaders import BaseLoader
+
+_env_bound = t.TypeVar("_env_bound", bound="Environment")
+
+
+# for direct template usage we have up to ten living environments
+@lru_cache(maxsize=10)
+def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound:
+    """Return a new spontaneous environment. A spontaneous environment
+    is used for templates created directly rather than through an
+    existing environment.
+
+    :param cls: Environment class to create.
+    :param args: Positional arguments passed to environment.
+    """
+    env = cls(*args)
+    env.shared = True
+    return env
+
+
+def create_cache(
+    size: int,
+) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]:
+    """Return the cache class for the given size."""
+    if size == 0:
+        return None
+
+    if size < 0:
+        return {}
+
+    return LRUCache(size)  # type: ignore
+
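+# Illustrative sketch (not part of upstream Jinja) of the size contract
+# implemented above:
+#
+#     >>> create_cache(0) is None          # caching disabled
+#     True
+#     >>> type(create_cache(-1)) is dict   # unbounded cache
+#     True
+#     >>> create_cache(5).capacity         # LRU cache of at most 5 templates
+#     5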
+
+def copy_cache(
+    cache: t.Optional[t.MutableMapping],
+) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]:
+    """Create an empty copy of the given cache."""
+    if cache is None:
+        return None
+
+    if type(cache) is dict:
+        return {}
+
+    return LRUCache(cache.capacity)  # type: ignore
+
+
+def load_extensions(
+    environment: "Environment",
+    extensions: t.Sequence[t.Union[str, t.Type["Extension"]]],
+) -> t.Dict[str, "Extension"]:
+    """Load the extensions from the list and bind it to the environment.
+    Returns a dict of instantiated extensions.
+    """
+    result = {}
+
+    for extension in extensions:
+        if isinstance(extension, str):
+            extension = t.cast(t.Type["Extension"], import_string(extension))
+
+        result[extension.identifier] = extension(environment)
+
+    return result
+
+
+def _environment_config_check(environment: "Environment") -> "Environment":
+    """Perform a sanity check on the environment."""
+    assert issubclass(
+        environment.undefined, Undefined
+    ), "'undefined' must be a subclass of 'jinja2.Undefined'."
+    assert (
+        environment.block_start_string
+        != environment.variable_start_string
+        != environment.comment_start_string
+    ), "block, variable and comment start strings must be different."
+    assert environment.newline_sequence in {
+        "\r",
+        "\r\n",
+        "\n",
+    }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'."
+    return environment
+
+
+class Environment:
+    r"""The core component of Jinja is the `Environment`.  It contains
+    important shared variables like configuration, filters, tests,
+    globals and others.  Instances of this class may be modified if
+    they are not shared and if no template was loaded so far.
+    Modifications on environments after the first template was loaded
+    will lead to surprising effects and undefined behavior.
+
+    Here are the possible initialization parameters:
+
+        `block_start_string`
+            The string marking the beginning of a block.  Defaults to ``'{%'``.
+
+        `block_end_string`
+            The string marking the end of a block.  Defaults to ``'%}'``.
+
+        `variable_start_string`
+            The string marking the beginning of a print statement.
+            Defaults to ``'{{'``.
+
+        `variable_end_string`
+            The string marking the end of a print statement.  Defaults to
+            ``'}}'``.
+
+        `comment_start_string`
+            The string marking the beginning of a comment.  Defaults to ``'{#'``.
+
+        `comment_end_string`
+            The string marking the end of a comment.  Defaults to ``'#}'``.
+
+        `line_statement_prefix`
+            If given and a string, this will be used as prefix for line based
+            statements.  See also :ref:`line-statements`.
+
+        `line_comment_prefix`
+            If given and a string, this will be used as prefix for line based
+            comments.  See also :ref:`line-statements`.
+
+            .. versionadded:: 2.2
+
+        `trim_blocks`
+            If this is set to ``True`` the first newline after a block is
+            removed (block, not variable tag!).  Defaults to `False`.
+
+        `lstrip_blocks`
+            If this is set to ``True`` leading spaces and tabs are stripped
+            from the start of a line to a block.  Defaults to `False`.
+
+        `newline_sequence`
+            The sequence that starts a newline.  Must be one of ``'\r'``,
+            ``'\n'`` or ``'\r\n'``.  The default is ``'\n'`` which is a
+            useful default for Linux and OS X systems as well as web
+            applications.
+
+        `keep_trailing_newline`
+            Preserve the trailing newline when rendering templates.
+            The default is ``False``, which causes a single newline,
+            if present, to be stripped from the end of the template.
+
+            .. versionadded:: 2.7
+
+        `extensions`
+            List of Jinja extensions to use.  This can either be import paths
+            as strings or extension classes.  For more information have a
+            look at :ref:`the extensions documentation <jinja-extensions>`.
+
+        `optimized`
+            Should the optimizer be enabled?  Defaults to ``True``.
+
+        `undefined`
+            :class:`Undefined` or a subclass of it that is used to represent
+            undefined values in the template.
+
+        `finalize`
+            A callable that can be used to process the result of a variable
+            expression before it is output.  For example one can convert
+            ``None`` implicitly into an empty string here.
+
+        `autoescape`
+            If set to ``True`` the XML/HTML autoescaping feature is enabled by
+            default.  For more details about autoescaping see
+            :class:`~markupsafe.Markup`.  As of Jinja 2.4 this can also
+            be a callable that is passed the template name and has to
+            return ``True`` or ``False`` depending on whether autoescaping
+            should be enabled by default.
+
+            .. versionchanged:: 2.4
+               `autoescape` can now be a function
+
+        `loader`
+            The template loader for this environment.
+
+        `cache_size`
+            The size of the cache.  By default this is ``400``, which means
+            that if more than 400 templates are loaded the loader will clean
+            out the least recently used template.  If the cache size is set to
+            ``0``, templates are recompiled all the time; if the cache size is
+            ``-1``, the cache will not be cleaned.
+
+            .. versionchanged:: 2.8
+               The cache size was increased to 400 from a low 50.
+
+        `auto_reload`
+            Some loaders load templates from locations where the template
+            sources may change (i.e. the file system or a database).  If
+            ``auto_reload`` is set to ``True`` (the default), every time a
+            template is requested the loader checks whether the source changed
+            and, if so, reloads the template.  This can be disabled for
+            higher performance.
+
+        `bytecode_cache`
+            If set to a bytecode cache object, this object will provide a
+            cache for the internal Jinja bytecode so that templates don't
+            have to be parsed if they were not changed.
+
+            See :ref:`bytecode-cache` for more information.
+
+        `enable_async`
+            If set to true this enables async template execution which
+            allows using async functions and generators.
+    """
+
+    #: if this environment is sandboxed.  Modifying this variable won't make
+    #: the environment sandboxed though.  For a real sandboxed environment
+    #: have a look at jinja2.sandbox.  This flag alone controls the code
+    #: generation by the compiler.
+    sandboxed = False
+
+    #: True if the environment is just an overlay
+    overlayed = False
+
+    #: the environment this environment is linked to if it is an overlay
+    linked_to: t.Optional["Environment"] = None
+
+    #: shared environments have this set to `True`.  A shared environment
+    #: must not be modified
+    shared = False
+
+    #: the class that is used for code generation.  See
+    #: :class:`~jinja2.compiler.CodeGenerator` for more information.
+    code_generator_class: t.Type["CodeGenerator"] = CodeGenerator
+
+    concat = "".join
+
+    #: the context class that is used for templates.  See
+    #: :class:`~jinja2.runtime.Context` for more information.
+    context_class: t.Type[Context] = Context
+
+    template_class: t.Type["Template"]
+
+    def __init__(
+        self,
+        block_start_string: str = BLOCK_START_STRING,
+        block_end_string: str = BLOCK_END_STRING,
+        variable_start_string: str = VARIABLE_START_STRING,
+        variable_end_string: str = VARIABLE_END_STRING,
+        comment_start_string: str = COMMENT_START_STRING,
+        comment_end_string: str = COMMENT_END_STRING,
+        line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX,
+        line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX,
+        trim_blocks: bool = TRIM_BLOCKS,
+        lstrip_blocks: bool = LSTRIP_BLOCKS,
+        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE,
+        keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE,
+        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (),
+        optimized: bool = True,
+        undefined: t.Type[Undefined] = Undefined,
+        finalize: t.Optional[t.Callable[..., t.Any]] = None,
+        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False,
+        loader: t.Optional["BaseLoader"] = None,
+        cache_size: int = 400,
+        auto_reload: bool = True,
+        bytecode_cache: t.Optional["BytecodeCache"] = None,
+        enable_async: bool = False,
+    ):
+        # !!Important notice!!
+        #   The constructor accepts quite a few arguments that should be
+        #   passed by keyword rather than position.  However it's important to
+        #   not change the order of arguments because it's used at least
+        #   internally in those cases:
+        #       -   spontaneous environments (i18n extension and Template)
+        #       -   unittests
+        #   If parameter changes are required, only add parameters at the end
+        #   and don't change the order (or the defaults!) of the existing
+        #   arguments.
+
+        # lexer / parser information
+        self.block_start_string = block_start_string
+        self.block_end_string = block_end_string
+        self.variable_start_string = variable_start_string
+        self.variable_end_string = variable_end_string
+        self.comment_start_string = comment_start_string
+        self.comment_end_string = comment_end_string
+        self.line_statement_prefix = line_statement_prefix
+        self.line_comment_prefix = line_comment_prefix
+        self.trim_blocks = trim_blocks
+        self.lstrip_blocks = lstrip_blocks
+        self.newline_sequence = newline_sequence
+        self.keep_trailing_newline = keep_trailing_newline
+
+        # runtime information
+        self.undefined: t.Type[Undefined] = undefined
+        self.optimized = optimized
+        self.finalize = finalize
+        self.autoescape = autoescape
+
+        # defaults
+        self.filters = DEFAULT_FILTERS.copy()
+        self.tests = DEFAULT_TESTS.copy()
+        self.globals = DEFAULT_NAMESPACE.copy()
+
+        # set the loader provided
+        self.loader = loader
+        self.cache = create_cache(cache_size)
+        self.bytecode_cache = bytecode_cache
+        self.auto_reload = auto_reload
+
+        # configurable policies
+        self.policies = DEFAULT_POLICIES.copy()
+
+        # load extensions
+        self.extensions = load_extensions(self, extensions)
+
+        self.is_async = enable_async
+        _environment_config_check(self)
+
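+    # Illustrative usage sketch (not part of upstream Jinja):
+    #
+    #     >>> env = Environment(autoescape=True, trim_blocks=True)
+    #     >>> env.from_string("Hello {{ who }}!").render(who="<world>")
+    #     'Hello &lt;world&gt;!'
+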
+    def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None:
+        """Adds an extension after the environment was created.
+
+        .. versionadded:: 2.5
+        """
+        self.extensions.update(load_extensions(self, [extension]))
+
+    def extend(self, **attributes: t.Any) -> None:
+        """Add the items to the instance of the environment if they do not exist
+        yet.  This is used by :ref:`extensions <writing-extensions>` to register
+        callbacks and configuration values without breaking inheritance.
+        """
+        for key, value in attributes.items():
+            if not hasattr(self, key):
+                setattr(self, key, value)
+
+    def overlay(
+        self,
+        block_start_string: str = missing,
+        block_end_string: str = missing,
+        variable_start_string: str = missing,
+        variable_end_string: str = missing,
+        comment_start_string: str = missing,
+        comment_end_string: str = missing,
+        line_statement_prefix: t.Optional[str] = missing,
+        line_comment_prefix: t.Optional[str] = missing,
+        trim_blocks: bool = missing,
+        lstrip_blocks: bool = missing,
+        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing,
+        keep_trailing_newline: bool = missing,
+        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing,
+        optimized: bool = missing,
+        undefined: t.Type[Undefined] = missing,
+        finalize: t.Optional[t.Callable[..., t.Any]] = missing,
+        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing,
+        loader: t.Optional["BaseLoader"] = missing,
+        cache_size: int = missing,
+        auto_reload: bool = missing,
+        bytecode_cache: t.Optional["BytecodeCache"] = missing,
+        enable_async: bool = False,
+    ) -> "Environment":
+        """Create a new overlay environment that shares all the data with the
+        current environment except for cache and the overridden attributes.
+        Extensions cannot be removed for an overlayed environment.  An overlayed
+        environment automatically gets all the extensions of the environment it
+        is linked to plus optional extra extensions.
+
+        Creating overlays should happen after the initial environment was set
+        up completely.  Not all attributes are truly linked; some are just
+        copied over, so modifications on the original environment may not shine
+        through.
+
+        .. versionchanged:: 3.1.2
+            Added the ``newline_sequence``, ``keep_trailing_newline``,
+            and ``enable_async`` parameters to match ``__init__``.
+        """
+        args = dict(locals())
+        del args["self"], args["cache_size"], args["extensions"], args["enable_async"]
+
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.overlayed = True
+        rv.linked_to = self
+
+        for key, value in args.items():
+            if value is not missing:
+                setattr(rv, key, value)
+
+        if cache_size is not missing:
+            rv.cache = create_cache(cache_size)
+        else:
+            rv.cache = copy_cache(self.cache)
+
+        rv.extensions = {}
+        for key, value in self.extensions.items():
+            rv.extensions[key] = value.bind(rv)
+        if extensions is not missing:
+            rv.extensions.update(load_extensions(rv, extensions))
+
+        if enable_async is not missing:
+            rv.is_async = enable_async
+
+        return _environment_config_check(rv)
+
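+    # Illustrative sketch (not part of upstream Jinja): an overlay shares
+    # everything with its parent except the attributes overridden here.
+    #
+    #     >>> env = Environment(autoescape=True)
+    #     >>> text_env = env.overlay(autoescape=False)
+    #     >>> text_env.linked_to is env
+    #     True
+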
+    @property
+    def lexer(self) -> Lexer:
+        """The lexer for this environment."""
+        return get_lexer(self)
+
+    def iter_extensions(self) -> t.Iterator["Extension"]:
+        """Iterates over the extensions by priority."""
+        return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
+
+    def getitem(
+        self, obj: t.Any, argument: t.Union[str, t.Any]
+    ) -> t.Union[t.Any, Undefined]:
+        """Get an item or attribute of an object but prefer the item."""
+        try:
+            return obj[argument]
+        except (AttributeError, TypeError, LookupError):
+            if isinstance(argument, str):
+                try:
+                    attr = str(argument)
+                except Exception:
+                    pass
+                else:
+                    try:
+                        return getattr(obj, attr)
+                    except AttributeError:
+                        pass
+            return self.undefined(obj=obj, name=argument)
+
+    def getattr(self, obj: t.Any, attribute: str) -> t.Any:
+        """Get an item or attribute of an object but prefer the attribute.
+        Unlike :meth:`getitem` the attribute *must* be a string.
+        """
+        try:
+            return getattr(obj, attribute)
+        except AttributeError:
+            pass
+        try:
+            return obj[attribute]
+        except (TypeError, LookupError, AttributeError):
+            return self.undefined(obj=obj, name=attribute)
+
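+    # Illustrative sketch (not part of upstream Jinja) of the lookup
+    # preference difference between the two methods above:
+    #
+    #     >>> env = Environment()
+    #     >>> env.getitem({"items": 1}, "items")  # the item wins
+    #     1
+    #     >>> callable(env.getattr({"items": 1}, "items"))  # dict.items wins
+    #     True
+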
+    def _filter_test_common(
+        self,
+        name: t.Union[str, Undefined],
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]],
+        kwargs: t.Optional[t.Mapping[str, t.Any]],
+        context: t.Optional[Context],
+        eval_ctx: t.Optional[EvalContext],
+        is_filter: bool,
+    ) -> t.Any:
+        if is_filter:
+            env_map = self.filters
+            type_name = "filter"
+        else:
+            env_map = self.tests
+            type_name = "test"
+
+        func = env_map.get(name)  # type: ignore
+
+        if func is None:
+            msg = f"No {type_name} named {name!r}."
+
+            if isinstance(name, Undefined):
+                try:
+                    name._fail_with_undefined_error()
+                except Exception as e:
+                    msg = f"{msg} ({e}; did you forget to quote the callable name?)"
+
+            raise TemplateRuntimeError(msg)
+
+        args = [value, *(args if args is not None else ())]
+        kwargs = kwargs if kwargs is not None else {}
+        pass_arg = _PassArg.from_obj(func)
+
+        if pass_arg is _PassArg.context:
+            if context is None:
+                raise TemplateRuntimeError(
+                    f"Attempted to invoke a context {type_name} without context."
+                )
+
+            args.insert(0, context)
+        elif pass_arg is _PassArg.eval_context:
+            if eval_ctx is None:
+                if context is not None:
+                    eval_ctx = context.eval_ctx
+                else:
+                    eval_ctx = EvalContext(self)
+
+            args.insert(0, eval_ctx)
+        elif pass_arg is _PassArg.environment:
+            args.insert(0, self)
+
+        return func(*args, **kwargs)
+
+    def call_filter(
+        self,
+        name: str,
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]] = None,
+        kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+        context: t.Optional[Context] = None,
+        eval_ctx: t.Optional[EvalContext] = None,
+    ) -> t.Any:
+        """Invoke a filter on a value the same way the compiler does.
+
+        This might return a coroutine if the filter is running from an
+        environment in async mode and the filter supports async
+        execution. It's your responsibility to await this if needed.
+
+        .. versionadded:: 2.7
+        """
+        return self._filter_test_common(
+            name, value, args, kwargs, context, eval_ctx, True
+        )
+
+    def call_test(
+        self,
+        name: str,
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]] = None,
+        kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+        context: t.Optional[Context] = None,
+        eval_ctx: t.Optional[EvalContext] = None,
+    ) -> t.Any:
+        """Invoke a test on a value the same way the compiler does.
+
+        This might return a coroutine if the test is running from an
+        environment in async mode and the test supports async execution.
+        It's your responsibility to await this if needed.
+
+        .. versionchanged:: 3.0
+            Tests support ``@pass_context``, etc. decorators. Added
+            the ``context`` and ``eval_ctx`` parameters.
+
+        .. versionadded:: 2.7
+        """
+        return self._filter_test_common(
+            name, value, args, kwargs, context, eval_ctx, False
+        )
+
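+    # Illustrative sketch (not part of upstream Jinja): both helpers invoke
+    # registered callables exactly as compiled template code would.
+    #
+    #     >>> env = Environment()
+    #     >>> env.call_filter("upper", "jinja")
+    #     'JINJA'
+    #     >>> env.call_test("odd", 3)
+    #     True
+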
+    @internalcode
+    def parse(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> nodes.Template:
+        """Parse the sourcecode and return the abstract syntax tree.  This
+        tree of nodes is used by the compiler to convert the template into
+        executable source- or bytecode.  This is useful for debugging or to
+        extract information from templates.
+
+        If you are :ref:`developing Jinja extensions <writing-extensions>`
+        this gives you a good overview of the node tree generated.
+        """
+        try:
+            return self._parse(source, name, filename)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source)
+
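+    # Illustrative sketch (not part of upstream Jinja): the returned AST can
+    # be inspected with helpers such as ``jinja2.meta``:
+    #
+    #     >>> from jinja2 import meta
+    #     >>> ast = Environment().parse("Hello {{ name }}!")
+    #     >>> meta.find_undeclared_variables(ast)
+    #     {'name'}
+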
+    def _parse(
+        self, source: str, name: t.Optional[str], filename: t.Optional[str]
+    ) -> nodes.Template:
+        """Internal parsing function used by `parse` and `compile`."""
+        return Parser(self, source, name, filename).parse()
+
+    def lex(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> t.Iterator[t.Tuple[int, str, str]]:
+        """Lex the given sourcecode and return a generator that yields
+        tokens as tuples in the form ``(lineno, token_type, value)``.
+        This can be useful for :ref:`extension development <writing-extensions>`
+        and debugging templates.
+
+        This does not perform preprocessing.  If you want the preprocessing
+        of the extensions to be applied you have to filter source through
+        the :meth:`preprocess` method.
+        """
+        source = str(source)
+        try:
+            return self.lexer.tokeniter(source, name, filename)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source)
+
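+    # Illustrative sketch (not part of upstream Jinja) of the raw token
+    # stream produced by ``lex``:
+    #
+    #     >>> next(Environment().lex("{{ x }}"))
+    #     (1, 'variable_begin', '{{')
+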
+    def preprocess(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> str:
+        """Preprocesses the source with all extensions.  This is automatically
+        called for all parsing and compiling methods but *not* for :meth:`lex`
+        because there you usually only want the actual source tokenized.
+        """
+        return reduce(
+            lambda s, e: e.preprocess(s, name, filename),
+            self.iter_extensions(),
+            str(source),
+        )
+
+    def _tokenize(
+        self,
+        source: str,
+        name: t.Optional[str],
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> TokenStream:
+        """Called by the parser to do the preprocessing and filtering
+        for all the extensions.  Returns a :class:`~jinja2.lexer.TokenStream`.
+        """
+        source = self.preprocess(source, name, filename)
+        stream = self.lexer.tokenize(source, name, filename, state)
+
+        for ext in self.iter_extensions():
+            stream = ext.filter_stream(stream)  # type: ignore
+
+            if not isinstance(stream, TokenStream):
+                stream = TokenStream(stream, name, filename)  # type: ignore
+
+        return stream
+
+    def _generate(
+        self,
+        source: nodes.Template,
+        name: t.Optional[str],
+        filename: t.Optional[str],
+        defer_init: bool = False,
+    ) -> str:
+        """Internal hook that can be overridden to hook a different generate
+        method in.
+
+        .. versionadded:: 2.5
+        """
+        return generate(  # type: ignore
+            source,
+            self,
+            name,
+            filename,
+            defer_init=defer_init,
+            optimized=self.optimized,
+        )
+
+    def _compile(self, source: str, filename: str) -> CodeType:
+        """Internal hook that can be overridden to hook a different compile
+        method in.
+
+        .. versionadded:: 2.5
+        """
+        return compile(source, filename, "exec")  # type: ignore
+
+    @typing.overload
+    def compile(  # type: ignore
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: "te.Literal[False]" = False,
+        defer_init: bool = False,
+    ) -> CodeType:
+        ...
+
+    @typing.overload
+    def compile(
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: "te.Literal[True]" = ...,
+        defer_init: bool = False,
+    ) -> str:
+        ...
+
+    @internalcode
+    def compile(
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: bool = False,
+        defer_init: bool = False,
+    ) -> t.Union[str, CodeType]:
+        """Compile a node or template source code.  The `name` parameter is
+        the load name of the template after it was joined using
+        :meth:`join_path` if necessary, not the filename on the file system.
+        The `filename` parameter is the estimated filename of the template on
+        the file system.  If the template came from a database or memory this
+        can be omitted.
+
+        The return value of this method is a Python code object.  If the `raw`
+        parameter is `True` the return value will be a string with Python
+        code equivalent to the bytecode returned otherwise.  This method is
+        mainly used internally.
+
+        `defer_init` is used internally to aid the module code generator.  This
+        allows the generated code to be imported without the global
+        environment variable being set.
+
+        .. versionadded:: 2.4
+           `defer_init` parameter added.
+        """
+        source_hint = None
+        try:
+            if isinstance(source, str):
+                source_hint = source
+                source = self._parse(source, name, filename)
+            source = self._generate(source, name, filename, defer_init=defer_init)
+            if raw:
+                return source
+            if filename is None:
+                filename = "