New build #508
340 changed files with 12797 additions and 5034 deletions

@@ -22,6 +22,7 @@ build apk:
- rm -rf ~/.buildozer/android/crystax-ndk-10.3.2/platforms/android-9
- ln -s ~/.buildozer/android/crystax-ndk-10.3.2/platforms/android-21 ~/.buildozer/android/crystax-ndk-10.3.2/platforms/android-9
- cp -f $CI_PROJECT_DIR/scripts/build-target-python.sh ~/.buildozer/android/crystax-ndk-10.3.2/build/tools/build-target-python.sh
- cp -f $CI_PROJECT_DIR/scripts/mangled-glibc-syscalls.h ~/.buildozer/android/crystax-ndk-10.3.2/platforms/android-21/arch-arm/usr/include/crystax/bionic/libc/include/sys/mangled-glibc-syscalls.h
- rm ~/.buildozer/android/crystax-ndk-10.3.2-linux-x86_64.tar.xz
- git secret reveal
- mv buildozer.spec.travis buildozer.spec
@@ -63,10 +63,49 @@ class SplashScreen extends React.PureComponent {
});
}

navigateToMain = () => {
const { navigation } = this.props;
const resetAction = StackActions.reset({
index: 0,
actions: [
NavigationActions.navigate({ routeName: 'Main'})
]
});
navigation.dispatch(resetAction);

const launchUrl = navigation.state.params.launchUrl || this.state.launchUrl;
if (launchUrl) {
if (launchUrl.startsWith('lbry://?verify=')) {
let verification = {};
try {
verification = JSON.parse(atob(launchUrl.substring(15)));
} catch (error) {
console.log(error);
}
if (verification.token && verification.recaptcha) {
AsyncStorage.setItem(Constants.KEY_SHOULD_VERIFY_EMAIL, 'true');
try {
verifyUserEmail(verification.token, verification.recaptcha);
} catch (error) {
const message = 'Invalid Verification Token';
verifyUserEmailFailure(message);
notify({ message });
}
} else {
notify({
message: 'Invalid Verification URI',
});
}
} else {
navigateToUri(navigation, launchUrl);
}
}
}

componentWillReceiveProps(nextProps) {
const {
emailToVerify,
navigation,
getSync,
setEmailToVerify,
verifyUserEmail,
verifyUserEmailFailure

@@ -81,41 +120,15 @@ class SplashScreen extends React.PureComponent {
}

// user is authenticated, navigate to the main view
const resetAction = StackActions.reset({
index: 0,
actions: [
NavigationActions.navigate({ routeName: 'Main'})
]
});
navigation.dispatch(resetAction);

const launchUrl = navigation.state.params.launchUrl || this.state.launchUrl;
if (launchUrl) {
if (launchUrl.startsWith('lbry://?verify=')) {
let verification = {};
try {
verification = JSON.parse(atob(launchUrl.substring(15)));
} catch (error) {
console.log(error);
}
if (verification.token && verification.recaptcha) {
AsyncStorage.setItem(Constants.KEY_SHOULD_VERIFY_EMAIL, 'true');
try {
verifyUserEmail(verification.token, verification.recaptcha);
} catch (error) {
const message = 'Invalid Verification Token';
verifyUserEmailFailure(message);
notify({ message });
}
} else {
notify({
message: 'Invalid Verification URI',
});
}
} else {
navigateToUri(navigation, launchUrl);
}
}
if (user.has_verified_email) {
NativeModules.UtilityModule.getSecureValue(Constants.KEY_FIRST_RUN_PASSWORD).then(walletPassword => {
getSync(walletPassword);
this.navigateToMain();
});
return;
}

this.navigateToMain();
});
});
}
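A note on the verification deep link handled above: everything after the 15-character lbry://?verify= prefix is a base64-encoded JSON object carrying token and recaptcha fields (both names appear in the diff). A minimal Python sketch of the same decoding step, purely illustrative and not part of the change:

import base64, json

def parse_verify_url(launch_url):
    # Strip the 'lbry://?verify=' prefix, base64-decode the rest, parse as JSON.
    prefix = 'lbry://?verify='
    if not launch_url.startswith(prefix):
        return None
    try:
        payload = json.loads(base64.b64decode(launch_url[len(prefix):]))
    except ValueError:
        # Bad base64 or bad JSON; the app just logs and falls through in this case.
        return None
    # Verification only proceeds when both fields are present.
    if isinstance(payload, dict) and payload.get('token') and payload.get('recaptcha'):
        return payload
    return None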
@@ -36,7 +36,7 @@ version.filename = %(source.dir)s/main.py

# (list) Application requirements
# comma seperated e.g. requirements = sqlite3,kivy
requirements = python3crystax, openssl, sqlite3, hostpython3crystax, android, distro, pyjnius, certifi==2018.4.16, constantly, incremental, miniupnpc==1.9, gmpy, appdirs==1.4.3, argparse==1.2.1, docopt, base58==1.0.0, colorama==0.3.7, dnspython==1.12.0, ecdsa==0.13, envparse, jsonrpclib==0.1.7, jsonschema==2.5.1, pbkdf2, pyyaml, qrcode==5.2.2, requests, seccure==0.3.1.3, attrs==18.1.0, pyasn1, pyasn1-modules, service_identity==16.0.0, six==1.9.0, txJSON-RPC, zope.interface==4.3.3, protobuf==3.6.1, keyring==10.4.0, txupnp, git+https://github.com/lbryio/lbryschema.git#egg=lbryschema, git+https://github.com/lbryio/lbry.git@v0.32.4#egg=lbrynet, git+https://github.com/lbryio/aioupnp.git#egg=aioupnp, asn1crypto, treq==17.8.0, funcsigs, mock, pbr, pyopenssl, twisted, idna, Automat, hyperlink, PyHamcrest, netifaces, cryptography, aiohttp==3.5.4, multidict==4.5.2, idna_ssl==1.1.0, typing_extensions==3.6.5, yarl, chardet==3.0.4, async_timeout==3.0.1, aiorpcX==0.9.0, git+https://github.com/lbryio/torba#egg=torba, coincurve
requirements = python3crystax, openssl, sqlite3, hostpython3crystax, android, distro, pyjnius, certifi==2018.4.16, constantly, incremental, appdirs==1.4.3, argparse==1.2.1, docopt, base58==1.0.0, colorama==0.3.7, dnspython==1.12.0, ecdsa==0.13, envparse, jsonrpclib==0.1.7, jsonschema==2.5.1, pbkdf2, pyyaml, qrcode==5.2.2, requests, seccure==0.3.1.3, attrs==18.1.0, pyasn1, pyasn1-modules, service_identity==16.0.0, six==1.9.0, txJSON-RPC, zope.interface==4.3.3, protobuf==3.6.1, keyring==10.4.0, txupnp, git+https://github.com/lbryio/lbryschema.git#egg=lbryschema, git+https://github.com/lbryio/lbry.git@v0.34.0#egg=lbrynet, git+https://github.com/lbryio/aioupnp.git#egg=aioupnp, asn1crypto, treq==17.8.0, funcsigs, mock, pbr, pyopenssl, twisted, idna, Automat, hyperlink, PyHamcrest, netifaces, cryptography, aiohttp==3.5.4, multidict==4.5.2, idna_ssl==1.1.0, typing_extensions==3.6.5, yarl, chardet==3.0.4, async_timeout==3.0.1, aiorpcX==0.9.0, git+https://github.com/lbryio/torba#egg=torba, coincurve

# (str) Custom source folders for requirements
# Sets custom source for any requirements with recipes
@@ -36,7 +36,7 @@ version.filename = %(source.dir)s/main.py

# (list) Application requirements
# comma seperated e.g. requirements = sqlite3,kivy
requirements = python3crystax, openssl, sqlite3, hostpython3crystax, android, distro, pyjnius, certifi==2018.4.16, constantly, incremental, miniupnpc==1.9, gmpy, appdirs==1.4.3, argparse==1.2.1, docopt, base58==1.0.0, colorama==0.3.7, dnspython==1.12.0, ecdsa==0.13, envparse, jsonrpclib==0.1.7, jsonschema==2.5.1, pbkdf2, pyyaml, qrcode==5.2.2, requests, seccure==0.3.1.3, attrs==18.1.0, pyasn1, pyasn1-modules, service_identity==16.0.0, six==1.9.0, txJSON-RPC, zope.interface==4.3.3, protobuf==3.6.1, keyring==10.4.0, txupnp, git+https://github.com/lbryio/lbryschema.git#egg=lbryschema, git+https://github.com/lbryio/lbry.git@v0.32.4#egg=lbrynet, git+https://github.com/lbryio/aioupnp.git#egg=aioupnp, asn1crypto, treq==17.8.0, funcsigs, mock, pbr, pyopenssl, twisted, idna, Automat, hyperlink, PyHamcrest, netifaces, cryptography, aiohttp==3.5.4, multidict==4.5.2, idna_ssl==1.1.0, typing_extensions==3.6.5, yarl, chardet==3.0.4, async_timeout==3.0.1, aiorpcX==0.9.0, git+https://github.com/lbryio/torba#egg=torba, coincurve
requirements = python3crystax, openssl, sqlite3, hostpython3crystax, android, distro, pyjnius, certifi==2018.4.16, constantly, incremental, appdirs==1.4.3, argparse==1.2.1, docopt, base58==1.0.0, colorama==0.3.7, dnspython==1.12.0, ecdsa==0.13, envparse, jsonrpclib==0.1.7, jsonschema==2.5.1, pbkdf2, pyyaml, qrcode==5.2.2, requests, seccure==0.3.1.3, attrs==18.1.0, pyasn1, pyasn1-modules, service_identity==16.0.0, six==1.9.0, txJSON-RPC, zope.interface==4.3.3, protobuf==3.6.1, keyring==10.4.0, txupnp, git+https://github.com/lbryio/lbryschema.git#egg=lbryschema, git+https://github.com/lbryio/lbry.git@v0.34.0#egg=lbrynet, git+https://github.com/lbryio/aioupnp.git#egg=aioupnp, asn1crypto, treq==17.8.0, funcsigs, mock, pbr, pyopenssl, twisted, idna, Automat, hyperlink, PyHamcrest, netifaces, cryptography, aiohttp==3.5.4, multidict==4.5.2, idna_ssl==1.1.0, typing_extensions==3.6.5, yarl, chardet==3.0.4, async_timeout==3.0.1, aiorpcX==0.9.0, git+https://github.com/lbryio/torba#egg=torba, coincurve

# (str) Custom source folders for requirements
# Sets custom source for any requirements with recipes
@@ -36,7 +36,7 @@ version.filename = %(source.dir)s/main.py

# (list) Application requirements
# comma seperated e.g. requirements = sqlite3,kivy
requirements = python3crystax, openssl, sqlite3, hostpython3crystax, android, distro, pyjnius, certifi==2018.4.16, constantly, incremental, miniupnpc==1.9, gmpy, appdirs==1.4.3, argparse==1.2.1, docopt, base58==1.0.0, colorama==0.3.7, dnspython==1.12.0, ecdsa==0.13, envparse, jsonrpclib==0.1.7, jsonschema==2.5.1, pbkdf2, pyyaml, qrcode==5.2.2, requests, seccure==0.3.1.3, attrs==18.1.0, pyasn1, pyasn1-modules, service_identity==16.0.0, six==1.9.0, txJSON-RPC, zope.interface==4.3.3, protobuf==3.6.1, keyring==10.4.0, txupnp, git+https://github.com/lbryio/lbryschema.git#egg=lbryschema, git+https://github.com/lbryio/lbry.git@v0.32.4#egg=lbrynet, git+https://github.com/lbryio/aioupnp.git#egg=aioupnp, asn1crypto, treq==17.8.0, funcsigs, mock, pbr, pyopenssl, twisted, idna, Automat, hyperlink, PyHamcrest, netifaces, cryptography, aiohttp==3.5.4, multidict==4.5.2, idna_ssl==1.1.0, typing_extensions==3.6.5, yarl, chardet==3.0.4, async_timeout==3.0.1, aiorpcX==0.9.0, git+https://github.com/lbryio/torba#egg=torba, coincurve
requirements = python3crystax, openssl, sqlite3, hostpython3crystax, android, distro, pyjnius, certifi==2018.4.16, constantly, incremental, appdirs==1.4.3, argparse==1.2.1, docopt, base58==1.0.0, colorama==0.3.7, dnspython==1.12.0, ecdsa==0.13, envparse, jsonrpclib==0.1.7, jsonschema==2.5.1, pbkdf2, pyyaml, qrcode==5.2.2, requests, seccure==0.3.1.3, attrs==18.1.0, pyasn1, pyasn1-modules, service_identity==16.0.0, six==1.9.0, txJSON-RPC, zope.interface==4.3.3, protobuf==3.6.1, keyring==10.4.0, txupnp, git+https://github.com/lbryio/lbryschema.git#egg=lbryschema, git+https://github.com/lbryio/lbry.git@v0.34.0#egg=lbrynet, git+https://github.com/lbryio/aioupnp.git#egg=aioupnp, asn1crypto, treq==17.8.0, funcsigs, mock, pbr, pyopenssl, twisted, idna, Automat, hyperlink, PyHamcrest, netifaces, cryptography, aiohttp==3.5.4, multidict==4.5.2, idna_ssl==1.1.0, typing_extensions==3.6.5, yarl, chardet==3.0.4, async_timeout==3.0.1, aiorpcX==0.9.0, git+https://github.com/lbryio/torba#egg=torba, coincurve

# (str) Custom source folders for requirements
# Sets custom source for any requirements with recipes
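The three identical hunks above apply the same requirements change, apparently to the repository's different buildozer spec variants; comparing the removed and added lines shows miniupnpc==1.9 and gmpy being dropped and the lbry pin moving from v0.32.4 to v0.34.0. A small Python helper that makes such a comparison explicit (illustrative only, not part of the diff):

def diff_requirements(old_line, new_line):
    # Each line looks like "requirements = a, b, c"; split off the key, then compare entries.
    old = {r.strip() for r in old_line.split('=', 1)[1].split(',')}
    new = {r.strip() for r in new_line.split('=', 1)[1].split(',')}
    return sorted(old - new), sorted(new - old)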
@@ -1,10 +1,10 @@
from os.path import (join, dirname)
from os import environ, uname
import sys
from distutils.spawn import find_executable
from os import environ
from os.path import (exists, join, dirname, split)
from glob import glob

from pythonforandroid.logger import warning
from pythonforandroid.recipe import Recipe
from pythonforandroid.util import BuildInterruptingException, build_platform


class Arch(object):
@@ -19,6 +19,12 @@ class Arch(object):
super(Arch, self).__init__()
self.ctx = ctx

# Allows injecting additional linker paths used by any recipe.
# This can also be modified by recipes (like the librt recipe)
# to make sure that some sort of global resource is available &
# linked for all others.
self.extra_global_link_paths = []

def __str__(self):
return self.arch
@@ -30,24 +36,65 @@ class Arch(object):
d.format(arch=self))
for d in self.ctx.include_dirs]

def get_env(self, with_flags_in_cc=True):
@property
def target(self):
target_data = self.command_prefix.split('-')
return '-'.join(
[target_data[0], 'none', target_data[1], target_data[2]])

def get_env(self, with_flags_in_cc=True, clang=False):
env = {}

env["CFLAGS"] = " ".join([
"-DANDROID", "-mandroid", "-fomit-frame-pointer",
"--sysroot", self.ctx.ndk_platform])
cflags = [
'-DANDROID',
'-fomit-frame-pointer',
'-D__ANDROID_API__={}'.format(self.ctx.ndk_api)]
if not clang:
cflags.append('-mandroid')
else:
cflags.append('-target ' + self.target)
toolchain = '{android_host}-{toolchain_version}'.format(
android_host=self.ctx.toolchain_prefix,
toolchain_version=self.ctx.toolchain_version)
toolchain = join(self.ctx.ndk_dir, 'toolchains', toolchain,
'prebuilt', build_platform)
cflags.append('-gcc-toolchain {}'.format(toolchain))

env['CFLAGS'] = ' '.join(cflags)

# Link the extra global link paths first before anything else
# (such that overriding system libraries with them is possible)
env['LDFLAGS'] = ' ' + " ".join([
"-L'" + l.replace("'", "'\"'\"'") + "'" # no shlex.quote in py2
for l in self.extra_global_link_paths
]) + ' '

sysroot = join(self.ctx._ndk_dir, 'sysroot')
if exists(sysroot):
# post-15 NDK per
# https://android.googlesource.com/platform/ndk/+/ndk-r15-release/docs/UnifiedHeaders.md
env['CFLAGS'] += ' -isystem {}/sysroot/usr/include/{}'.format(
self.ctx.ndk_dir, self.ctx.toolchain_prefix)
env['CFLAGS'] += ' -I{}/sysroot/usr/include/{}'.format(
self.ctx.ndk_dir, self.command_prefix)
else:
sysroot = self.ctx.ndk_platform
env['CFLAGS'] += ' -I{}'.format(self.ctx.ndk_platform)
env['CFLAGS'] += ' -isysroot {} '.format(sysroot)
env['CFLAGS'] += '-I' + join(self.ctx.get_python_install_dir(),
'include/python{}'.format(
self.ctx.python_recipe.version[0:3])
)

env['LDFLAGS'] += '--sysroot={} '.format(self.ctx.ndk_platform)

env["CXXFLAGS"] = env["CFLAGS"]

env["LDFLAGS"] = " ".join(['-lm', '-L' + self.ctx.get_libs_dir(self.arch)])
env["LDFLAGS"] += " ".join(['-lm', '-L' + self.ctx.get_libs_dir(self.arch)])

if self.ctx.ndk == 'crystax':
env['LDFLAGS'] += ' -L{}/sources/crystax/libs/{} -lcrystax'.format(self.ctx.ndk_dir, self.arch)

py_platform = sys.platform
if py_platform in ['linux2', 'linux3']:
py_platform = 'linux'

toolchain_prefix = self.ctx.toolchain_prefix
toolchain_version = self.ctx.toolchain_version
command_prefix = self.command_prefix
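For context on the new target property introduced above: it turns the GNU-style command prefix into a clang target triple by inserting 'none' as the vendor component (ArchARM later in this diff overrides the first component with 'armv7a'). A standalone Python sketch of the same transformation, for illustration only:

def clang_target(command_prefix):
    # 'aarch64-linux-android' -> 'aarch64-none-linux-android'
    parts = command_prefix.split('-')
    return '-'.join([parts[0], 'none', parts[1], parts[2]])

assert clang_target('aarch64-linux-android') == 'aarch64-none-linux-android'
assert clang_target('arm-linux-androideabi') == 'arm-none-linux-androideabi'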
@@ -63,53 +110,71 @@ class Arch(object):
env['NDK_CCACHE'] = self.ctx.ccache
env.update({k: v for k, v in environ.items() if k.startswith('CCACHE_')})

cc = find_executable('{command_prefix}-gcc'.format(
command_prefix=command_prefix), path=environ['PATH'])
if clang:
llvm_dirname = split(
glob(join(self.ctx.ndk_dir, 'toolchains', 'llvm*'))[-1])[-1]
clang_path = join(self.ctx.ndk_dir, 'toolchains', llvm_dirname,
'prebuilt', build_platform, 'bin')
environ['PATH'] = '{clang_path}:{path}'.format(
clang_path=clang_path, path=environ['PATH'])
exe = join(clang_path, 'clang')
execxx = join(clang_path, 'clang++')
else:
exe = '{command_prefix}-gcc'.format(command_prefix=command_prefix)
execxx = '{command_prefix}-g++'.format(command_prefix=command_prefix)

cc = find_executable(exe, path=environ['PATH'])
if cc is None:
print('Searching path are: {!r}'.format(environ['PATH']))
warning('Couldn\'t find executable for CC. This indicates a '
'problem locating the {} executable in the Android '
'NDK, not that you don\'t have a normal compiler '
'installed. Exiting.')
exit(1)
raise BuildInterruptingException(
'Couldn\'t find executable for CC. This indicates a '
'problem locating the {} executable in the Android '
'NDK, not that you don\'t have a normal compiler '
'installed. Exiting.'.format(exe))

if with_flags_in_cc:
env['CC'] = '{ccache}{command_prefix}-gcc {cflags}'.format(
command_prefix=command_prefix,
env['CC'] = '{ccache}{exe} {cflags}'.format(
exe=exe,
ccache=ccache,
cflags=env['CFLAGS'])
env['CXX'] = '{ccache}{command_prefix}-g++ {cxxflags}'.format(
command_prefix=command_prefix,
env['CXX'] = '{ccache}{execxx} {cxxflags}'.format(
execxx=execxx,
ccache=ccache,
cxxflags=env['CXXFLAGS'])
else:
env['CC'] = '{ccache}{command_prefix}-gcc'.format(
command_prefix=command_prefix,
env['CC'] = '{ccache}{exe}'.format(
exe=exe,
ccache=ccache)
env['CXX'] = '{ccache}{command_prefix}-g++'.format(
command_prefix=command_prefix,
env['CXX'] = '{ccache}{execxx}'.format(
execxx=execxx,
ccache=ccache)

env['AR'] = '{}-ar'.format(command_prefix)
env['RANLIB'] = '{}-ranlib'.format(command_prefix)
env['LD'] = '{}-ld'.format(command_prefix)
# env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink')
# env['LDSHARED'] = env['LD']
env['LDSHARED'] = env["CC"] + " -pthread -shared " +\
"-Wl,-O1 -Wl,-Bsymbolic-functions "
if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax:
# For crystax python, we can't use the host python headers:
env["CFLAGS"] += ' -I{}/sources/python/{}/include/python/'.\
format(self.ctx.ndk_dir, self.ctx.python_recipe.version[0:3])
env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix)
env['MAKE'] = 'make -j5'
env['READELF'] = '{}-readelf'.format(command_prefix)
env['NM'] = '{}-nm'.format(command_prefix)

hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)

# AND: This hardcodes python version 2.7, needs fixing
hostpython_recipe = Recipe.get_recipe(
'host' + self.ctx.python_recipe.name, self.ctx)
env['BUILDLIB_PATH'] = join(
hostpython_recipe.get_build_dir(self.arch),
'build', 'lib.linux-{}-2.7'.format(uname()[-1]))
'build', 'lib.{}-{}'.format(
build_platform, self.ctx.python_recipe.major_minor_version_string)
)

env['PATH'] = environ['PATH']

env['ARCH'] = self.arch
env['NDK_API'] = 'android-{}'.format(str(self.ctx.ndk_api))

if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax:
env['CRYSTAX_PYTHON_VERSION'] = self.ctx.python_recipe.version
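The hunk above also replaces the hard-coded {command_prefix}-gcc lookup: with clang=True the compilers come from the NDK's bundled LLVM toolchain, otherwise the GNU-prefixed gcc/g++ are used, and the chosen executable is then wrapped with ccache and the computed CFLAGS. A simplified Python sketch of that selection (paths and arguments are assumptions, not the project's exact code):

from os.path import join
from glob import glob

def pick_compilers(ndk_dir, build_platform, command_prefix, clang=False):
    # Return (cc, cxx), mirroring the compiler-selection logic in the hunk above.
    if clang:
        llvm_dir = sorted(glob(join(ndk_dir, 'toolchains', 'llvm*')))[-1]
        bin_dir = join(llvm_dir, 'prebuilt', build_platform, 'bin')
        return join(bin_dir, 'clang'), join(bin_dir, 'clang++')
    return command_prefix + '-gcc', command_prefix + '-g++'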
@@ -123,12 +188,18 @@ class ArchARM(Arch):
command_prefix = 'arm-linux-androideabi'
platform_dir = 'arch-arm'

@property
def target(self):
target_data = self.command_prefix.split('-')
return '-'.join(
['armv7a', 'none', target_data[1], target_data[2]])


class ArchARMv7_a(ArchARM):
arch = 'armeabi-v7a'

def get_env(self, with_flags_in_cc=True):
env = super(ArchARMv7_a, self).get_env(with_flags_in_cc)
def get_env(self, with_flags_in_cc=True, clang=False):
env = super(ArchARMv7_a, self).get_env(with_flags_in_cc, clang=clang)
env['CFLAGS'] = (env['CFLAGS'] +
(' -march=armv7-a -mfloat-abi=softfp '
'-mfpu=vfp -mthumb'))
@@ -142,8 +213,8 @@ class Archx86(Arch):
command_prefix = 'i686-linux-android'
platform_dir = 'arch-x86'

def get_env(self, with_flags_in_cc=True):
env = super(Archx86, self).get_env(with_flags_in_cc)
def get_env(self, with_flags_in_cc=True, clang=False):
env = super(Archx86, self).get_env(with_flags_in_cc, clang=clang)
env['CFLAGS'] = (env['CFLAGS'] +
' -march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32')
env['CXXFLAGS'] = env['CFLAGS']
@@ -152,12 +223,12 @@ class Archx86(Arch):

class Archx86_64(Arch):
arch = 'x86_64'
toolchain_prefix = 'x86'
toolchain_prefix = 'x86_64'
command_prefix = 'x86_64-linux-android'
platform_dir = 'arch-x86'
platform_dir = 'arch-x86_64'

def get_env(self, with_flags_in_cc=True):
env = super(Archx86_64, self).get_env(with_flags_in_cc)
def get_env(self, with_flags_in_cc=True, clang=False):
env = super(Archx86_64, self).get_env(with_flags_in_cc, clang=clang)
env['CFLAGS'] = (env['CFLAGS'] +
' -march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel')
env['CXXFLAGS'] = env['CFLAGS']
@@ -170,8 +241,8 @@ class ArchAarch_64(Arch):
command_prefix = 'aarch64-linux-android'
platform_dir = 'arch-arm64'

def get_env(self, with_flags_in_cc=True):
env = super(ArchAarch_64, self).get_env(with_flags_in_cc)
def get_env(self, with_flags_in_cc=True, clang=False):
env = super(ArchAarch_64, self).get_env(with_flags_in_cc, clang=clang)
incpath = ' -I' + join(dirname(__file__), 'includes', 'arm64-v8a')
env['EXTRA_CFLAGS'] = incpath
env['CFLAGS'] += incpath
@@ -1,17 +1,39 @@
from os.path import (join, dirname, isdir, splitext, basename)
from os import listdir
from os.path import (join, dirname, isdir, normpath, splitext, basename)
from os import listdir, walk, sep
import sh
import shlex
import glob
import json
import importlib
import os
import shutil

from pythonforandroid.logger import (warning, shprint, info, logger,
debug)
from pythonforandroid.util import (current_directory, ensure_dir,
temp_directory, which)
temp_directory)
from pythonforandroid.recipe import Recipe


def copy_files(src_root, dest_root, override=True):
for root, dirnames, filenames in walk(src_root):
for filename in filenames:
subdir = normpath(root.replace(src_root, ""))
if subdir.startswith(sep): # ensure it is relative
subdir = subdir[1:]
dest_dir = join(dest_root, subdir)
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
src_file = join(root, filename)
dest_file = join(dest_dir, filename)
if os.path.isfile(src_file):
if override and os.path.exists(dest_file):
os.unlink(dest_file)
if not os.path.exists(dest_file):
shutil.copy(src_file, dest_file)
else:
os.makedirs(dest_file)


class Bootstrap(object):
'''An Android project template, containing recipe stuff for
compilation and templated fields for APK info.
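The new copy_files helper above mirrors a source tree into a destination, creating subdirectories as needed; with override=False existing files are left alone, which is what lets the per-bootstrap build directory take precedence over the shared common one later in this diff. A hypothetical usage sketch (paths are made up):

# Bootstrap-specific files first, then fill in anything missing from the common template:
copy_files('/src/bootstraps/sdl2/build', '/tmp/build_dir')
copy_files('/src/bootstraps/common/build', '/tmp/build_dir', override=False)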
@@ -27,7 +49,11 @@ class Bootstrap(object):
dist_name = None
distribution = None

recipe_depends = ['sdl2']
# All bootstraps should include Python in some way:
recipe_depends = [
("python2", "python2legacy", "python3", "python3crystax"),
'android',
]

can_be_chosen_automatically = True
'''Determines whether the bootstrap can be chosen as one that
@@ -78,6 +104,9 @@ class Bootstrap(object):
def get_dist_dir(self, name):
return join(self.ctx.dist_dir, name)

def get_common_dir(self):
return os.path.abspath(join(self.bootstrap_dir, "..", 'common'))

@property
def name(self):
modname = self.__class__.__module__
@@ -87,9 +116,10 @@ class Bootstrap(object):
'''Ensure that a build dir exists for the recipe. This same single
dir will be used for building all different archs.'''
self.build_dir = self.get_build_dir()
shprint(sh.cp, '-r',
join(self.bootstrap_dir, 'build'),
self.build_dir)
self.common_dir = self.get_common_dir()
copy_files(join(self.bootstrap_dir, 'build'), self.build_dir)
copy_files(join(self.common_dir, 'build'), self.build_dir,
override=False)
if self.ctx.symlink_java_src:
info('Symlinking java src instead of copying')
shprint(sh.rm, '-r', join(self.build_dir, 'src'))
@@ -102,26 +132,15 @@ class Bootstrap(object):
fileh.write('target=android-{}'.format(self.ctx.android_api))

def prepare_dist_dir(self, name):
# self.dist_dir = self.get_dist_dir(name)
ensure_dir(self.dist_dir)

def run_distribute(self):
# print('Default bootstrap being used doesn\'t know how '
# 'to distribute...failing.')
# exit(1)
with current_directory(self.dist_dir):
info('Saving distribution info')
with open('dist_info.json', 'w') as fileh:
json.dump({'dist_name': self.ctx.dist_name,
'bootstrap': self.ctx.bootstrap.name,
'archs': [arch.arch for arch in self.ctx.archs],
'recipes': self.ctx.recipe_build_order + self.ctx.python_modules},
fileh)
self.distribution.save_info(self.dist_dir)

@classmethod
def list_bootstraps(cls):
'''Find all the available bootstraps and return them.'''
forbidden_dirs = ('__pycache__', )
forbidden_dirs = ('__pycache__', 'common')
bootstraps_dir = join(dirname(__file__), 'bootstraps')
for name in listdir(bootstraps_dir):
if name in forbidden_dirs:
@@ -152,7 +171,7 @@ class Bootstrap(object):
for recipe in recipes:
try:
recipe = Recipe.get_recipe(recipe, ctx)
except IOError:
except ValueError:
conflicts = []
else:
conflicts = recipe.conflicts

@@ -160,7 +179,7 @@ class Bootstrap(object):
for conflict in conflicts]):
ok = False
break
if ok:
if ok and bs not in acceptable_bootstraps:
acceptable_bootstraps.append(bs)
info('Found {} acceptable bootstraps: {}'.format(
len(acceptable_bootstraps),
@@ -249,16 +268,22 @@ class Bootstrap(object):
info('Python was loaded from CrystaX, skipping strip')
return
env = arch.get_env()
strip = which('arm-linux-androideabi-strip', env['PATH'])
if strip is None:
warning('Can\'t find strip in PATH...')
return
strip = sh.Command(strip)
filens = shprint(sh.find, join(self.dist_dir, 'private'),
join(self.dist_dir, 'libs'),
tokens = shlex.split(env['STRIP'])
strip = sh.Command(tokens[0])
if len(tokens) > 1:
strip = strip.bake(tokens[1:])

libs_dir = join(self.dist_dir, '_python_bundle',
'_python_bundle', 'modules')
if self.ctx.python_recipe.name == 'python2legacy':
libs_dir = join(self.dist_dir, 'private')
filens = shprint(sh.find, libs_dir, join(self.dist_dir, 'libs'),
'-iname', '*.so', _env=env).stdout.decode('utf-8')

logger.info('Stripping libraries in private dir')
for filen in filens.split('\n'):
if not filen:
continue # skip the last ''
try:
strip(filen, _env=env)
except sh.ErrorReturnCode_1:
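The reworked strip step above stops assuming arm-linux-androideabi-strip and instead reuses the arch's STRIP environment entry (for example 'aarch64-linux-android-strip --strip-unneeded'), splitting the flags off with shlex and baking them into an sh command. A minimal Python sketch of that pattern, assuming the sh library is available:

import shlex
import sh

def make_strip_command(strip_env_value):
    # 'arm-linux-androideabi-strip --strip-unneeded' -> a callable with the flag pre-applied.
    tokens = shlex.split(strip_env_value)
    cmd = sh.Command(tokens[0])
    if len(tokens) > 1:
        cmd = cmd.bake(tokens[1:])
    return cmd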
22  p4a/pythonforandroid/bootstraps/common/build/ant.properties  Normal file

@@ -0,0 +1,22 @@
# This file is used to override default values used by the Ant build system.
#
# This file must be checked into Version Control Systems, as it is
# integral to the build system of your project.

# This file is only used by the Ant script.

# You can use this to override default values such as
# 'source.dir' for the location of your java source folder and
# 'out.dir' for the location of your output folder.

# You can also use it define how the release builds are signed by declaring
# the following properties:
# 'key.store' for the location of your keystore and
# 'key.alias' for the name of the key to use.
# The password will be asked during the build when you use the 'release' target.

source.absolute.dir = tmp-src

resource.absolute.dir = src/main/res

asset.absolute.dir = src/main/assets
795  p4a/pythonforandroid/bootstraps/common/build/build.py  Normal file
@@ -0,0 +1,795 @@
#!/usr/bin/env python2.7

from __future__ import print_function

import json
from os.path import (
dirname, join, isfile, realpath,
relpath, split, exists, basename
)
from os import listdir, makedirs, remove
import os
import shlex
import shutil
import subprocess
import sys
import tarfile
import tempfile
import time
from zipfile import ZipFile

from distutils.version import LooseVersion
from fnmatch import fnmatch
import jinja2


def get_dist_info_for(key):
try:
with open(join(dirname(__file__), 'dist_info.json'), 'r') as fileh:
info = json.load(fileh)
value = str(info[key])
except (OSError, KeyError) as e:
print("BUILD FAILURE: Couldn't extract the key `" + key + "` " +
"from dist_info.json: " + str(e))
sys.exit(1)
return value


def get_hostpython():
return get_dist_info_for('hostpython')


def get_python_version():
return get_dist_info_for('python_version')


def get_bootstrap_name():
return get_dist_info_for('bootstrap')

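# Note (not part of the diff): the helpers above read build metadata from the
# dist_info.json that python-for-android writes next to this script; the keys
# consumed in this file are 'bootstrap', 'hostpython', 'python_version',
# 'ndk_api' and 'archs'. Illustrative example of reading one by hand
# (values are hypothetical):
#
#   import json
#   with open('dist_info.json') as fileh:
#       info = json.load(fileh)
#   print(info['bootstrap'])        # e.g. 'sdl2'
#   print(info['python_version'])   # e.g. '3.7'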
if os.name == 'nt':
ANDROID = 'android.bat'
ANT = 'ant.bat'
else:
ANDROID = 'android'
ANT = 'ant'

curdir = dirname(__file__)

PYTHON = get_hostpython()
PYTHON_VERSION = get_python_version()
if PYTHON is not None and not exists(PYTHON):
PYTHON = None

BLACKLIST_PATTERNS = [
# code versionning
'^*.hg/*',
'^*.git/*',
'^*.bzr/*',
'^*.svn/*',

# temp files
'~',
'*.bak',
'*.swp',
]
# pyc/py
if PYTHON is not None:
BLACKLIST_PATTERNS.append('*.py')
if PYTHON_VERSION and int(PYTHON_VERSION[0]) == 2:
# we only blacklist `.pyc` for python2 because in python3 the compiled
# extension is `.pyc` (.pyo files not exists for python >= 3.6)
BLACKLIST_PATTERNS.append('*.pyc')

WHITELIST_PATTERNS = []
if get_bootstrap_name() in ('sdl2', 'webview', 'service_only'):
WHITELIST_PATTERNS.append('pyconfig.h')

python_files = []


environment = jinja2.Environment(loader=jinja2.FileSystemLoader(
join(curdir, 'templates')))


def try_unlink(fn):
if exists(fn):
os.unlink(fn)


def ensure_dir(path):
if not exists(path):
makedirs(path)


def render(template, dest, **kwargs):
'''Using jinja2, render `template` to the filename `dest`, supplying the

keyword arguments as template parameters.
'''

dest_dir = dirname(dest)
if dest_dir and not exists(dest_dir):
makedirs(dest_dir)

template = environment.get_template(template)
text = template.render(**kwargs)

f = open(dest, 'wb')
f.write(text.encode('utf-8'))
f.close()


def is_whitelist(name):
return match_filename(WHITELIST_PATTERNS, name)


def is_blacklist(name):
if is_whitelist(name):
return False
return match_filename(BLACKLIST_PATTERNS, name)


def match_filename(pattern_list, name):
for pattern in pattern_list:
if pattern.startswith('^'):
pattern = pattern[1:]
else:
pattern = '*/' + pattern
if fnmatch(name, pattern):
return True


def listfiles(d):
basedir = d
subdirlist = []
for item in os.listdir(d):
fn = join(d, item)
if isfile(fn):
yield fn
else:
subdirlist.append(join(basedir, item))
for subdir in subdirlist:
for fn in listfiles(subdir):
yield fn

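# Note (not part of the diff): match_filename() above treats a leading '^' as
# "anchored at the start of the path" and otherwise prepends '*/', so plain
# patterns match at any depth, and is_blacklist() lets the whitelist win.
# Illustrative behaviour (file names are made up):
#
#   fnmatch('app/main.swp', '*/' + '*.swp')   -> True   (the '*.swp' rule)
#   fnmatch('myapp/.git/config', '*.git/*')   -> True   (the '^*.git/*' rule, '^' stripped)
#   is_blacklist('private/pyconfig.h')        -> False  (whitelisted for sdl2/webview/service_only)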
def make_python_zip():
'''
Search for all the python related files, and construct the pythonXX.zip
According to
# http://randomsplat.com/id5-cross-compiling-python-for-embedded-linux.html
site-packages, config and lib-dynload will be not included.
'''

if not exists('private'):
print('No compiled python is present to zip, skipping.')
return

global python_files
d = realpath(join('private', 'lib', 'python2.7'))

def select(fn):
if is_blacklist(fn):
return False
fn = realpath(fn)
assert(fn.startswith(d))
fn = fn[len(d):]
if (fn.startswith('/site-packages/')
or fn.startswith('/config/')
or fn.startswith('/lib-dynload/')
or fn.startswith('/libpymodules.so')):
return False
return fn

# get a list of all python file
python_files = [x for x in listfiles(d) if select(x)]

# create the final zipfile
zfn = join('private', 'lib', 'python27.zip')
zf = ZipFile(zfn, 'w')

# put all the python files in it
for fn in python_files:
afn = fn[len(d):]
zf.write(fn, afn)
zf.close()

def make_tar(tfn, source_dirs, ignore_path=[], optimize_python=True):
'''
Make a zip file `fn` from the contents of source_dis.
'''

# selector function
def select(fn):
rfn = realpath(fn)
for p in ignore_path:
if p.endswith('/'):
p = p[:-1]
if rfn.startswith(p):
return False
if rfn in python_files:
return False
return not is_blacklist(fn)

# get the files and relpath file of all the directory we asked for
files = []
for sd in source_dirs:
sd = realpath(sd)
compile_dir(sd, optimize_python=optimize_python)
files += [(x, relpath(realpath(x), sd)) for x in listfiles(sd)
if select(x)]

# create tar.gz of thoses files
tf = tarfile.open(tfn, 'w:gz', format=tarfile.USTAR_FORMAT)
dirs = []
for fn, afn in files:
dn = dirname(afn)
if dn not in dirs:
# create every dirs first if not exist yet
d = ''
for component in split(dn):
d = join(d, component)
if d.startswith('/'):
d = d[1:]
if d == '' or d in dirs:
continue
dirs.append(d)
tinfo = tarfile.TarInfo(d)
tinfo.type = tarfile.DIRTYPE
tf.addfile(tinfo)

# put the file
tf.add(fn, afn)
tf.close()

def compile_dir(dfn, optimize_python=True):
'''
Compile *.py in directory `dfn` to *.pyo
'''

if PYTHON is None:
return

if int(PYTHON_VERSION[0]) >= 3:
args = [PYTHON, '-m', 'compileall', '-b', '-f', dfn]
else:
args = [PYTHON, '-m', 'compileall', '-f', dfn]
if optimize_python:
# -OO = strip docstrings
args.insert(1, '-OO')
return_code = subprocess.call(args)

if return_code != 0:
print('Error while running "{}"'.format(' '.join(args)))
print('This probably means one of your Python files has a syntax '
'error, see logs above')
exit(1)

def make_package(args):
# If no launcher is specified, require a main.py/main.pyo:
if (get_bootstrap_name() != "sdl" or args.launcher is None) and \
get_bootstrap_name() != "webview":
# (webview doesn't need an entrypoint, apparently)
if args.private is None or (
not exists(join(realpath(args.private), 'main.py')) and
not exists(join(realpath(args.private), 'main.pyo'))):
print('''BUILD FAILURE: No main.py(o) found in your app directory. This
file must exist to act as the entry point for you app. If your app is
started by a file with a different name, rename it to main.py or add a
main.py that loads it.''')
sys.exit(1)

assets_dir = "src/main/assets"

# Delete the old assets.
try_unlink(join(assets_dir, 'public.mp3'))
try_unlink(join(assets_dir, 'private.mp3'))
ensure_dir(assets_dir)

# In order to speedup import and initial depack,
# construct a python27.zip
make_python_zip()

# Add extra environment variable file into tar-able directory:
env_vars_tarpath = tempfile.mkdtemp(prefix="p4a-extra-env-")
with open(os.path.join(env_vars_tarpath, "p4a_env_vars.txt"), "w") as f:
f.write("P4A_IS_WINDOWED=" + str(args.window) + "\n")
if hasattr(args, "orientation"):
f.write("P4A_ORIENTATION=" + str(args.orientation) + "\n")
f.write("P4A_NUMERIC_VERSION=" + str(args.numeric_version) + "\n")
f.write("P4A_MINSDK=" + str(args.min_sdk_version) + "\n")

# Package up the private data (public not supported).
tar_dirs = [env_vars_tarpath]
if args.private:
tar_dirs.append(args.private)
for python_bundle_dir in ('private', 'crystax_python', '_python_bundle'):
if exists(python_bundle_dir):
tar_dirs.append(python_bundle_dir)
if get_bootstrap_name() == "webview":
tar_dirs.append('webview_includes')
if args.private or args.launcher:
make_tar(
join(assets_dir, 'private.mp3'), tar_dirs, args.ignore_path,
optimize_python=args.optimize_python)

# Remove extra env vars tar-able directory:
shutil.rmtree(env_vars_tarpath)

# Prepare some variables for templating process
res_dir = "src/main/res"
default_icon = 'templates/kivy-icon.png'
default_presplash = 'templates/kivy-presplash.jpg'
shutil.copy(
args.icon or default_icon,
join(res_dir, 'drawable/icon.png')
)
if get_bootstrap_name() != "service_only":
shutil.copy(
args.presplash or default_presplash,
join(res_dir, 'drawable/presplash.jpg')
)

# If extra Java jars were requested, copy them into the libs directory
jars = []
if args.add_jar:
for jarname in args.add_jar:
if not exists(jarname):
print('Requested jar does not exist: {}'.format(jarname))
sys.exit(-1)
shutil.copy(jarname, 'src/main/libs')
jars.append(basename(jarname))

# If extra aar were requested, copy them into the libs directory
aars = []
if args.add_aar:
ensure_dir("libs")
for aarname in args.add_aar:
if not exists(aarname):
print('Requested aar does not exists: {}'.format(aarname))
sys.exit(-1)
shutil.copy(aarname, 'libs')
aars.append(basename(aarname).rsplit('.', 1)[0])

versioned_name = (args.name.replace(' ', '').replace('\'', '') +
'-' + args.version)

version_code = 0
if not args.numeric_version:
# Set version code in format (arch-minsdk-app_version)
with open(join(dirname(__file__), 'dist_info.json'), 'r') as dist_info:
dist_data = json.load(dist_info)
arch = dist_data["archs"][0]
arch_dict = {"x86_64": "9", "arm64-v8a": "8", "armeabi-v7a": "7", "x86": "6"}
arch_code = arch_dict.get(arch, '1')
min_sdk = args.min_sdk_version
for i in args.version.split('.'):
version_code *= 100
version_code += int(i)
args.numeric_version = "{}{}{}".format(arch_code, min_sdk, version_code)

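# Note (not part of the diff): the default versionCode above is assembled as
# <arch digit><min SDK><app version packed base-100>. Worked example with
# hypothetical inputs: arch 'armeabi-v7a' -> '7', minsdk 21, version '0.3.2'
# -> packed 0*100*100 + 3*100 + 2 = 302, giving numeric_version '721302'.
#
#   arch_code = '7'; min_sdk = 21
#   version_code = 0
#   for i in '0.3.2'.split('.'):
#       version_code = version_code * 100 + int(i)
#   assert "{}{}{}".format(arch_code, min_sdk, version_code) == '721302'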
if args.intent_filters:
with open(args.intent_filters) as fd:
args.intent_filters = fd.read()

if not args.add_activity:
args.add_activity = []

if not args.activity_launch_mode:
args.activity_launch_mode = ''

if args.extra_source_dirs:
esd = []
for spec in args.extra_source_dirs:
if ':' in spec:
specdir, specincludes = spec.split(':')
else:
specdir = spec
specincludes = '**'
esd.append((realpath(specdir), specincludes))
args.extra_source_dirs = esd
else:
args.extra_source_dirs = []

service = False
if args.private:
service_main = join(realpath(args.private), 'service', 'main.py')
if exists(service_main) or exists(service_main + 'o'):
service = True

service_names = []
for sid, spec in enumerate(args.services):
spec = spec.split(':')
name = spec[0]
entrypoint = spec[1]
options = spec[2:]

foreground = 'foreground' in options
sticky = 'sticky' in options

service_names.append(name)
service_target_path =\
'src/main/java/{}/Service{}.java'.format(
args.package.replace(".", "/"),
name.capitalize()
)
render(
'Service.tmpl.java',
service_target_path,
name=name,
entrypoint=entrypoint,
args=args,
foreground=foreground,
sticky=sticky,
service_id=sid + 1,
)

# Find the SDK directory and target API
with open('project.properties', 'r') as fileh:
target = fileh.read().strip()
android_api = target.split('-')[1]
try:
int(android_api)
except (ValueError, TypeError):
raise ValueError(
"failed to extract the Android API level from " +
"build.properties. expected int, got: '" +
str(android_api) + "'"
)
with open('local.properties', 'r') as fileh:
sdk_dir = fileh.read().strip()
sdk_dir = sdk_dir[8:]

# Try to build with the newest available build tools
ignored = {".DS_Store", ".ds_store"}
build_tools_versions = [x for x in listdir(join(sdk_dir, 'build-tools')) if x not in ignored]
build_tools_versions = sorted(build_tools_versions,
key=LooseVersion)
build_tools_version = build_tools_versions[-1]

# Folder name for launcher (used by SDL2 bootstrap)
url_scheme = 'kivy'

# Render out android manifest:
manifest_path = "src/main/AndroidManifest.xml"
render_args = {
"args": args,
"service": service,
"service_names": service_names,
"android_api": android_api
}
if get_bootstrap_name() == "sdl2":
render_args["url_scheme"] = url_scheme
render(
'AndroidManifest.tmpl.xml',
manifest_path,
**render_args)

# Copy the AndroidManifest.xml to the dist root dir so that ant
# can also use it
if exists('AndroidManifest.xml'):
remove('AndroidManifest.xml')
shutil.copy(manifest_path, 'AndroidManifest.xml')

# gradle build templates
render(
'build.tmpl.gradle',
'build.gradle',
args=args,
aars=aars,
jars=jars,
android_api=android_api,
build_tools_version=build_tools_version
)

# ant build templates
render(
'build.tmpl.xml',
'build.xml',
args=args,
versioned_name=versioned_name)

# String resources:
render_args = {
"args": args,
"private_version": str(time.time())
}
if get_bootstrap_name() == "sdl2":
render_args["url_scheme"] = url_scheme
render(
'strings.tmpl.xml',
join(res_dir, 'values/strings.xml'),
**render_args)

if exists(join("templates", "custom_rules.tmpl.xml")):
render(
'custom_rules.tmpl.xml',
'custom_rules.xml',
args=args)

if get_bootstrap_name() == "webview":
render('WebViewLoader.tmpl.java',
'src/main/java/org/kivy/android/WebViewLoader.java',
args=args)

if args.sign:
render('build.properties', 'build.properties')
else:
if exists('build.properties'):
os.remove('build.properties')

# Apply java source patches if any are present:
if exists(join('src', 'patches')):
print("Applying Java source code patches...")
for patch_name in os.listdir(join('src', 'patches')):
patch_path = join('src', 'patches', patch_name)
print("Applying patch: " + str(patch_path))
try:
subprocess.check_output([
# -N: insist this is FORWARd patch, don't reverse apply
# -p1: strip first path component
# -t: batch mode, don't ask questions
"patch", "-N", "-p1", "-t", "-i", patch_path
])
except subprocess.CalledProcessError as e:
if e.returncode == 1:
# Return code 1 means it didn't apply, this will
# usually mean it is already applied.
print("Warning: failed to apply patch (" +
"exit code 1), " +
"assuming it is already applied: " +
str(patch_path)
)
else:
raise e

def parse_args(args=None):
global BLACKLIST_PATTERNS, WHITELIST_PATTERNS, PYTHON

# Get the default minsdk, equal to the NDK API that this dist is built against
try:
with open('dist_info.json', 'r') as fileh:
info = json.load(fileh)
default_min_api = int(info['ndk_api'])
ndk_api = default_min_api
except (OSError, KeyError, ValueError, TypeError):
print('WARNING: Failed to read ndk_api from dist info, defaulting to 12')
default_min_api = 12 # The old default before ndk_api was introduced
ndk_api = 12

import argparse
ap = argparse.ArgumentParser(description='''\
Package a Python application for Android (using
bootstrap ''' + get_bootstrap_name() + ''').

For this to work, Java and Ant need to be in your path, as does the
tools directory of the Android SDK.
''')

# --private is required unless for sdl2, where there's also --launcher
ap.add_argument('--private', dest='private',
help='the directory with the app source code files' +
' (containing your main.py entrypoint)',
required=(get_bootstrap_name() != "sdl2"))
ap.add_argument('--package', dest='package',
help=('The name of the java package the project will be'
' packaged under.'),
required=True)
ap.add_argument('--name', dest='name',
help=('The human-readable name of the project.'),
required=True)
ap.add_argument('--numeric-version', dest='numeric_version',
help=('The numeric version number of the project. If not '
'given, this is automatically computed from the '
'version.'))
ap.add_argument('--version', dest='version',
help=('The version number of the project. This should '
'consist of numbers and dots, and should have the '
'same number of groups of numbers as previous '
'versions.'),
required=True)
if get_bootstrap_name() == "sdl2":
ap.add_argument('--launcher', dest='launcher', action='store_true',
help=('Provide this argument to build a multi-app '
'launcher, rather than a single app.'))
ap.add_argument('--permission', dest='permissions', action='append', default=[],
help='The permissions to give this app.', nargs='+')
ap.add_argument('--meta-data', dest='meta_data', action='append', default=[],
help='Custom key=value to add in application metadata')
ap.add_argument('--uses-library', dest='android_used_libs', action='append', default=[],
help='Used shared libraries included using <uses-library> tag in AndroidManifest.xml')
ap.add_argument('--icon', dest='icon',
help=('A png file to use as the icon for '
'the application.'))
ap.add_argument('--service', dest='services', action='append', default=[],
help='Declare a new service entrypoint: '
'NAME:PATH_TO_PY[:foreground]')
if get_bootstrap_name() != "service_only":
ap.add_argument('--presplash', dest='presplash',
help=('A jpeg file to use as a screen while the '
'application is loading.'))
ap.add_argument('--presplash-color',
dest='presplash_color',
default='#000000',
help=('A string to set the loading screen '
'background color. '
'Supported formats are: '
'#RRGGBB #AARRGGBB or color names '
'like red, green, blue, etc.'))
ap.add_argument('--window', dest='window', action='store_true',
default=False,
help='Indicate if the application will be windowed')
ap.add_argument('--orientation', dest='orientation',
default='portrait',
help=('The orientation that the game will '
'display in. '
'Usually one of "landscape", "portrait", '
'"sensor", or "user" (the same as "sensor" '
'but obeying the '
'user\'s Android rotation setting). '
'The full list of options is given under '
'android_screenOrientation at '
'https://developer.android.com/guide/'
'topics/manifest/'
'activity-element.html'))
ap.add_argument('--wakelock', dest='wakelock', action='store_true',
help=('Indicate if the application needs the device '
'to stay on'))
ap.add_argument('--blacklist', dest='blacklist',
default=join(curdir, 'blacklist.txt'),
help=('Use a blacklist file to match unwanted file in '
'the final APK'))
ap.add_argument('--whitelist', dest='whitelist',
default=join(curdir, 'whitelist.txt'),
help=('Use a whitelist file to prevent blacklisting of '
'file in the final APK'))
ap.add_argument('--add-jar', dest='add_jar', action='append',
help=('Add a Java .jar to the libs, so you can access its '
'classes with pyjnius. You can specify this '
'argument more than once to include multiple jars'))
ap.add_argument('--add-aar', dest='add_aar', action='append',
help=('Add an aar dependency manually'))
ap.add_argument('--depend', dest='depends', action='append',
help=('Add a external dependency '
'(eg: com.android.support:appcompat-v7:19.0.1)'))
# The --sdk option has been removed, it is ignored in favour of
# --android-api handled by toolchain.py
ap.add_argument('--sdk', dest='sdk_version', default=-1,
type=int, help=('Deprecated argument, does nothing'))
ap.add_argument('--minsdk', dest='min_sdk_version',
default=default_min_api, type=int,
help=('Minimum Android SDK version that the app supports. '
'Defaults to {}.'.format(default_min_api)))
ap.add_argument('--allow-minsdk-ndkapi-mismatch', default=False,
action='store_true',
help=('Allow the --minsdk argument to be different from '
'the discovered ndk_api in the dist'))
ap.add_argument('--intent-filters', dest='intent_filters',
help=('Add intent-filters xml rules to the '
'AndroidManifest.xml file. The argument is a '
'filename containing xml. The filename should be '
'located relative to the python-for-android '
'directory'))
ap.add_argument('--with-billing', dest='billing_pubkey',
help='If set, the billing service will be added (not implemented)')
ap.add_argument('--add-source', dest='extra_source_dirs', action='append',
help='Include additional source dirs in Java build')
if get_bootstrap_name() == "webview":
ap.add_argument('--port',
help='The port on localhost that the WebView will access',
default='5000')
ap.add_argument('--try-system-python-compile', dest='try_system_python_compile',
action='store_true',
help='Use the system python during compileall if possible.')
ap.add_argument('--no-compile-pyo', dest='no_compile_pyo', action='store_true',
help='Do not optimise .py files to .pyo.')
ap.add_argument('--sign', action='store_true',
help=('Try to sign the APK with your credentials. You must set '
'the appropriate environment variables.'))
ap.add_argument('--add-activity', dest='add_activity', action='append',
help='Add this Java class as an Activity to the manifest.')
ap.add_argument('--activity-launch-mode',
dest='activity_launch_mode',
default='singleTask',
help='Set the launch mode of the main activity in the manifest.')
ap.add_argument('--allow-backup', dest='allow_backup', default='true',
help="if set to 'false', then android won't backup the application.")
ap.add_argument('--no-optimize-python', dest='optimize_python',
action='store_false', default=True,
help=('Whether to compile to optimised .pyo files, using -OO '
'(strips docstrings and asserts)'))

# Put together arguments, and add those from .p4a config file:
if args is None:
args = sys.argv[1:]

def _read_configuration():
if not exists(".p4a"):
return
print("Reading .p4a configuration")
with open(".p4a") as fd:
lines = fd.readlines()
lines = [shlex.split(line)
for line in lines if not line.startswith("#")]
for line in lines:
for arg in line:
args.append(arg)
_read_configuration()

args = ap.parse_args(args)
args.ignore_path = []

if args.name and args.name[0] == '"' and args.name[-1] == '"':
args.name = args.name[1:-1]

if ndk_api != args.min_sdk_version:
print(('WARNING: --minsdk argument does not match the api that is '
'compiled against. Only proceed if you know what you are '
'doing, otherwise use --minsdk={} or recompile against api '
'{}').format(ndk_api, args.min_sdk_version))
if not args.allow_minsdk_ndkapi_mismatch:
print('You must pass --allow-minsdk-ndkapi-mismatch to build '
'with --minsdk different to the target NDK api from the '
'build step')
sys.exit(1)
else:
print('Proceeding with --minsdk not matching build target api')

if args.billing_pubkey:
print('Billing not yet supported!')
sys.exit(1)

if args.sdk_version == -1:
print('WARNING: Received a --sdk argument, but this argument is '
'deprecated and does nothing.')
args.sdk_version = -1 # ensure it is not used

if args.permissions and isinstance(args.permissions[0], list):
args.permissions = [p for perm in args.permissions for p in perm]

if args.try_system_python_compile:
# Hardcoding python2.7 is okay for now, as python3 skips the
# compilation anyway
if not exists('crystax_python'):
python_executable = 'python2.7'
try:
subprocess.call([python_executable, '--version'])
except (OSError, subprocess.CalledProcessError):
pass
else:
PYTHON = python_executable

if args.no_compile_pyo:
PYTHON = None
BLACKLIST_PATTERNS.remove('*.py')

if args.blacklist:
with open(args.blacklist) as fd:
patterns = [x.strip() for x in fd.read().splitlines()
if x.strip() and not x.strip().startswith('#')]
BLACKLIST_PATTERNS += patterns

if args.whitelist:
with open(args.whitelist) as fd:
patterns = [x.strip() for x in fd.read().splitlines()
if x.strip() and not x.strip().startswith('#')]
WHITELIST_PATTERNS += patterns

if args.private is None and \
get_bootstrap_name() == 'sdl2' and args.launcher is None:
print('Need --private directory or ' +
'--launcher (SDL2 bootstrap only)' +
'to have something to launch inside the .apk!')
sys.exit(1)
make_package(args)

return args


if __name__ == "__main__":
parse_args()
BIN
p4a/pythonforandroid/bootstraps/common/build/gradle/wrapper/gradle-wrapper.jar
vendored
Normal file
BIN
p4a/pythonforandroid/bootstraps/common/build/gradle/wrapper/gradle-wrapper.jar
vendored
Normal file
Binary file not shown.
p4a/pythonforandroid/bootstraps/common/build/gradle/wrapper/gradle-wrapper.properties (vendored, new file, 6 lines)
@ -0,0 +1,6 @@
#Mon Mar 09 17:19:02 CET 2015
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip
p4a/pythonforandroid/bootstraps/common/build/gradlew (vendored, new executable file, 164 lines)
|
@ -0,0 +1,164 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
##############################################################################
|
||||
##
|
||||
## Gradle start up script for UN*X
|
||||
##
|
||||
##############################################################################
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS=""
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=`basename "$0"`
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD="maximum"
|
||||
|
||||
warn ( ) {
|
||||
echo "$*"
|
||||
}
|
||||
|
||||
die ( ) {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
case "`uname`" in
|
||||
CYGWIN* )
|
||||
cygwin=true
|
||||
;;
|
||||
Darwin* )
|
||||
darwin=true
|
||||
;;
|
||||
MINGW* )
|
||||
msys=true
|
||||
;;
|
||||
esac
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched.
|
||||
if $cygwin ; then
|
||||
[ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
fi
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
# Resolve links: $0 may be a link
|
||||
PRG="$0"
|
||||
# Need this for relative symlinks.
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG=`dirname "$PRG"`"/$link"
|
||||
fi
|
||||
done
|
||||
SAVED="`pwd`"
|
||||
cd "`dirname \"$PRG\"`/" >&-
|
||||
APP_HOME="`pwd -P`"
|
||||
cd "$SAVED" >&-
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD="java"
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
|
||||
MAX_FD_LIMIT=`ulimit -H -n`
|
||||
if [ $? -eq 0 ] ; then
|
||||
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
|
||||
MAX_FD="$MAX_FD_LIMIT"
|
||||
fi
|
||||
ulimit -n $MAX_FD
|
||||
if [ $? -ne 0 ] ; then
|
||||
warn "Could not set maximum file descriptor limit: $MAX_FD"
|
||||
fi
|
||||
else
|
||||
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
|
||||
fi
|
||||
fi
|
||||
|
||||
# For Darwin, add options to specify how the application appears in the dock
|
||||
if $darwin; then
|
||||
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
|
||||
fi
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin ; then
|
||||
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
|
||||
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
|
||||
|
||||
# We build the pattern for arguments to be converted via cygpath
|
||||
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
|
||||
SEP=""
|
||||
for dir in $ROOTDIRSRAW ; do
|
||||
ROOTDIRS="$ROOTDIRS$SEP$dir"
|
||||
SEP="|"
|
||||
done
|
||||
OURCYGPATTERN="(^($ROOTDIRS))"
|
||||
# Add a user-defined pattern to the cygpath arguments
|
||||
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
|
||||
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
|
||||
fi
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
i=0
|
||||
for arg in "$@" ; do
|
||||
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
|
||||
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
|
||||
|
||||
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
|
||||
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
|
||||
else
|
||||
eval `echo args$i`="\"$arg\""
|
||||
fi
|
||||
i=$((i+1))
|
||||
done
|
||||
case $i in
|
||||
(0) set -- ;;
|
||||
(1) set -- "$args0" ;;
|
||||
(2) set -- "$args0" "$args1" ;;
|
||||
(3) set -- "$args0" "$args1" "$args2" ;;
|
||||
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
|
||||
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
|
||||
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
|
||||
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
|
||||
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
|
||||
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
|
||||
function splitJvmOpts() {
|
||||
JVM_OPTS=("$@")
|
||||
}
|
||||
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
|
||||
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
|
||||
|
||||
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
|
p4a/pythonforandroid/bootstraps/common/build/gradlew.bat (vendored, new file, 90 lines)
|
@ -0,0 +1,90 @@
|
|||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS=
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:init
|
||||
@rem Get command-line arguments, handling Windows variants
|
||||
|
||||
if not "%OS%" == "Windows_NT" goto win9xME_args
|
||||
if "%@eval[2+2]" == "4" goto 4NT_args
|
||||
|
||||
:win9xME_args
|
||||
@rem Slurp the command line arguments.
|
||||
set CMD_LINE_ARGS=
|
||||
set _SKIP=2
|
||||
|
||||
:win9xME_args_slurp
|
||||
if "x%~1" == "x" goto execute
|
||||
|
||||
set CMD_LINE_ARGS=%*
|
||||
goto execute
|
||||
|
||||
:4NT_args
|
||||
@rem Get arguments from the 4NT Shell from JP Software
|
||||
set CMD_LINE_ARGS=%$
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
|
@ -0,0 +1 @@
|
|||
include $(call all-subdir-makefiles)
|
|
@ -0,0 +1 @@
|
|||
include $(call all-subdir-makefiles)
|
|
@ -0,0 +1,27 @@
|
|||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
LOCAL_MODULE := main
|
||||
|
||||
SDL_PATH := ../../SDL
|
||||
|
||||
LOCAL_C_INCLUDES := $(LOCAL_PATH)/$(SDL_PATH)/include
|
||||
|
||||
# Add your application source files here...
|
||||
LOCAL_SRC_FILES := $(SDL_PATH)/src/main/android/SDL_android_main.c \
|
||||
start.c
|
||||
|
||||
LOCAL_CFLAGS += -I$(PYTHON_INCLUDE_ROOT) $(EXTRA_CFLAGS)
|
||||
|
||||
LOCAL_SHARED_LIBRARIES := SDL2 python_shared
|
||||
|
||||
LOCAL_LDLIBS := -lGLESv1_CM -lGLESv2 -llog $(EXTRA_LDLIBS)
|
||||
|
||||
LOCAL_LDFLAGS += -L$(PYTHON_LINK_ROOT) $(APPLICATION_ADDITIONAL_LDFLAGS)
|
||||
|
||||
include $(BUILD_SHARED_LIBRARY)
|
||||
|
||||
ifdef CRYSTAX_PYTHON_VERSION
|
||||
$(call import-module,python/$(CRYSTAX_PYTHON_VERSION))
|
||||
endif
|
|
@ -1,5 +1,4 @@
|
|||
|
||||
|
||||
#define PY_SSIZE_T_CLEAN
|
||||
#include "Python.h"
|
||||
#ifndef Py_PYTHON_H
|
||||
|
@ -15,6 +14,16 @@
|
|||
#include <sys/types.h>
|
||||
#include <errno.h>
|
||||
|
||||
#include "bootstrap_name.h"
|
||||
#ifndef BOOTSTRAP_USES_NO_SDL_HEADERS
|
||||
#include "SDL.h"
|
||||
#ifndef BOOTSTRAP_NAME_PYGAME
|
||||
#include "SDL_opengles2.h"
|
||||
#endif
|
||||
#endif
|
||||
#ifdef BOOTSTRAP_NAME_PYGAME
|
||||
#include "jniwrapperstuff.h"
|
||||
#endif
|
||||
#include "android/log.h"
|
||||
|
||||
#define ENTRYPOINT_MAXLEN 128
|
||||
|
@ -58,7 +67,7 @@ int dir_exists(char *filename) {
|
|||
|
||||
int file_exists(const char *filename) {
|
||||
FILE *file;
|
||||
if (file = fopen(filename, "r")) {
|
||||
if ((file = fopen(filename, "r"))) {
|
||||
fclose(file);
|
||||
return 1;
|
||||
}
|
||||
|
@ -75,25 +84,79 @@ int main(int argc, char *argv[]) {
|
|||
int ret = 0;
|
||||
FILE *fd;
|
||||
|
||||
/* AND: Several filepaths are hardcoded here, these must be made
|
||||
configurable */
|
||||
/* AND: P4A uses env vars...not sure what's best */
|
||||
LOGP("Initialize Python for Android");
|
||||
LOGP("Initializing Python for Android");
|
||||
|
||||
// Set a couple of built-in environment vars:
|
||||
setenv("P4A_BOOTSTRAP", bootstrap_name, 1); // env var to identify p4a to applications
|
||||
env_argument = getenv("ANDROID_ARGUMENT");
|
||||
setenv("ANDROID_APP_PATH", env_argument, 1);
|
||||
env_entrypoint = getenv("ANDROID_ENTRYPOINT");
|
||||
env_logname = getenv("PYTHON_NAME");
|
||||
|
||||
if (!getenv("ANDROID_UNPACK")) {
|
||||
/* ANDROID_UNPACK currently isn't set in services */
|
||||
setenv("ANDROID_UNPACK", env_argument, 1);
|
||||
}
|
||||
if (env_logname == NULL) {
|
||||
env_logname = "python";
|
||||
setenv("PYTHON_NAME", "python", 1);
|
||||
}
|
||||
|
||||
// Set additional file-provided environment vars:
|
||||
LOGP("Setting additional env vars from p4a_env_vars.txt");
|
||||
char env_file_path[256];
|
||||
snprintf(env_file_path, sizeof(env_file_path),
|
||||
"%s/p4a_env_vars.txt", getenv("ANDROID_UNPACK"));
|
||||
FILE *env_file_fd = fopen(env_file_path, "r");
|
||||
if (env_file_fd) {
|
||||
char* line = NULL;
|
||||
size_t len = 0;
|
||||
while (getline(&line, &len, env_file_fd) != -1) {
|
||||
if (strlen(line) > 0) {
|
||||
char *eqsubstr = strstr(line, "=");
|
||||
if (eqsubstr) {
|
||||
size_t eq_pos = eqsubstr - line;
|
||||
|
||||
// Extract name:
|
||||
char env_name[256];
|
||||
strncpy(env_name, line, sizeof(env_name));
|
||||
env_name[eq_pos] = '\0';
|
||||
|
||||
// Extract value (with line break removed):
|
||||
char env_value[256];
|
||||
strncpy(env_value, (char*)(line + eq_pos + 1), sizeof(env_value));
|
||||
if (strlen(env_value) > 0 &&
|
||||
env_value[strlen(env_value)-1] == '\n') {
|
||||
env_value[strlen(env_value)-1] = '\0';
|
||||
if (strlen(env_value) > 0 &&
|
||||
env_value[strlen(env_value)-1] == '\r') {
|
||||
// Also remove windows line breaks (\r\n)
|
||||
env_value[strlen(env_value)-1] = '\0';
|
||||
}
|
||||
}
|
||||
|
||||
// Set value:
|
||||
setenv(env_name, env_value, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
fclose(env_file_fd);
|
||||
} else {
|
||||
LOGP("Warning: no p4a_env_vars.txt found / failed to open!");
|
||||
}
|
||||
|
||||
LOGP("Changing directory to the one provided by ANDROID_ARGUMENT");
|
||||
LOGP(env_argument);
|
||||
chdir(env_argument);
|
||||
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
Py_NoSiteFlag=1;
|
||||
#endif
|
||||
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
Py_SetProgramName("android_python");
|
||||
#else
|
||||
Py_SetProgramName(L"android_python");
|
||||
#endif
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
/* our logging module for android
|
||||
|
@ -103,34 +166,55 @@ int main(int argc, char *argv[]) {
|
|||
|
||||
LOGP("Preparing to initialize python");
|
||||
|
||||
if (dir_exists("crystax_python/")) {
|
||||
LOGP("crystax_python exists");
|
||||
char paths[256];
|
||||
snprintf(paths, 256,
|
||||
"%s/crystax_python/stdlib.zip:%s/crystax_python/modules",
|
||||
env_argument, env_argument);
|
||||
/* snprintf(paths, 256, "%s/stdlib.zip:%s/modules", env_argument,
|
||||
* env_argument); */
|
||||
// Set up the python path
|
||||
char paths[256];
|
||||
|
||||
char crystax_python_dir[256];
|
||||
snprintf(crystax_python_dir, 256,
|
||||
"%s/crystax_python", getenv("ANDROID_UNPACK"));
|
||||
char python_bundle_dir[256];
|
||||
snprintf(python_bundle_dir, 256,
|
||||
"%s/_python_bundle", getenv("ANDROID_UNPACK"));
|
||||
if (dir_exists(crystax_python_dir) || dir_exists(python_bundle_dir)) {
|
||||
if (dir_exists(crystax_python_dir)) {
|
||||
LOGP("crystax_python exists");
|
||||
snprintf(paths, 256,
|
||||
"%s/stdlib.zip:%s/modules",
|
||||
crystax_python_dir, crystax_python_dir);
|
||||
}
|
||||
|
||||
if (dir_exists(python_bundle_dir)) {
|
||||
LOGP("_python_bundle dir exists");
|
||||
snprintf(paths, 256,
|
||||
"%s/stdlib.zip:%s/modules",
|
||||
python_bundle_dir, python_bundle_dir);
|
||||
}
|
||||
|
||||
LOGP("calculated paths to be...");
|
||||
LOGP(paths);
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
wchar_t *wchar_paths = Py_DecodeLocale(paths, NULL);
|
||||
Py_SetPath(wchar_paths);
|
||||
#else
|
||||
char *wchar_paths = paths;
|
||||
LOGP("Can't Py_SetPath in python2, so crystax python2 doesn't work yet");
|
||||
exit(1);
|
||||
#endif
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
wchar_t *wchar_paths = Py_DecodeLocale(paths, NULL);
|
||||
Py_SetPath(wchar_paths);
|
||||
#endif
|
||||
|
||||
LOGP("set wchar paths...");
|
||||
LOGP("set wchar paths...");
|
||||
} else {
|
||||
LOGP("crystax_python does not exist");
|
||||
// We do not expect to see crystax_python any more, so no point
|
||||
// reminding the user about it. If it does exist, we'll have
|
||||
// logged it earlier.
|
||||
LOGP("_python_bundle does not exist");
|
||||
}
|
||||
|
||||
Py_Initialize();
|
||||
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
// Can't Py_SetPath in python2 but we can set PySys_SetPath, which must
|
||||
// be applied after Py_Initialize rather than before like Py_SetPath
|
||||
#if PY_MICRO_VERSION >= 15
|
||||
// Only for python native-build
|
||||
PySys_SetPath(paths);
|
||||
#endif
|
||||
PySys_SetArgv(argc, argv);
|
||||
#endif
|
||||
|
||||
|
@ -153,8 +237,10 @@ int main(int argc, char *argv[]) {
|
|||
*/
|
||||
PyRun_SimpleString("import sys, posix\n");
|
||||
if (dir_exists("lib")) {
|
||||
/* If we built our own python, set up the paths correctly */
|
||||
LOGP("Setting up python from ANDROID_PRIVATE");
|
||||
/* If we built our own python, set up the paths correctly.
|
||||
* This is only the case if we are using the python2legacy recipe
|
||||
*/
|
||||
LOGP("Setting up python from ANDROID_APP_PATH");
|
||||
PyRun_SimpleString("private = posix.environ['ANDROID_APP_PATH']\n"
|
||||
"argument = posix.environ['ANDROID_ARGUMENT']\n"
|
||||
"sys.path[:] = [ \n"
|
||||
|
@ -165,11 +251,24 @@ int main(int argc, char *argv[]) {
|
|||
" argument ]\n");
|
||||
}
|
||||
|
||||
if (dir_exists("crystax_python")) {
|
||||
char add_site_packages_dir[256];
|
||||
char add_site_packages_dir[256];
|
||||
if (dir_exists(crystax_python_dir)) {
|
||||
snprintf(add_site_packages_dir, 256,
|
||||
"sys.path.append('%s/crystax_python/site-packages')",
|
||||
env_argument);
|
||||
"sys.path.append('%s/site-packages')",
|
||||
crystax_python_dir);
|
||||
|
||||
PyRun_SimpleString("import sys\n"
|
||||
"sys.argv = ['notaninterpreterreally']\n"
|
||||
"from os.path import realpath, join, dirname");
|
||||
PyRun_SimpleString(add_site_packages_dir);
|
||||
/* "sys.path.append(join(dirname(realpath(__file__)), 'site-packages'))") */
|
||||
PyRun_SimpleString("sys.path = ['.'] + sys.path");
|
||||
}
|
||||
|
||||
if (dir_exists(python_bundle_dir)) {
|
||||
snprintf(add_site_packages_dir, 256,
|
||||
"sys.path.append('%s/site-packages')",
|
||||
python_bundle_dir);
|
||||
|
||||
PyRun_SimpleString("import sys\n"
|
||||
"sys.argv = ['notaninterpreterreally']\n"
|
||||
|
@ -210,6 +309,11 @@ int main(int argc, char *argv[]) {
|
|||
/* Get the entrypoint, search the .pyo then .py
|
||||
*/
|
||||
char *dot = strrchr(env_entrypoint, '.');
|
||||
#if PY_MAJOR_VERSION > 2
|
||||
char *ext = ".pyc";
|
||||
#else
|
||||
char *ext = ".pyo";
|
||||
#endif
|
||||
if (dot <= 0) {
|
||||
LOGP("Invalid entrypoint, abort.");
|
||||
return -1;
|
||||
|
@ -218,14 +322,14 @@ int main(int argc, char *argv[]) {
|
|||
LOGP("Entrypoint path is too long, try increasing ENTRYPOINT_MAXLEN.");
|
||||
return -1;
|
||||
}
|
||||
if (!strcmp(dot, ".pyo")) {
|
||||
if (!strcmp(dot, ext)) {
|
||||
if (!file_exists(env_entrypoint)) {
|
||||
/* fallback on .py */
|
||||
strcpy(entrypoint, env_entrypoint);
|
||||
entrypoint[strlen(env_entrypoint) - 1] = '\0';
|
||||
LOGP(entrypoint);
|
||||
if (!file_exists(entrypoint)) {
|
||||
LOGP("Entrypoint not found (.pyo, fallback on .py), abort");
|
||||
LOGP("Entrypoint not found (.pyc/.pyo, fallback on .py), abort");
|
||||
return -1;
|
||||
}
|
||||
} else {
|
||||
|
@ -235,7 +339,11 @@ int main(int argc, char *argv[]) {
|
|||
/* if .py is passed, check the pyo version first */
|
||||
strcpy(entrypoint, env_entrypoint);
|
||||
entrypoint[strlen(env_entrypoint) + 1] = '\0';
|
||||
#if PY_MAJOR_VERSION > 2
|
||||
entrypoint[strlen(env_entrypoint)] = 'c';
|
||||
#else
|
||||
entrypoint[strlen(env_entrypoint)] = 'o';
|
||||
#endif
|
||||
if (!file_exists(entrypoint)) {
|
||||
/* fallback on pure python version */
|
||||
if (!file_exists(env_entrypoint)) {
|
||||
|
@ -245,7 +353,7 @@ int main(int argc, char *argv[]) {
|
|||
strcpy(entrypoint, env_entrypoint);
|
||||
}
|
||||
} else {
|
||||
LOGP("Entrypoint have an invalid extension (must be .py or .pyo), abort.");
|
||||
LOGP("Entrypoint have an invalid extension (must be .py or .pyc/.pyo), abort.");
|
||||
return -1;
|
||||
}
|
||||
// LOGP("Entrypoint is:");
|
||||
|
@ -260,6 +368,7 @@ int main(int argc, char *argv[]) {
|
|||
/* run python !
|
||||
*/
|
||||
ret = PyRun_SimpleFile(fd, entrypoint);
|
||||
fclose(fd);
|
||||
|
||||
if (PyErr_Occurred() != NULL) {
|
||||
ret = 1;
|
||||
|
@ -270,19 +379,48 @@ int main(int argc, char *argv[]) {
|
|||
PyErr_Clear();
|
||||
}
|
||||
|
||||
/* close everything
|
||||
*/
|
||||
Py_Finalize();
|
||||
fclose(fd);
|
||||
|
||||
LOGP("Python for android ended.");
|
||||
|
||||
/* Shut down: since regular shutdown causes issues sometimes
|
||||
(seems to be an incomplete shutdown breaking next launch)
|
||||
we'll use sys.exit(ret) to shutdown, since that one works.
|
||||
|
||||
Reference discussion:
|
||||
|
||||
https://github.com/kivy/kivy/pull/6107#issue-246120816
|
||||
*/
|
||||
char terminatecmd[256];
|
||||
snprintf(
|
||||
terminatecmd, sizeof(terminatecmd),
|
||||
"import sys; sys.exit(%d)\n", ret
|
||||
);
|
||||
PyRun_SimpleString(terminatecmd);
|
||||
|
||||
/* This should never actually be reached, but we'll leave the clean-up
|
||||
* here just to be safe.
|
||||
*/
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
Py_Finalize();
|
||||
LOGP("Unexpectedly reached Py_FinalizeEx(), but was successful.");
|
||||
#else
|
||||
if (Py_FinalizeEx() != 0) // properly check success on Python 3
|
||||
LOGP("Unexpectedly reached Py_FinalizeEx(), and got error!");
|
||||
else
|
||||
LOGP("Unexpectedly reached Py_FinalizeEx(), but was successful.");
|
||||
#endif
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
JNIEXPORT void JNICALL Java_org_kivy_android_PythonService_nativeStart(
|
||||
JNIEnv *env, jobject thiz, jstring j_android_private,
|
||||
jstring j_android_argument, jstring j_service_entrypoint,
|
||||
jstring j_python_name, jstring j_python_home, jstring j_python_path,
|
||||
JNIEnv *env,
|
||||
jobject thiz,
|
||||
jstring j_android_private,
|
||||
jstring j_android_argument,
|
||||
jstring j_service_entrypoint,
|
||||
jstring j_python_name,
|
||||
jstring j_python_home,
|
||||
jstring j_python_path,
|
||||
jstring j_arg) {
|
||||
jboolean iscopy;
|
||||
const char *android_private =
|
||||
|
@ -308,10 +446,7 @@ JNIEXPORT void JNICALL Java_org_kivy_android_PythonService_nativeStart(
|
|||
setenv("PYTHONHOME", python_home, 1);
|
||||
setenv("PYTHONPATH", python_path, 1);
|
||||
setenv("PYTHON_SERVICE_ARGUMENT", arg, 1);
|
||||
|
||||
char ca_path[128];
|
||||
snprintf(ca_path, 128, "%s/crystax_python/site-packages/certifi/cacert.pem", python_home);
|
||||
setenv("SSL_CERT_FILE", ca_path, 1);
|
||||
setenv("P4A_BOOTSTRAP", bootstrap_name, 1);
|
||||
|
||||
char *argv[] = {"."};
|
||||
/* ANDROID_ARGUMENT points to service subdir,
|
||||
|
@ -320,4 +455,47 @@ JNIEXPORT void JNICALL Java_org_kivy_android_PythonService_nativeStart(
|
|||
main(1, argv);
|
||||
}
|
||||
|
||||
#if defined(BOOTSTRAP_NAME_WEBVIEW) || defined(BOOTSTRAP_NAME_SERVICEONLY)
|
||||
// Webview and service_only uses some more functions:
|
||||
|
||||
void Java_org_kivy_android_PythonActivity_nativeSetenv(
|
||||
JNIEnv* env, jclass cls,
|
||||
jstring name, jstring value)
|
||||
//JNIEXPORT void JNICALL SDL_JAVA_INTERFACE(nativeSetenv)(
|
||||
// JNIEnv* env, jclass cls,
|
||||
// jstring name, jstring value)
|
||||
{
|
||||
const char *utfname = (*env)->GetStringUTFChars(env, name, NULL);
|
||||
const char *utfvalue = (*env)->GetStringUTFChars(env, value, NULL);
|
||||
|
||||
setenv(utfname, utfvalue, 1);
|
||||
|
||||
(*env)->ReleaseStringUTFChars(env, name, utfname);
|
||||
(*env)->ReleaseStringUTFChars(env, value, utfvalue);
|
||||
}
|
||||
|
||||
|
||||
void Java_org_kivy_android_PythonActivity_nativeInit(JNIEnv* env, jclass cls, jobject obj)
|
||||
{
|
||||
/* This nativeInit follows SDL2 */
|
||||
|
||||
/* This interface could expand with ABI negotiation, callbacks, etc. */
|
||||
/* SDL_Android_Init(env, cls); */
|
||||
|
||||
/* SDL_SetMainReady(); */
|
||||
|
||||
/* Run the application code! */
|
||||
int status;
|
||||
char *argv[2];
|
||||
argv[0] = "Python_app";
|
||||
argv[1] = NULL;
|
||||
/* status = SDL_main(1, argv); */
|
||||
|
||||
main(1, argv);
|
||||
|
||||
/* Do not issue an exit or the whole application will terminate instead of just the SDL thread */
|
||||
/* exit(status); */
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
|
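As a side note on the `p4a_env_vars.txt` handling added to the bootstrap above: each line is treated as NAME=VALUE, trailing newline and carriage-return characters are stripped, and the pair is exported into the environment before Python starts. The same logic, expressed as a rough Python sketch (the file name and ANDROID_UNPACK variable come from the C code; the parsing details here are a simplified illustration):

import os

def load_env_file(path):
    # Rough mirror of the C loop: split on the first '=', strip \r\n, then export.
    try:
        with open(path) as fd:
            for line in fd:
                if "=" not in line:
                    continue
                name, value = line.split("=", 1)
                os.environ[name] = value.rstrip("\r\n")
    except OSError:
        print("Warning: no p4a_env_vars.txt found / failed to open!")

load_env_file(os.path.join(os.environ.get("ANDROID_UNPACK", "."), "p4a_env_vars.txt"))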
@ -0,0 +1,164 @@
|
|||
package org.kivy.android;
|
||||
|
||||
import android.os.Build;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import android.app.Service;
|
||||
import android.os.IBinder;
|
||||
import android.os.Bundle;
|
||||
import android.content.Intent;
|
||||
import android.content.Context;
|
||||
import android.util.Log;
|
||||
import android.app.Notification;
|
||||
import android.app.PendingIntent;
|
||||
import android.os.Process;
|
||||
import java.io.File;
|
||||
|
||||
import org.kivy.android.PythonUtil;
|
||||
|
||||
import org.renpy.android.Hardware;
|
||||
|
||||
|
||||
public class PythonService extends Service implements Runnable {
|
||||
|
||||
// Thread for Python code
|
||||
private Thread pythonThread = null;
|
||||
|
||||
// Python environment variables
|
||||
private String androidPrivate;
|
||||
private String androidArgument;
|
||||
private String pythonName;
|
||||
private String pythonHome;
|
||||
private String pythonPath;
|
||||
private String serviceEntrypoint;
|
||||
// Argument to pass to Python code,
|
||||
private String pythonServiceArgument;
|
||||
public static PythonService mService = null;
|
||||
private Intent startIntent = null;
|
||||
|
||||
private boolean autoRestartService = false;
|
||||
|
||||
public void setAutoRestartService(boolean restart) {
|
||||
autoRestartService = restart;
|
||||
}
|
||||
|
||||
public boolean canDisplayNotification() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public int startType() {
|
||||
return START_NOT_STICKY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IBinder onBind(Intent arg0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCreate() {
|
||||
super.onCreate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int onStartCommand(Intent intent, int flags, int startId) {
|
||||
if (pythonThread != null) {
|
||||
Log.v("python service", "service exists, do not start again");
|
||||
return START_NOT_STICKY;
|
||||
}
|
||||
|
||||
startIntent = intent;
|
||||
Bundle extras = intent.getExtras();
|
||||
androidPrivate = extras.getString("androidPrivate");
|
||||
androidArgument = extras.getString("androidArgument");
|
||||
serviceEntrypoint = extras.getString("serviceEntrypoint");
|
||||
pythonName = extras.getString("pythonName");
|
||||
pythonHome = extras.getString("pythonHome");
|
||||
pythonPath = extras.getString("pythonPath");
|
||||
pythonServiceArgument = extras.getString("pythonServiceArgument");
|
||||
|
||||
pythonThread = new Thread(this);
|
||||
pythonThread.start();
|
||||
|
||||
if (canDisplayNotification()) {
|
||||
doStartForeground(extras);
|
||||
}
|
||||
|
||||
return startType();
|
||||
}
|
||||
|
||||
protected void doStartForeground(Bundle extras) {
|
||||
String serviceTitle = extras.getString("serviceTitle");
|
||||
String serviceDescription = extras.getString("serviceDescription");
|
||||
|
||||
Notification notification;
|
||||
Context context = getApplicationContext();
|
||||
Intent contextIntent = new Intent(context, PythonActivity.class);
|
||||
PendingIntent pIntent = PendingIntent.getActivity(context, 0, contextIntent,
|
||||
PendingIntent.FLAG_UPDATE_CURRENT);
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
|
||||
notification = new Notification(
|
||||
context.getApplicationInfo().icon, serviceTitle, System.currentTimeMillis());
|
||||
try {
|
||||
// prevent using NotificationCompat, this saves 100kb on apk
|
||||
Method func = notification.getClass().getMethod(
|
||||
"setLatestEventInfo", Context.class, CharSequence.class,
|
||||
CharSequence.class, PendingIntent.class);
|
||||
func.invoke(notification, context, serviceTitle, serviceDescription, pIntent);
|
||||
} catch (NoSuchMethodException | IllegalAccessException |
|
||||
IllegalArgumentException | InvocationTargetException e) {
|
||||
}
|
||||
} else {
|
||||
Notification.Builder builder = new Notification.Builder(context);
|
||||
builder.setContentTitle(serviceTitle);
|
||||
builder.setContentText(serviceDescription);
|
||||
builder.setContentIntent(pIntent);
|
||||
builder.setSmallIcon(context.getApplicationInfo().icon);
|
||||
notification = builder.build();
|
||||
}
|
||||
startForeground(1, notification);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDestroy() {
|
||||
super.onDestroy();
|
||||
pythonThread = null;
|
||||
if (autoRestartService && startIntent != null) {
|
||||
Log.v("python service", "service restart requested");
|
||||
startService(startIntent);
|
||||
}
|
||||
Process.killProcess(Process.myPid());
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops the task gracefully when killed.
|
||||
* Calling stopSelf() will trigger a onDestroy() call from the system.
|
||||
*/
|
||||
@Override
|
||||
public void onTaskRemoved(Intent rootIntent) {
|
||||
super.onTaskRemoved(rootIntent);
|
||||
stopSelf();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run(){
|
||||
String app_root = getFilesDir().getAbsolutePath() + "/app";
|
||||
File app_root_file = new File(app_root);
|
||||
PythonUtil.loadLibraries(app_root_file,
|
||||
new File(getApplicationInfo().nativeLibraryDir));
|
||||
this.mService = this;
|
||||
nativeStart(
|
||||
androidPrivate, androidArgument,
|
||||
serviceEntrypoint, pythonName,
|
||||
pythonHome, pythonPath,
|
||||
pythonServiceArgument);
|
||||
stopSelf();
|
||||
}
|
||||
|
||||
// Native part
|
||||
public static native void nativeStart(
|
||||
String androidPrivate, String androidArgument,
|
||||
String serviceEntrypoint, String pythonName,
|
||||
String pythonHome, String pythonPath,
|
||||
String pythonServiceArgument);
|
||||
}
|
|
@ -0,0 +1,77 @@
|
|||
package org.kivy.android;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
import android.util.Log;
|
||||
import java.util.ArrayList;
|
||||
import java.io.FilenameFilter;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
|
||||
public class PythonUtil {
|
||||
private static final String TAG = "pythonutil";
|
||||
|
||||
protected static void addLibraryIfExists(ArrayList<String> libsList, String pattern, File libsDir) {
|
||||
// pattern should be the name of the lib file, without the
|
||||
// preceding "lib" or suffix ".so", for instance "ssl.*" will
|
||||
// match files of the form "libssl.*.so".
|
||||
File [] files = libsDir.listFiles();
|
||||
|
||||
pattern = "lib" + pattern + "\\.so";
|
||||
Pattern p = Pattern.compile(pattern);
|
||||
for (int i = 0; i < files.length; ++i) {
|
||||
File file = files[i];
|
||||
String name = file.getName();
|
||||
Log.v(TAG, "Checking pattern " + pattern + " against " + name);
|
||||
if (p.matcher(name).matches()) {
|
||||
Log.v(TAG, "Pattern " + pattern + " matched file " + name);
|
||||
libsList.add(name.substring(3, name.length() - 3));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected static ArrayList<String> getLibraries(File libsDir) {
|
||||
ArrayList<String> libsList = new ArrayList<String>();
|
||||
addLibraryIfExists(libsList, "crystax", libsDir);
|
||||
addLibraryIfExists(libsList, "sqlite3", libsDir);
|
||||
addLibraryIfExists(libsList, "ffi", libsDir);
|
||||
addLibraryIfExists(libsList, "ssl.*", libsDir);
|
||||
addLibraryIfExists(libsList, "crypto.*", libsDir);
|
||||
libsList.add("python2.7");
|
||||
libsList.add("python3.5m");
|
||||
libsList.add("python3.6m");
|
||||
libsList.add("python3.7m");
|
||||
libsList.add("main");
|
||||
return libsList;
|
||||
}
|
||||
|
||||
public static void loadLibraries(File filesDir, File libsDir) {
|
||||
String filesDirPath = filesDir.getAbsolutePath();
|
||||
boolean foundPython = false;
|
||||
|
||||
for (String lib : getLibraries(libsDir)) {
|
||||
Log.v(TAG, "Loading library: " + lib);
|
||||
try {
|
||||
System.loadLibrary(lib);
|
||||
if (lib.startsWith("python")) {
|
||||
foundPython = true;
|
||||
}
|
||||
} catch(UnsatisfiedLinkError e) {
|
||||
// If this is the last possible libpython
|
||||
// load, and it has failed, give a more
|
||||
// general error
|
||||
Log.v(TAG, "Library loading error: " + e.getMessage());
|
||||
if (lib.startsWith("python3.7") && !foundPython) {
|
||||
throw new java.lang.RuntimeException("Could not load any libpythonXXX.so");
|
||||
} else if (lib.startsWith("python")) {
|
||||
continue;
|
||||
} else {
|
||||
Log.v(TAG, "An UnsatisfiedLinkError occurred loading " + lib);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Log.v(TAG, "Loaded everything!");
|
||||
}
|
||||
}
|
|
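The PythonUtil helper above builds the `System.loadLibrary` names by matching `lib<pattern>.so` files in the native-lib directory and stripping the `lib` prefix and `.so` suffix. A quick Python sketch of that name matching, purely for illustration (the directory contents below are invented):

import re

def matching_libs(filenames, pattern):
    # "ssl.*" matches files of the form "libssl.*.so"; return names with
    # the "lib" prefix and ".so" suffix stripped, as loadLibrary expects.
    regex = re.compile("lib" + pattern + r"\.so")
    return [name[3:-3] for name in filenames if regex.fullmatch(name)]

files = ["libssl1.1.so", "libcrypto1.1.so", "libsqlite3.so", "libmain.so"]
print(matching_libs(files, "ssl.*"))      # ['ssl1.1']
print(matching_libs(files, "sqlite3"))    # ['sqlite3']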
@ -0,0 +1,115 @@
|
|||
// This string is autogenerated by ChangeAppSettings.sh, do not change
|
||||
// spaces amount
|
||||
package org.renpy.android;
|
||||
|
||||
import java.io.*;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.util.Log;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.BufferedOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.File;
|
||||
|
||||
import java.util.zip.GZIPInputStream;
|
||||
|
||||
import android.content.res.AssetManager;
|
||||
|
||||
import org.kamranzafar.jtar.*;
|
||||
|
||||
public class AssetExtract {
|
||||
|
||||
private AssetManager mAssetManager = null;
|
||||
private Activity mActivity = null;
|
||||
|
||||
public AssetExtract(Activity act) {
|
||||
mActivity = act;
|
||||
mAssetManager = act.getAssets();
|
||||
}
|
||||
|
||||
public boolean extractTar(String asset, String target) {
|
||||
|
||||
byte buf[] = new byte[1024 * 1024];
|
||||
|
||||
InputStream assetStream = null;
|
||||
TarInputStream tis = null;
|
||||
|
||||
try {
|
||||
assetStream = mAssetManager.open(asset, AssetManager.ACCESS_STREAMING);
|
||||
tis = new TarInputStream(new BufferedInputStream(new GZIPInputStream(new BufferedInputStream(assetStream, 8192)), 8192));
|
||||
} catch (IOException e) {
|
||||
Log.e("python", "opening up extract tar", e);
|
||||
return false;
|
||||
}
|
||||
|
||||
while (true) {
|
||||
TarEntry entry = null;
|
||||
|
||||
try {
|
||||
entry = tis.getNextEntry();
|
||||
} catch ( java.io.IOException e ) {
|
||||
Log.e("python", "extracting tar", e);
|
||||
return false;
|
||||
}
|
||||
|
||||
if ( entry == null ) {
|
||||
break;
|
||||
}
|
||||
|
||||
Log.v("python", "extracting " + entry.getName());
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
|
||||
try {
|
||||
new File(target +"/" + entry.getName()).mkdirs();
|
||||
} catch ( SecurityException e ) { };
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
OutputStream out = null;
|
||||
String path = target + "/" + entry.getName();
|
||||
|
||||
try {
|
||||
out = new BufferedOutputStream(new FileOutputStream(path), 8192);
|
||||
} catch ( FileNotFoundException e ) {
|
||||
} catch ( SecurityException e ) { };
|
||||
|
||||
if ( out == null ) {
|
||||
Log.e("python", "could not open " + path);
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
int len = tis.read(buf);
|
||||
|
||||
if (len == -1) {
|
||||
break;
|
||||
}
|
||||
|
||||
out.write(buf, 0, len);
|
||||
}
|
||||
|
||||
out.flush();
|
||||
out.close();
|
||||
} catch ( java.io.IOException e ) {
|
||||
Log.e("python", "extracting zip", e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
tis.close();
|
||||
assetStream.close();
|
||||
} catch (IOException e) {
|
||||
// pass
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,54 @@
|
|||
/**
|
||||
* This class takes care of managing resources for us. In our code, we
|
||||
* can't use R, since the name of the package containing R will
|
||||
* change. (This same code is used in both org.renpy.android and
|
||||
* org.renpy.pygame.) So this is the next best thing.
|
||||
*/
|
||||
|
||||
package org.renpy.android;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.content.res.Resources;
|
||||
import android.view.View;
|
||||
|
||||
import android.util.Log;
|
||||
|
||||
public class ResourceManager {
|
||||
|
||||
private Activity act;
|
||||
private Resources res;
|
||||
|
||||
public ResourceManager(Activity activity) {
|
||||
act = activity;
|
||||
res = act.getResources();
|
||||
}
|
||||
|
||||
public int getIdentifier(String name, String kind) {
|
||||
Log.v("SDL", "getting identifier");
|
||||
Log.v("SDL", "kind is " + kind + " and name " + name);
|
||||
Log.v("SDL", "result is " + res.getIdentifier(name, kind, act.getPackageName()));
|
||||
return res.getIdentifier(name, kind, act.getPackageName());
|
||||
}
|
||||
|
||||
public String getString(String name) {
|
||||
|
||||
try {
|
||||
Log.v("SDL", "asked to get string " + name);
|
||||
return res.getString(getIdentifier(name, "string"));
|
||||
} catch (Exception e) {
|
||||
Log.v("SDL", "got exception looking for string!");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public View inflateView(String name) {
|
||||
int id = getIdentifier(name, "layout");
|
||||
return act.getLayoutInflater().inflate(id, null);
|
||||
}
|
||||
|
||||
public View getViewById(View v, String name) {
|
||||
int id = getIdentifier(name, "id");
|
||||
return v.findViewById(id);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,77 @@
|
|||
package {{ args.package }};
|
||||
|
||||
import android.os.Build;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import android.content.Intent;
|
||||
import android.content.Context;
|
||||
import android.app.Notification;
|
||||
import android.app.PendingIntent;
|
||||
import android.os.Bundle;
|
||||
import org.kivy.android.PythonService;
|
||||
import org.kivy.android.PythonActivity;
|
||||
|
||||
|
||||
public class Service{{ name|capitalize }} extends PythonService {
|
||||
{% if sticky %}
|
||||
@Override
|
||||
public int startType() {
|
||||
return START_STICKY;
|
||||
}
|
||||
{% endif %}
|
||||
|
||||
{% if not foreground %}
|
||||
@Override
|
||||
public boolean canDisplayNotification() {
|
||||
return false;
|
||||
}
|
||||
{% endif %}
|
||||
|
||||
@Override
|
||||
protected void doStartForeground(Bundle extras) {
|
||||
Notification notification;
|
||||
Context context = getApplicationContext();
|
||||
Intent contextIntent = new Intent(context, PythonActivity.class);
|
||||
PendingIntent pIntent = PendingIntent.getActivity(context, 0, contextIntent,
|
||||
PendingIntent.FLAG_UPDATE_CURRENT);
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
|
||||
notification = new Notification(
|
||||
context.getApplicationInfo().icon, "{{ args.name }}", System.currentTimeMillis());
|
||||
try {
|
||||
// prevent using NotificationCompat, this saves 100kb on apk
|
||||
Method func = notification.getClass().getMethod(
|
||||
"setLatestEventInfo", Context.class, CharSequence.class,
|
||||
CharSequence.class, PendingIntent.class);
|
||||
func.invoke(notification, context, "{{ args.name }}", "{{ name| capitalize }}", pIntent);
|
||||
} catch (NoSuchMethodException | IllegalAccessException |
|
||||
IllegalArgumentException | InvocationTargetException e) {
|
||||
}
|
||||
} else {
|
||||
Notification.Builder builder = new Notification.Builder(context);
|
||||
builder.setContentTitle("{{ args.name }}");
|
||||
builder.setContentText("{{ name| capitalize }}");
|
||||
builder.setContentIntent(pIntent);
|
||||
builder.setSmallIcon(context.getApplicationInfo().icon);
|
||||
notification = builder.build();
|
||||
}
|
||||
startForeground({{ service_id }}, notification);
|
||||
}
|
||||
|
||||
static public void start(Context ctx, String pythonServiceArgument) {
|
||||
Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class);
|
||||
String argument = ctx.getFilesDir().getAbsolutePath() + "/app";
|
||||
intent.putExtra("androidPrivate", ctx.getFilesDir().getAbsolutePath());
|
||||
intent.putExtra("androidArgument", argument);
|
||||
intent.putExtra("serviceEntrypoint", "{{ entrypoint }}");
|
||||
intent.putExtra("pythonName", "{{ name }}");
|
||||
intent.putExtra("pythonHome", argument);
|
||||
intent.putExtra("pythonPath", argument + ":" + argument + "/lib");
|
||||
intent.putExtra("pythonServiceArgument", pythonServiceArgument);
|
||||
ctx.startService(intent);
|
||||
}
|
||||
|
||||
static public void stop(Context ctx) {
|
||||
Intent intent = new Intent(ctx, Service{{ name|capitalize }}.class);
|
||||
ctx.stopService(intent);
|
||||
}
|
||||
}
|
|
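The generated Service{{ name|capitalize }} class above exposes static start/stop helpers, so application code typically drives it through pyjnius. A hedged usage sketch follows; the package name and service class are placeholders, and it assumes the usual PythonActivity.mActivity static reference is available in the running app.

from jnius import autoclass  # assumes pyjnius is bundled with the app

# Placeholders: replace with the real package and capitalized service name.
service = autoclass('org.test.myapp.ServiceMyservice')
activity = autoclass('org.kivy.android.PythonActivity').mActivity

service.start(activity, 'value passed through PYTHON_SERVICE_ARGUMENT')
# ...later, when the service is no longer needed...
service.stop(activity)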
@ -0,0 +1,21 @@
|
|||
# This file is used to override default values used by the Ant build system.
|
||||
#
|
||||
# This file must be checked in Version Control Systems, as it is
|
||||
# integral to the build system of your project.
|
||||
|
||||
# This file is only used by the Ant script.
|
||||
|
||||
# You can use this to override default values such as
|
||||
# 'source.dir' for the location of your java source folder and
|
||||
# 'out.dir' for the location of your output folder.
|
||||
|
||||
# You can also use it define how the release builds are signed by declaring
|
||||
# the following properties:
|
||||
# 'key.store' for the location of your keystore and
|
||||
# 'key.alias' for the name of the key to use.
|
||||
# The password will be asked during the build when you use the 'release' target.
|
||||
|
||||
key.store=${env.P4A_RELEASE_KEYSTORE}
|
||||
key.alias=${env.P4A_RELEASE_KEYALIAS}
|
||||
key.store.password=${env.P4A_RELEASE_KEYSTORE_PASSWD}
|
||||
key.alias.password=${env.P4A_RELEASE_KEYALIAS_PASSWD}
|
|
@ -0,0 +1,80 @@
|
|||
// Top-level build file where you can add configuration options common to all sub-projects/modules.
|
||||
buildscript {
|
||||
repositories {
|
||||
google()
|
||||
jcenter()
|
||||
}
|
||||
dependencies {
|
||||
classpath 'com.android.tools.build:gradle:3.1.4'
|
||||
}
|
||||
}
|
||||
|
||||
allprojects {
|
||||
repositories {
|
||||
google()
|
||||
jcenter()
|
||||
flatDir {
|
||||
dirs 'libs'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
apply plugin: 'com.android.application'
|
||||
|
||||
android {
|
||||
compileSdkVersion {{ android_api }}
|
||||
buildToolsVersion '{{ build_tools_version }}'
|
||||
defaultConfig {
|
||||
minSdkVersion {{ args.min_sdk_version }}
|
||||
targetSdkVersion {{ android_api }}
|
||||
versionCode {{ args.numeric_version }}
|
||||
versionName '{{ args.version }}'
|
||||
}
|
||||
|
||||
{% if args.sign -%}
|
||||
signingConfigs {
|
||||
release {
|
||||
storeFile file(System.getenv("P4A_RELEASE_KEYSTORE"))
|
||||
keyAlias System.getenv("P4A_RELEASE_KEYALIAS")
|
||||
storePassword System.getenv("P4A_RELEASE_KEYSTORE_PASSWD")
|
||||
keyPassword System.getenv("P4A_RELEASE_KEYALIAS_PASSWD")
|
||||
}
|
||||
}
|
||||
{%- endif %}
|
||||
|
||||
buildTypes {
|
||||
debug {
|
||||
}
|
||||
release {
|
||||
{% if args.sign -%}
|
||||
signingConfig signingConfigs.release
|
||||
{%- endif %}
|
||||
}
|
||||
}
|
||||
|
||||
compileOptions {
|
||||
sourceCompatibility JavaVersion.VERSION_1_7
|
||||
targetCompatibility JavaVersion.VERSION_1_7
|
||||
}
|
||||
|
||||
sourceSets {
|
||||
main {
|
||||
jniLibs.srcDir 'libs'
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
dependencies {
|
||||
{%- for aar in aars %}
|
||||
compile(name: '{{ aar }}', ext: 'aar')
|
||||
{%- endfor -%}
|
||||
{%- for jar in jars %}
|
||||
compile files('src/main/libs/{{ jar }}')
|
||||
{%- endfor -%}
|
||||
{%- if args.depends -%}
|
||||
{%- for depend in args.depends %}
|
||||
compile '{{ depend }}'
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
}
|
|
@ -0,0 +1,95 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- This should be changed to the name of your project -->
|
||||
<project name="{{ versioned_name }}" default="help">
|
||||
|
||||
<!-- The local.properties file is created and updated by the 'android' tool.
|
||||
It contains the path to the SDK. It should *NOT* be checked into
|
||||
Version Control Systems. -->
|
||||
<property file="local.properties" />
|
||||
|
||||
<!-- The ant.properties file can be created by you. It is only edited by the
|
||||
'android' tool to add properties to it.
|
||||
This is the place to change some Ant specific build properties.
|
||||
Here are some properties you may want to change/update:
|
||||
|
||||
source.dir
|
||||
The name of the source directory. Default is 'src'.
|
||||
out.dir
|
||||
The name of the output directory. Default is 'bin'.
|
||||
|
||||
For other overridable properties, look at the beginning of the rules
|
||||
files in the SDK, at tools/ant/build.xml
|
||||
|
||||
Properties related to the SDK location or the project target should
|
||||
be updated using the 'android' tool with the 'update' action.
|
||||
|
||||
This file is an integral part of the build system for your
|
||||
application and should be checked into Version Control Systems.
|
||||
|
||||
-->
|
||||
<property file="ant.properties" />
|
||||
|
||||
<!-- if sdk.dir was not set from one of the property file, then
|
||||
get it from the ANDROID_HOME env var.
|
||||
This must be done before we load project.properties since
|
||||
the proguard config can use sdk.dir -->
|
||||
<property environment="env" />
|
||||
<condition property="sdk.dir" value="${env.ANDROID_HOME}">
|
||||
<isset property="env.ANDROID_HOME" />
|
||||
</condition>
|
||||
|
||||
<property file="build.properties" />
|
||||
|
||||
<!-- The project.properties file is created and updated by the 'android'
|
||||
tool, as well as ADT.
|
||||
|
||||
This contains project specific properties such as project target, and library
|
||||
dependencies. Lower level build properties are stored in ant.properties
|
||||
(or in .classpath for Eclipse projects).
|
||||
|
||||
This file is an integral part of the build system for your
|
||||
application and should be checked into Version Control Systems. -->
|
||||
<loadproperties srcFile="project.properties" />
|
||||
|
||||
<!-- quick check on sdk.dir -->
|
||||
<fail
|
||||
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
|
||||
unless="sdk.dir"
|
||||
/>
|
||||
|
||||
<!--
|
||||
Import per project custom build rules if present at the root of the project.
|
||||
This is the place to put custom intermediary targets such as:
|
||||
-pre-build
|
||||
-pre-compile
|
||||
-post-compile (This is typically used for code obfuscation.
|
||||
Compiled code location: ${out.classes.absolute.dir}
|
||||
If this is not done in place, override ${out.dex.input.absolute.dir})
|
||||
-post-package
|
||||
-post-build
|
||||
-pre-clean
|
||||
-->
|
||||
<import file="custom_rules.xml" optional="true" />
|
||||
|
||||
<!-- Import the actual build file.
|
||||
|
||||
To customize existing targets, there are two options:
|
||||
- Customize only one target:
|
||||
- copy/paste the target into this file, *before* the
|
||||
<import> task.
|
||||
- customize it to your needs.
|
||||
- Customize the whole content of build.xml
|
||||
- copy/paste the content of the rules files (minus the top node)
|
||||
into this file, replacing the <import> task.
|
||||
- customize to your needs.
|
||||
|
||||
***********************
|
||||
****** IMPORTANT ******
|
||||
***********************
|
||||
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
|
||||
in order to avoid having your file be overridden by tools such as "android update project"
|
||||
-->
|
||||
<!-- version-tag: 1 -->
|
||||
<import file="${sdk.dir}/tools/ant/build.xml" />
|
||||
|
||||
</project>
|
|
@ -0,0 +1,21 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project name="CustomRules">
|
||||
<target name="-pre-build">
|
||||
<copy todir="tmp-src">
|
||||
{% if args.launcher %}
|
||||
<fileset dir="src/main/java" includes="**" />
|
||||
{% else %}
|
||||
<fileset dir="src/main/java">
|
||||
<exclude name="org/kivy/android/ProjectAdapter.java" />
|
||||
<exclude name="org/kivy/android/ProjectChooser.java" />
|
||||
</fileset>
|
||||
{% endif %}
|
||||
{% for dir, includes in args.extra_source_dirs %}
|
||||
<fileset dir="{{ dir }}" includes="{{ includes }}" />
|
||||
{% endfor %}
|
||||
</copy>
|
||||
</target>
|
||||
<target name="-post-build">
|
||||
<delete dir="tmp-src" />
|
||||
</target>
|
||||
</project>
|
(Two new binary image files, not shown: 3.2 KiB and 11 KiB.)
|
@ -0,0 +1 @@
|
|||
# put files here that you need to un-blacklist
|
|
@ -7,13 +7,13 @@ LOCAL_MODULE := main
|
|||
# Add your application source files here...
|
||||
LOCAL_SRC_FILES := start.c pyjniusjni.c
|
||||
|
||||
LOCAL_CFLAGS += -I$(LOCAL_PATH)/../../../../other_builds/$(PYTHON2_NAME)/$(ARCH)/python2/python-install/include/python2.7 $(EXTRA_CFLAGS)
|
||||
LOCAL_CFLAGS += -I$(PYTHON_INCLUDE_ROOT) $(EXTRA_CFLAGS)
|
||||
|
||||
LOCAL_SHARED_LIBRARIES := python_shared
|
||||
|
||||
LOCAL_LDLIBS := -llog $(EXTRA_LDLIBS)
|
||||
|
||||
LOCAL_LDFLAGS += -L$(LOCAL_PATH)/../../../../other_builds/$(PYTHON2_NAME)/$(ARCH)/python2/python-install/lib $(APPLICATION_ADDITIONAL_LDFLAGS)
|
||||
LOCAL_LDFLAGS += -L$(PYTHON_LINK_ROOT) $(APPLICATION_ADDITIONAL_LDFLAGS)
|
||||
|
||||
include $(BUILD_SHARED_LIBRARY)
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
#define BOOTSTRAP_NAME_SERVICEONLY
|
||||
#define BOOTSTRAP_USES_NO_SDL_HEADERS
|
||||
|
||||
const char bootstrap_name[] = "service_only";
|
||||
|
|
@ -41,12 +41,6 @@ android {
|
|||
}
|
||||
}
|
||||
|
||||
applicationVariants.all { variant ->
|
||||
variant.outputs.all {
|
||||
outputFileName = "../" + outputFileName
|
||||
}
|
||||
}
|
||||
|
||||
dexOptions {
|
||||
jumboMode true
|
||||
}
|
||||
|
|
|
@ -2,21 +2,25 @@ from __future__ import print_function
|
|||
|
||||
from os.path import (join, realpath, dirname, expanduser, exists,
|
||||
split, isdir)
|
||||
from os import environ, listdir
|
||||
from os import environ
|
||||
import copy
|
||||
import os
|
||||
import glob
|
||||
import sys
|
||||
import re
|
||||
import sh
|
||||
import subprocess
|
||||
|
||||
from pythonforandroid.util import (ensure_dir, current_directory)
|
||||
from pythonforandroid.logger import (info, warning, error, info_notify,
|
||||
Err_Fore, Err_Style, info_main,
|
||||
shprint)
|
||||
from pythonforandroid.archs import ArchARM, ArchARMv7_a, Archx86, Archx86_64, ArchAarch_64
|
||||
from pythonforandroid.recipe import Recipe
|
||||
|
||||
DEFAULT_ANDROID_API = 15
|
||||
from pythonforandroid.util import (
|
||||
current_directory, ensure_dir, get_virtualenv_executable,
|
||||
BuildInterruptingException
|
||||
)
|
||||
from pythonforandroid.logger import (info, warning, info_notify, info_main, shprint)
|
||||
from pythonforandroid.archs import ArchARM, ArchARMv7_a, ArchAarch_64, Archx86, Archx86_64
|
||||
from pythonforandroid.recipe import CythonRecipe, Recipe
|
||||
from pythonforandroid.recommendations import (
|
||||
check_ndk_version, check_target_api, check_ndk_api,
|
||||
RECOMMENDED_NDK_API, RECOMMENDED_TARGET_API)
|
||||
|
||||
|
||||
class Context(object):
|
||||
|
@ -24,14 +28,19 @@ class Context(object):
|
|||
will be instantiated and used to hold all the build state.'''
|
||||
|
||||
env = environ.copy()
|
||||
root_dir = None # the filepath of toolchain.py
|
||||
storage_dir = None # the root dir where builds and dists will be stored
|
||||
# the filepath of toolchain.py
|
||||
root_dir = None
|
||||
# the root dir where builds and dists will be stored
|
||||
storage_dir = None
|
||||
|
||||
build_dir = None # in which bootstraps are copied for building
|
||||
# and recipes are built
|
||||
dist_dir = None # the Android project folder where everything ends up
|
||||
libs_dir = None # where Android libs are cached after build but
|
||||
# before being placed in dists
|
||||
# in which bootstraps are copied for building
|
||||
# and recipes are built
|
||||
build_dir = None
|
||||
# the Android project folder where everything ends up
|
||||
dist_dir = None
|
||||
# where Android libs are cached after build
|
||||
# but before being placed in dists
|
||||
libs_dir = None
|
||||
aars_dir = None
|
||||
|
||||
ccache = None # whether to use ccache
|
||||
|
@ -45,7 +54,7 @@ class Context(object):
|
|||
|
||||
recipe_build_order = None # Will hold the list of all built recipes
|
||||
|
||||
symlink_java_src = False # If True, will symlink instead of copying during build
|
||||
symlink_java_src = False # If True, will symlink instead of copying during build
|
||||
|
||||
java_build_tool = 'auto'
|
||||
|
||||
|
@ -121,17 +130,17 @@ class Context(object):
|
|||
self._android_api = value
|
||||
|
||||
@property
|
||||
def ndk_ver(self):
|
||||
'''The version of the NDK being used for compilation.'''
|
||||
if self._ndk_ver is None:
|
||||
raise ValueError('Tried to access ndk_ver but it has not '
|
||||
def ndk_api(self):
|
||||
'''The API number to compile against'''
|
||||
if self._ndk_api is None:
|
||||
raise ValueError('Tried to access ndk_api but it has not '
|
||||
'been set - this should not happen, something '
|
||||
'went wrong!')
|
||||
return self._ndk_ver
|
||||
return self._ndk_api
|
||||
|
||||
@ndk_ver.setter
|
||||
def ndk_ver(self, value):
|
||||
self._ndk_ver = value
|
||||
@ndk_api.setter
|
||||
def ndk_api(self, value):
|
||||
self._ndk_api = value
|
||||
|
||||
@property
|
||||
def sdk_dir(self):
|
||||
|
@ -159,9 +168,11 @@ class Context(object):
|
|||
def ndk_dir(self, value):
|
||||
self._ndk_dir = value
|
||||
|
||||
def prepare_build_environment(self, user_sdk_dir, user_ndk_dir,
|
||||
user_android_api, user_android_min_api,
|
||||
user_ndk_ver):
|
||||
def prepare_build_environment(self,
|
||||
user_sdk_dir,
|
||||
user_ndk_dir,
|
||||
user_android_api,
|
||||
user_ndk_api):
|
||||
'''Checks that build dependencies exist and sets internal variables
|
||||
for the Android SDK etc.
|
||||
|
||||
|
@ -180,12 +191,14 @@ class Context(object):
|
|||
sdk_dir = None
|
||||
if user_sdk_dir:
|
||||
sdk_dir = user_sdk_dir
|
||||
if sdk_dir is None: # This is the old P4A-specific var
|
||||
# This is the old P4A-specific var
|
||||
if sdk_dir is None:
|
||||
sdk_dir = environ.get('ANDROIDSDK', None)
|
||||
if sdk_dir is None: # This seems used more conventionally
|
||||
# This seems used more conventionally
|
||||
if sdk_dir is None:
|
||||
sdk_dir = environ.get('ANDROID_HOME', None)
|
||||
if sdk_dir is None: # Checks in the buildozer SDK dir, useful
|
||||
# for debug tests of p4a
|
||||
# Checks in the buildozer SDK dir, useful for debug tests of p4a
|
||||
if sdk_dir is None:
|
||||
possible_dirs = glob.glob(expanduser(join(
|
||||
'~', '.buildozer', 'android', 'platform', 'android-sdk-*')))
|
||||
possible_dirs = [d for d in possible_dirs if not
|
||||
|
@ -199,57 +212,25 @@ class Context(object):
|
|||
'maintain your own SDK download.')
|
||||
sdk_dir = possible_dirs[0]
|
||||
if sdk_dir is None:
|
||||
warning('Android SDK dir was not specified, exiting.')
|
||||
exit(1)
|
||||
raise BuildInterruptingException('Android SDK dir was not specified, exiting.')
|
||||
self.sdk_dir = realpath(sdk_dir)
|
||||
|
||||
# Check what Android API we're using
|
||||
android_api = None
|
||||
if user_android_api:
|
||||
android_api = user_android_api
|
||||
if android_api is not None:
|
||||
info('Getting Android API version from user argument')
|
||||
if android_api is None:
|
||||
android_api = environ.get('ANDROIDAPI', None)
|
||||
if android_api is not None:
|
||||
info('Found Android API target in $ANDROIDAPI')
|
||||
if android_api is None:
|
||||
info('Getting Android API version from user argument: {}'.format(android_api))
|
||||
elif 'ANDROIDAPI' in environ:
|
||||
android_api = environ['ANDROIDAPI']
|
||||
info('Found Android API target in $ANDROIDAPI: {}'.format(android_api))
|
||||
else:
|
||||
info('Android API target was not set manually, using '
|
||||
'the default of {}'.format(DEFAULT_ANDROID_API))
|
||||
android_api = DEFAULT_ANDROID_API
|
||||
'the default of {}'.format(RECOMMENDED_TARGET_API))
|
||||
android_api = RECOMMENDED_TARGET_API
|
||||
android_api = int(android_api)
|
||||
self.android_api = android_api
|
||||
|
||||
if self.android_api >= 21 and self.archs[0].arch == 'armeabi':
|
||||
error('Asked to build for armeabi architecture with API '
|
||||
'{}, but API 21 or greater does not support armeabi'.format(
|
||||
self.android_api))
|
||||
error('You probably want to build with --arch=armeabi-v7a instead')
|
||||
exit(1)
|
||||
|
||||
# try to determine min_api
|
||||
android_min_api = None
|
||||
if user_android_min_api:
|
||||
android_min_api = user_android_min_api
|
||||
if android_min_api is not None:
|
||||
info('Getting Minimum Android API version from user argument')
|
||||
if android_min_api is None:
|
||||
android_min_api = environ.get("ANDROIDMINAPI", None)
|
||||
if android_min_api is not None:
|
||||
info('Found Android minimum api in $ANDROIDMINAPI')
|
||||
if android_min_api is None:
|
||||
info('Minimum Android API was not set, using current Android API '
|
||||
'{}'.format(android_api))
|
||||
android_min_api = android_api
|
||||
android_min_api = int(android_min_api)
|
||||
self.android_min_api = android_min_api
|
||||
|
||||
info("Requested API {} (minimum {})".format(
|
||||
self.android_api, self.android_min_api))
|
||||
|
||||
if self.android_min_api > android_api:
|
||||
error('Android minimum api cannot be higher than Android api')
|
||||
exit(1)
|
||||
check_target_api(android_api, self.archs[0].arch)
|
||||
|
||||
if exists(join(sdk_dir, 'tools', 'bin', 'avdmanager')):
|
||||
avdmanager = sh.Command(join(sdk_dir, 'tools', 'bin', 'avdmanager'))
|
||||
|
@ -258,9 +239,9 @@ class Context(object):
|
|||
android = sh.Command(join(sdk_dir, 'tools', 'android'))
|
||||
targets = android('list').stdout.decode('utf-8').split('\n')
|
||||
else:
|
||||
error('Could not find `android` or `sdkmanager` binaries in '
|
||||
'Android SDK. Exiting.')
|
||||
exit(1)
|
||||
raise BuildInterruptingException(
|
||||
'Could not find `android` or `sdkmanager` binaries in Android SDK',
|
||||
instructions='Make sure the path to the Android SDK is correct')
|
||||
apis = [s for s in targets if re.match(r'^ *API level: ', s)]
|
||||
apis = [re.findall(r'[0-9]+', s) for s in apis]
|
||||
apis = [int(s[0]) for s in apis if s]
|
||||
|
@ -270,30 +251,28 @@ class Context(object):
|
|||
info(('Requested API target {} is available, '
|
||||
'continuing.').format(android_api))
|
||||
else:
|
||||
warning(('Requested API target {} is not available, install '
|
||||
'it with the SDK android tool.').format(android_api))
|
||||
warning('Exiting.')
|
||||
exit(1)
|
||||
raise BuildInterruptingException(
|
||||
('Requested API target {} is not available, install '
|
||||
'it with the SDK android tool.').format(android_api))
|
||||
|
||||
# Find the Android NDK
|
||||
# Could also use ANDROID_NDK, but doesn't look like many tools use this
|
||||
ndk_dir = None
|
||||
if user_ndk_dir:
|
||||
ndk_dir = user_ndk_dir
|
||||
if ndk_dir is not None:
|
||||
info('Getting NDK dir from user argument')
|
||||
info('Getting NDK dir from user argument')
|
||||
if ndk_dir is None: # The old P4A-specific dir
|
||||
ndk_dir = environ.get('ANDROIDNDK', None)
|
||||
if ndk_dir is not None:
|
||||
info('Found NDK dir in $ANDROIDNDK')
|
||||
info('Found NDK dir in $ANDROIDNDK: {}'.format(ndk_dir))
|
||||
if ndk_dir is None: # Apparently the most common convention
|
||||
ndk_dir = environ.get('NDK_HOME', None)
|
||||
if ndk_dir is not None:
|
||||
info('Found NDK dir in $NDK_HOME')
|
||||
info('Found NDK dir in $NDK_HOME: {}'.format(ndk_dir))
|
||||
if ndk_dir is None: # Another convention (with maven?)
|
||||
ndk_dir = environ.get('ANDROID_NDK_HOME', None)
|
||||
if ndk_dir is not None:
|
||||
info('Found NDK dir in $ANDROID_NDK_HOME')
|
||||
info('Found NDK dir in $ANDROID_NDK_HOME: {}'.format(ndk_dir))
|
||||
if ndk_dir is None: # Checks in the buildozer NDK dir, useful
|
||||
# for debug tests of p4a
|
||||
possible_dirs = glob.glob(expanduser(join(
|
||||
|
@ -307,62 +286,31 @@ class Context(object):
|
|||
'maintain your own NDK download.')
|
||||
ndk_dir = possible_dirs[0]
|
||||
if ndk_dir is None:
|
||||
warning('Android NDK dir was not specified, exiting.')
|
||||
exit(1)
|
||||
raise BuildInterruptingException('Android NDK dir was not specified')
|
||||
self.ndk_dir = realpath(ndk_dir)
|
||||
|
||||
# Find the NDK version, and check it against what the NDK dir
|
||||
# seems to report
|
||||
ndk_ver = None
|
||||
if user_ndk_ver:
|
||||
ndk_ver = user_ndk_ver
|
||||
if ndk_dir is not None:
|
||||
info('Got NDK version from user argument')
|
||||
if ndk_ver is None:
|
||||
ndk_ver = environ.get('ANDROIDNDKVER', None)
|
||||
if ndk_dir is not None:
|
||||
info('Got NDK version from $ANDROIDNDKVER')
|
||||
check_ndk_version(ndk_dir)
|
||||
|
||||
self.ndk = 'google'
|
||||
self.ndk = 'crystax' # force crystax detection
|
||||
|
||||
try:
|
||||
with open(join(ndk_dir, 'RELEASE.TXT')) as fileh:
|
||||
reported_ndk_ver = fileh.read().split(' ')[0].strip()
|
||||
except IOError:
|
||||
pass
|
||||
ndk_api = None
|
||||
if user_ndk_api:
|
||||
ndk_api = user_ndk_api
|
||||
info('Getting NDK API version (i.e. minimum supported API) from user argument')
|
||||
elif 'NDKAPI' in environ:
|
||||
ndk_api = environ.get('NDKAPI', None)
|
||||
info('Found Android API target in $NDKAPI')
|
||||
else:
|
||||
if reported_ndk_ver.startswith('crystax-ndk-'):
|
||||
reported_ndk_ver = reported_ndk_ver[12:]
|
||||
self.ndk = 'crystax'
|
||||
if ndk_ver is None:
|
||||
ndk_ver = reported_ndk_ver
|
||||
info(('Got Android NDK version from the NDK dir: '
|
||||
'it is {}').format(ndk_ver))
|
||||
else:
|
||||
if ndk_ver != reported_ndk_ver:
|
||||
warning('NDK version was set as {}, but checking '
|
||||
'the NDK dir claims it is {}.'.format(
|
||||
ndk_ver, reported_ndk_ver))
|
||||
warning('The build will try to continue, but it may '
|
||||
'fail and you should check '
|
||||
'that your setting is correct.')
|
||||
warning('If the NDK dir result is correct, you don\'t '
|
||||
'need to manually set the NDK ver.')
|
||||
if ndk_ver is None:
|
||||
warning('Android NDK version could not be found. This probably '
|
||||
'won\'t cause any problems, but if necessary you can '
|
||||
'set it with `--ndk-version=...`.')
|
||||
self.ndk_ver = ndk_ver
|
||||
ndk_api = min(self.android_api, RECOMMENDED_NDK_API)
|
||||
warning('NDK API target was not set manually, using '
|
||||
'the default of {} = min(android-api={}, default ndk-api={})'.format(
|
||||
ndk_api, self.android_api, RECOMMENDED_NDK_API))
|
||||
ndk_api = int(ndk_api)
|
||||
self.ndk_api = ndk_api
|
||||
|
||||
info('Using {} NDK {}'.format(self.ndk.capitalize(), self.ndk_ver))
|
||||
check_ndk_api(ndk_api, self.android_api)
|
||||
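# A minimal sketch of the ndk_api fallback chain above, assuming
# RECOMMENDED_NDK_API is the project default (21 in current p4a); this is an
# illustration of the selection rule, not the exact p4a code path.
RECOMMENDED_NDK_API = 21  # assumed default

def pick_ndk_api(user_ndk_api, android_api, env=None):
    env = env or {}
    if user_ndk_api is not None:
        return int(user_ndk_api)
    if 'NDKAPI' in env:
        return int(env['NDKAPI'])
    # never target a higher minimum API than the compile API
    return min(int(android_api), RECOMMENDED_NDK_API)

assert pick_ndk_api(None, 27) == 21
assert pick_ndk_api(None, 19) == 19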
|
||||
virtualenv = None
|
||||
if virtualenv is None:
|
||||
virtualenv = sh.which('virtualenv2')
|
||||
if virtualenv is None:
|
||||
virtualenv = sh.which('virtualenv-2.7')
|
||||
if virtualenv is None:
|
||||
virtualenv = sh.which('virtualenv')
|
||||
virtualenv = get_virtualenv_executable()
|
||||
if virtualenv is None:
|
||||
raise IOError('Couldn\'t find a virtualenv executable, '
|
||||
'you must install this to use p4a.')
|
||||
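# Hypothetical sketch of what a get_virtualenv_executable() helper could look
# like, mirroring the inline which-chain it replaces here; the real helper
# lives in pythonforandroid.util and may differ.
import sh

def get_virtualenv_executable():
    for candidate in ('virtualenv2', 'virtualenv-2.7', 'virtualenv'):
        virtualenv = sh.which(candidate)
        if virtualenv:
            return virtualenv
    return None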
|
@ -374,14 +322,13 @@ class Context(object):
|
|||
if not self.ccache:
|
||||
info('ccache is missing, the build will not be optimized in the '
|
||||
'future.')
|
||||
for cython_fn in ("cython2", "cython-2.7", "cython"):
|
||||
for cython_fn in ("cython", "cython3", "cython2", "cython-2.7"):
|
||||
cython = sh.which(cython_fn)
|
||||
if cython:
|
||||
self.cython = cython
|
||||
break
|
||||
else:
|
||||
error('No cython binary found. Exiting.')
|
||||
exit(1)
|
||||
raise BuildInterruptingException('No cython binary found.')
|
||||
if not self.cython:
|
||||
ok = False
|
||||
warning("Missing requirement: cython is not installed")
|
||||
|
@ -394,9 +341,8 @@ class Context(object):
|
|||
self.ndk_platform = join(
|
||||
self.ndk_dir,
|
||||
'platforms',
|
||||
'android-{}'.format(self.android_min_api),
|
||||
'android-{}'.format(self.ndk_api),
|
||||
platform_dir)
|
||||
|
||||
if not exists(self.ndk_platform):
|
||||
warning('ndk_platform doesn\'t exist: {}'.format(
|
||||
self.ndk_platform))
|
||||
|
@ -408,7 +354,7 @@ class Context(object):
|
|||
|
||||
toolchain_versions = []
|
||||
toolchain_path = join(self.ndk_dir, 'toolchains')
|
||||
if os.path.isdir(toolchain_path):
|
||||
if isdir(toolchain_path):
|
||||
toolchain_contents = glob.glob('{}/{}-*'.format(toolchain_path,
|
||||
toolchain_prefix))
|
||||
toolchain_versions = [split(path)[-1][len(toolchain_prefix) + 1:]
|
||||
|
@ -456,9 +402,8 @@ class Context(object):
|
|||
executable))
|
||||
|
||||
if not ok:
|
||||
error('{}python-for-android cannot continue; aborting{}'.format(
|
||||
Err_Fore.RED, Err_Fore.RESET))
|
||||
sys.exit(1)
|
||||
raise BuildInterruptingException(
|
||||
'python-for-android cannot continue due to the missing executables above')
|
||||
|
||||
def __init__(self):
|
||||
super(Context, self).__init__()
|
||||
|
@ -469,7 +414,7 @@ class Context(object):
|
|||
self._sdk_dir = None
|
||||
self._ndk_dir = None
|
||||
self._android_api = None
|
||||
self._ndk_ver = None
|
||||
self._ndk_api = None
|
||||
self.ndk = None
|
||||
|
||||
self.toolchain_prefix = None
|
||||
|
@ -483,6 +428,7 @@ class Context(object):
|
|||
ArchARM(self),
|
||||
ArchARMv7_a(self),
|
||||
Archx86(self),
|
||||
Archx86_64(self),
|
||||
ArchAarch_64(self),
|
||||
)
|
||||
|
||||
|
@ -504,8 +450,7 @@ class Context(object):
|
|||
new_archs.add(match)
|
||||
self.archs = list(new_archs)
|
||||
if not self.archs:
|
||||
warning('Asked to compile for no Archs, so failing.')
|
||||
exit(1)
|
||||
raise BuildInterruptingException('Asked to compile for no Archs, so failing.')
|
||||
info('Will compile for the following archs: {}'.format(
|
||||
', '.join([arch.arch for arch in self.archs])))
|
||||
|
||||
|
@ -523,14 +468,10 @@ class Context(object):
|
|||
'''Returns the location of site-packages in the python-install build
|
||||
dir.
|
||||
'''
|
||||
|
||||
# This needs to be replaced with something more general in
|
||||
# order to support multiple python versions and/or multiple
|
||||
# archs.
|
||||
if self.python_recipe.from_crystax:
|
||||
return self.get_python_install_dir()
|
||||
return join(self.get_python_install_dir(),
|
||||
'lib', 'python3.7', 'site-packages')
|
||||
if self.python_recipe.name == 'python2legacy':
|
||||
return join(self.get_python_install_dir(),
|
||||
'lib', 'python2.7', 'site-packages')
|
||||
return self.get_python_install_dir()
|
||||
|
||||
def get_libs_dir(self, arch):
|
||||
'''The libs dir for a given arch.'''
|
||||
|
@ -541,9 +482,33 @@ class Context(object):
|
|||
return exists(join(self.get_libs_dir(arch), lib))
|
||||
|
||||
def has_package(self, name, arch=None):
|
||||
# If this is a file path, it'll need special handling:
|
||||
if (name.find("/") >= 0 or name.find("\\") >= 0) and \
|
||||
name.find("://") < 0: # (:// would indicate an url)
|
||||
if not os.path.exists(name):
|
||||
# Non-existing dir, cannot look this up.
|
||||
return False
|
||||
if os.path.exists(os.path.join(name, "setup.py")):
|
||||
# Get name from setup.py:
|
||||
name = subprocess.check_output([
|
||||
sys.executable, "setup.py", "--name"],
|
||||
cwd=name)
|
||||
try:
|
||||
name = name.decode('utf-8', 'replace')
|
||||
except AttributeError:
|
||||
pass
|
||||
name = name.strip()
|
||||
if len(name) == 0:
|
||||
# Failed to look up any meaningful name.
|
||||
return False
|
||||
else:
|
||||
# A folder with whatever, cannot look this up.
|
||||
return False
|
||||
|
||||
# Try to look up recipe by name:
|
||||
try:
|
||||
recipe = Recipe.get_recipe(name, self)
|
||||
except IOError:
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
name = getattr(recipe, 'site_packages_name', None) or name
|
||||
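# Illustrative calls only (the local path is hypothetical): has_package()
# resolves a directory argument via `python setup.py --name`, and a plain
# name via recipe lookup / site-packages.
ctx.has_package('/home/user/myproject')   # local checkout with a setup.py
ctx.has_package('numpy')                  # plain package or recipe name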
|
@ -562,7 +527,6 @@ class Context(object):
|
|||
|
||||
def build_recipes(build_order, python_modules, ctx):
|
||||
# Put recipes in correct build order
|
||||
bs = ctx.bootstrap
|
||||
info_notify("Recipe build order is {}".format(build_order))
|
||||
if python_modules:
|
||||
python_modules = sorted(set(python_modules))
|
||||
|
@ -635,7 +599,13 @@ def run_pymodules_install(ctx, modules):
|
|||
|
||||
venv = sh.Command(ctx.virtualenv)
|
||||
with current_directory(join(ctx.build_dir)):
|
||||
shprint(venv, '--python=python3.7', 'venv')
|
||||
shprint(venv,
|
||||
'--python=python{}'.format(
|
||||
ctx.python_recipe.major_minor_version_string.
|
||||
partition(".")[0]
|
||||
),
|
||||
'venv'
|
||||
)
|
||||
|
||||
info('Creating a requirements.txt file for the Python modules')
|
||||
with open('requirements.txt', 'w') as fileh:
|
||||
|
@ -647,18 +617,63 @@ def run_pymodules_install(ctx, modules):
|
|||
line = '{}\n'.format(module)
|
||||
fileh.write(line)
|
||||
|
||||
info('Installing Python modules with pip')
|
||||
info('If this fails with a message about /bin/false, this '
|
||||
'probably means the package cannot be installed with '
|
||||
'pip as it needs a compilation recipe.')
|
||||
# Prepare base environment and upgrade pip:
|
||||
base_env = copy.copy(os.environ)
|
||||
base_env["PYTHONPATH"] = ctx.get_site_packages_dir()
|
||||
info('Upgrade pip to latest version')
|
||||
shprint(sh.bash, '-c', (
|
||||
"source venv/bin/activate && pip install -U pip"
|
||||
), _env=copy.copy(base_env))
|
||||
|
||||
# This bash method is what old-p4a used
|
||||
# It works but should be replaced with something better
|
||||
# Install Cython in case modules need it to build:
|
||||
info('Install Cython in case one of the modules needs it to build')
|
||||
shprint(sh.bash, '-c', (
|
||||
"venv/bin/pip install Cython"
|
||||
), _env=copy.copy(base_env))
|
||||
|
||||
# Get environment variables for build (with CC/compiler set):
|
||||
standard_recipe = CythonRecipe()
|
||||
standard_recipe.ctx = ctx
|
||||
# (note: following line enables explicit -lpython... linker options)
|
||||
standard_recipe.call_hostpython_via_targetpython = False
|
||||
recipe_env = standard_recipe.get_recipe_env(ctx.archs[0])
|
||||
env = copy.copy(base_env)
|
||||
env.update(recipe_env)
|
||||
|
||||
info('Installing Python modules with pip')
|
||||
info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. '
|
||||
'A reason for this is often modules compiling '
|
||||
'native code that is unaware of Android cross-compilation '
|
||||
'and does not work without additional '
|
||||
'changes / workarounds.')
|
||||
|
||||
# Make sure our build package dir is available, and the virtualenv
|
||||
# site packages come FIRST (so the proper pip version is used):
|
||||
env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir()
|
||||
env["PYTHONPATH"] = os.path.abspath(join(
|
||||
ctx.build_dir, "venv", "lib",
|
||||
"python" + ctx.python_recipe.major_minor_version_string,
|
||||
"site-packages")) + ":" + env["PYTHONPATH"]
|
||||
|
||||
'''
|
||||
# Do actual install:
|
||||
shprint(sh.bash, '-c', (
|
||||
"venv/bin/pip " +
|
||||
"install -v --target '{0}' --no-deps -r requirements.txt"
|
||||
).format(ctx.get_site_packages_dir().replace("'", "'\"'\"'")),
|
||||
_env=copy.copy(env))
|
||||
'''
|
||||
|
||||
# use old install script
|
||||
shprint(sh.bash, '-c', (
|
||||
"source venv/bin/activate && env CC=/bin/false CXX=/bin/false "
|
||||
"PYTHONPATH={0} pip install --target '{0}' --no-deps -r requirements.txt"
|
||||
).format(ctx.get_site_packages_dir()))
|
||||
|
||||
# Strip object files after potential Cython or native code builds:
|
||||
standard_recipe.strip_object_files(ctx.archs[0], env,
|
||||
build_dir=ctx.build_dir)
|
||||
|
||||
|
||||
def biglink(ctx, arch):
|
||||
# First, collate object files from each recipe
|
||||
|
@ -869,8 +884,8 @@ def copylibs_function(soname, objs_paths, extra_link_dirs=[], env=None):
|
|||
if needso:
|
||||
lib = needso.group(1)
|
||||
if (lib not in needed_libs
|
||||
and lib not in found_libs
|
||||
and lib not in blacklist_libs):
|
||||
and lib not in found_libs
|
||||
and lib not in blacklist_libs):
|
||||
needed_libs.append(needso.group(1))
|
||||
|
||||
sofiles += found_sofiles
|
||||
|
|
|
@ -2,9 +2,9 @@ from os.path import exists, join
|
|||
import glob
|
||||
import json
|
||||
|
||||
from pythonforandroid.logger import (info, info_notify, warning,
|
||||
Err_Style, Err_Fore)
|
||||
from pythonforandroid.util import current_directory
|
||||
from pythonforandroid.logger import (info, info_notify, warning, Err_Style, Err_Fore)
|
||||
from pythonforandroid.util import current_directory, BuildInterruptingException
|
||||
from shutil import rmtree
|
||||
|
||||
|
||||
class Distribution(object):
|
||||
|
@ -21,6 +21,7 @@ class Distribution(object):
|
|||
needs_build = False # Whether the dist needs compiling
|
||||
url = None
|
||||
dist_dir = None # Where the dist dir ultimately is. Should not be None.
|
||||
ndk_api = None
|
||||
|
||||
archs = []
|
||||
'''The arch targets that the dist is built for.'''
|
||||
|
@ -42,9 +43,11 @@ class Distribution(object):
|
|||
|
||||
@classmethod
|
||||
def get_distribution(cls, ctx, name=None, recipes=[],
|
||||
ndk_api=None,
|
||||
force_build=False,
|
||||
extra_dist_dirs=[],
|
||||
require_perfect_match=False):
|
||||
require_perfect_match=False,
|
||||
allow_replace_dist=True):
|
||||
'''Takes information about the distribution, and decides what kind of
|
||||
distribution it will be.
|
||||
|
||||
|
@ -68,21 +71,31 @@ class Distribution(object):
|
|||
require_perfect_match : bool
|
||||
If True, will only match distributions with precisely the
|
||||
correct set of recipes.
|
||||
allow_replace_dist : bool
|
||||
If True, will allow an existing dist with the specified
|
||||
name but incompatible requirements to be overwritten by
|
||||
a new one with the current requirements.
|
||||
'''
|
||||
|
||||
existing_dists = Distribution.get_distributions(ctx)
|
||||
|
||||
needs_build = True # whether the dist needs building, will be returned
|
||||
|
||||
possible_dists = existing_dists
|
||||
|
||||
name_match_dist = None
|
||||
|
||||
# 0) Check if a dist with that name already exists
|
||||
if name is not None and name:
|
||||
possible_dists = [d for d in possible_dists if d.name == name]
|
||||
if possible_dists:
|
||||
name_match_dist = possible_dists[0]
|
||||
|
||||
# 1) Check if any existing dists meet the requirements
|
||||
_possible_dists = []
|
||||
for dist in possible_dists:
|
||||
if (
|
||||
ndk_api is not None and dist.ndk_api != ndk_api
|
||||
) or dist.ndk_api is None:
|
||||
continue
|
||||
for recipe in recipes:
|
||||
if recipe not in dist.recipes:
|
||||
break
|
||||
|
@ -97,10 +110,12 @@ class Distribution(object):
|
|||
else:
|
||||
info('No existing dists meet the given requirements!')
|
||||
|
||||
# If any dist has perfect recipes, return it
|
||||
# If any dist has perfect recipes and ndk API, return it
|
||||
for dist in possible_dists:
|
||||
if force_build:
|
||||
continue
|
||||
if ndk_api is not None and dist.ndk_api != ndk_api:
|
||||
continue
|
||||
if (set(dist.recipes) == set(recipes) or
|
||||
(set(recipes).issubset(set(dist.recipes)) and
|
||||
not require_perfect_match)):
|
||||
|
@ -110,33 +125,20 @@ class Distribution(object):
|
|||
|
||||
assert len(possible_dists) < 2
|
||||
|
||||
if not name and possible_dists:
|
||||
info('Asked for dist with name {} with recipes ({}), but a dist '
|
||||
'with this name already exists and has incompatible recipes '
|
||||
'({})'.format(name, ', '.join(recipes),
|
||||
', '.join(possible_dists[0].recipes)))
|
||||
info('No compatible dist found, so exiting.')
|
||||
exit(1)
|
||||
|
||||
# # 2) Check if any downloadable dists meet the requirements
|
||||
|
||||
# online_dists = [('testsdl2', ['hostpython2', 'sdl2_image',
|
||||
# 'sdl2_mixer', 'sdl2_ttf',
|
||||
# 'python2', 'sdl2',
|
||||
# 'pyjniussdl2', 'kivysdl2'],
|
||||
# 'https://github.com/inclement/sdl2-example-dist/archive/master.zip'),
|
||||
# ]
|
||||
# _possible_dists = []
|
||||
# for dist_name, dist_recipes, dist_url in online_dists:
|
||||
# for recipe in recipes:
|
||||
# if recipe not in dist_recipes:
|
||||
# break
|
||||
# else:
|
||||
# dist = Distribution(ctx)
|
||||
# dist.name = dist_name
|
||||
# dist.url = dist_url
|
||||
# _possible_dists.append(dist)
|
||||
# # if _possible_dists
|
||||
# If there was a name match but we didn't already choose it,
|
||||
# then the existing dist is incompatible with the requested
|
||||
# configuration and the build cannot continue
|
||||
if name_match_dist is not None and not allow_replace_dist:
|
||||
raise BuildInterruptingException(
|
||||
'Asked for dist with name {name} with recipes ({req_recipes}) and '
|
||||
'NDK API {req_ndk_api}, but a dist '
|
||||
'with this name already exists and has either incompatible recipes '
|
||||
'({dist_recipes}) or NDK API {dist_ndk_api}'.format(
|
||||
name=name,
|
||||
req_ndk_api=ndk_api,
|
||||
dist_ndk_api=name_match_dist.ndk_api,
|
||||
req_recipes=', '.join(recipes),
|
||||
dist_recipes=', '.join(name_match_dist.recipes)))
|
||||
|
||||
# If we got this far, we need to build a new dist
|
||||
dist = Distribution(ctx)
|
||||
|
@ -152,16 +154,23 @@ class Distribution(object):
|
|||
dist.name = name
|
||||
dist.dist_dir = join(ctx.dist_dir, dist.name)
|
||||
dist.recipes = recipes
|
||||
dist.ndk_api = ctx.ndk_api
|
||||
|
||||
return dist
|
||||
|
||||
def folder_exists(self):
|
||||
return exists(self.dist_dir)
|
||||
|
||||
def delete(self):
|
||||
rmtree(self.dist_dir)
|
||||
|
||||
@classmethod
|
||||
def get_distributions(cls, ctx, extra_dist_dirs=[]):
|
||||
'''Returns all the distributions found locally.'''
|
||||
if extra_dist_dirs:
|
||||
warning('extra_dist_dirs argument to get_distributions '
|
||||
'is not yet implemented')
|
||||
exit(1)
|
||||
raise BuildInterruptingException(
|
||||
'extra_dist_dirs argument to get_distributions '
|
||||
'is not yet implemented')
|
||||
dist_dir = ctx.dist_dir
|
||||
folders = glob.glob(join(dist_dir, '*'))
|
||||
for dir in extra_dist_dirs:
|
||||
|
@ -179,40 +188,47 @@ class Distribution(object):
|
|||
dist.recipes = dist_info['recipes']
|
||||
if 'archs' in dist_info:
|
||||
dist.archs = dist_info['archs']
|
||||
if 'ndk_api' in dist_info:
|
||||
dist.ndk_api = dist_info['ndk_api']
|
||||
else:
|
||||
dist.ndk_api = None
|
||||
warning(
|
||||
"Distribution {distname}: ({distdir}) has been "
|
||||
"built with an unknown api target, ignoring it, "
|
||||
"you might want to delete it".format(
|
||||
distname=dist.name,
|
||||
distdir=dist.dist_dir
|
||||
)
|
||||
)
|
||||
dists.append(dist)
|
||||
return dists
|
||||
|
||||
def save_info(self):
|
||||
def save_info(self, dirn):
|
||||
'''
|
||||
Save information about the distribution in its dist_dir.
|
||||
'''
|
||||
with current_directory(self.dist_dir):
|
||||
with current_directory(dirn):
|
||||
info('Saving distribution info')
|
||||
with open('dist_info.json', 'w') as fileh:
|
||||
json.dump({'dist_name': self.name,
|
||||
json.dump({'dist_name': self.ctx.dist_name,
|
||||
'bootstrap': self.ctx.bootstrap.name,
|
||||
'archs': [arch.arch for arch in self.ctx.archs],
|
||||
'recipes': self.ctx.recipe_build_order},
|
||||
'ndk_api': self.ctx.ndk_api,
|
||||
'recipes': self.ctx.recipe_build_order + self.ctx.python_modules,
|
||||
'hostpython': self.ctx.hostpython,
|
||||
'python_version': self.ctx.python_recipe.major_minor_version_string},
|
||||
fileh)
|
||||
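# For reference, the dist_info.json written by save_info() above now looks
# roughly like this; every value here is illustrative, not taken from a
# real build.
example_dist_info = {
    'dist_name': 'mydist',                 # self.ctx.dist_name
    'bootstrap': 'sdl2',
    'archs': ['armeabi-v7a'],
    'ndk_api': 21,
    'recipes': ['hostpython3', 'python3', 'sdl2', 'kivy', 'requests'],
    'hostpython': '/path/to/hostpython3/native-build/python',  # hypothetical path
    'python_version': '3.7',
}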
|
||||
def load_info(self):
|
||||
'''Load information about the dist from the info file that p4a
|
||||
automatically creates.'''
|
||||
with current_directory(self.dist_dir):
|
||||
filen = 'dist_info.json'
|
||||
if not exists(filen):
|
||||
return None
|
||||
with open('dist_info.json', 'r') as fileh:
|
||||
dist_info = json.load(fileh)
|
||||
return dist_info
|
||||
|
||||
|
||||
def pretty_log_dists(dists, log_func=info):
|
||||
infos = []
|
||||
for dist in dists:
|
||||
infos.append('{Fore.GREEN}{Style.BRIGHT}{name}{Style.RESET_ALL}: '
|
||||
ndk_api = 'unknown' if dist.ndk_api is None else dist.ndk_api
|
||||
infos.append('{Fore.GREEN}{Style.BRIGHT}{name}{Style.RESET_ALL}: min API {ndk_api}, '
|
||||
'includes recipes ({Fore.GREEN}{recipes}'
|
||||
'{Style.RESET_ALL}), built for archs ({Fore.BLUE}'
|
||||
'{archs}{Style.RESET_ALL})'.format(
|
||||
ndk_api=ndk_api,
|
||||
name=dist.name, recipes=', '.join(dist.recipes),
|
||||
archs=', '.join(dist.archs) if dist.archs else 'UNKNOWN',
|
||||
Fore=Err_Fore, Style=Err_Style))
|
||||
|
|
|
@ -1,24 +1,37 @@
|
|||
|
||||
from copy import deepcopy
|
||||
from itertools import product
|
||||
from sys import exit
|
||||
|
||||
from pythonforandroid.logger import (info, warning, error)
|
||||
from pythonforandroid.logger import info
|
||||
from pythonforandroid.recipe import Recipe
|
||||
from pythonforandroid.bootstrap import Bootstrap
|
||||
from pythonforandroid.util import BuildInterruptingException
|
||||
|
||||
|
||||
def fix_deplist(deps):
|
||||
""" Turn a dependency list into lowercase, and make sure all entries
|
||||
that are just a string become a tuple of strings
|
||||
"""
|
||||
deps = [
|
||||
((dep.lower(),)
|
||||
if not isinstance(dep, (list, tuple))
|
||||
else tuple([dep_entry.lower()
|
||||
for dep_entry in dep
|
||||
]))
|
||||
for dep in deps
|
||||
]
|
||||
return deps
|
||||
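# A quick illustration of the normalisation fix_deplist() performs; the
# input list is made up.
assert fix_deplist(['Kivy', ('OpenSSL', 'libffi')]) == [
    ('kivy',),
    ('openssl', 'libffi'),
]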
|
||||
|
||||
class RecipeOrder(dict):
|
||||
|
||||
def __init__(self, ctx):
|
||||
self.ctx = ctx
|
||||
|
||||
def conflicts(self, name):
|
||||
def conflicts(self):
|
||||
for name in self.keys():
|
||||
try:
|
||||
recipe = Recipe.get_recipe(name, self.ctx)
|
||||
conflicts = recipe.conflicts
|
||||
except IOError:
|
||||
conflicts = [dep.lower() for dep in recipe.conflicts]
|
||||
except ValueError:
|
||||
conflicts = []
|
||||
|
||||
if any([c in self for c in conflicts]):
|
||||
|
@ -26,26 +39,59 @@ class RecipeOrder(dict):
|
|||
return False
|
||||
|
||||
|
||||
def recursively_collect_orders(name, ctx, orders=[]):
|
||||
def get_dependency_tuple_list_for_recipe(recipe, blacklist=None):
|
||||
""" Get the dependencies of a recipe with filtered out blacklist, and
|
||||
turned into tuples with fix_deplist()
|
||||
"""
|
||||
if blacklist is None:
|
||||
blacklist = set()
|
||||
assert(type(blacklist) == set)
|
||||
if recipe.depends is None:
|
||||
dependencies = []
|
||||
else:
|
||||
# Turn all dependencies into tuples so that product will work
|
||||
dependencies = fix_deplist(recipe.depends)
|
||||
|
||||
# Filter out blacklisted items and turn lowercase:
|
||||
dependencies = [
|
||||
tuple(set(deptuple) - blacklist)
|
||||
for deptuple in dependencies
|
||||
if tuple(set(deptuple) - blacklist)
|
||||
]
|
||||
return dependencies
|
||||
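# Sketch of the blacklist filtering above, using a stand-in recipe object
# rather than a real p4a recipe.
class FakeRecipe:
    depends = ['setuptools', ('openssl', 'libressl'), 'SDL2']

deps = get_dependency_tuple_list_for_recipe(FakeRecipe(), blacklist={'openssl'})
# -> [('setuptools',), ('libressl',), ('sdl2',)]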
|
||||
|
||||
def recursively_collect_orders(
|
||||
name, ctx, all_inputs, orders=None, blacklist=None
|
||||
):
|
||||
'''For each possible recipe ordering, try to add the new recipe name
|
||||
to that order. Recursively do the same thing with all the
|
||||
dependencies of each recipe.
|
||||
|
||||
'''
|
||||
name = name.lower()
|
||||
if orders is None:
|
||||
orders = []
|
||||
if blacklist is None:
|
||||
blacklist = set()
|
||||
try:
|
||||
recipe = Recipe.get_recipe(name, ctx)
|
||||
if recipe.depends is None:
|
||||
dependencies = []
|
||||
else:
|
||||
# make all dependencies into lists so that product will work
|
||||
dependencies = [([dependency] if not isinstance(
|
||||
dependency, (list, tuple))
|
||||
else dependency) for dependency in recipe.depends]
|
||||
dependencies = get_dependency_tuple_list_for_recipe(
|
||||
recipe, blacklist=blacklist
|
||||
)
|
||||
|
||||
# handle opt_depends: these impose requirements on the build
|
||||
# order only if already present in the list of recipes to build
|
||||
dependencies.extend(fix_deplist(
|
||||
[[d] for d in recipe.get_opt_depends_in_list(all_inputs)
|
||||
if d.lower() not in blacklist]
|
||||
))
|
||||
|
||||
if recipe.conflicts is None:
|
||||
conflicts = []
|
||||
else:
|
||||
conflicts = recipe.conflicts
|
||||
except IOError:
|
||||
conflicts = [dep.lower() for dep in recipe.conflicts]
|
||||
except ValueError:
|
||||
# The recipe does not exist, so we assume it can be installed
|
||||
# via pip with no extra dependencies
|
||||
dependencies = []
|
||||
|
@ -57,7 +103,7 @@ def recursively_collect_orders(name, ctx, orders=[]):
|
|||
if name in order:
|
||||
new_orders.append(deepcopy(order))
|
||||
continue
|
||||
if order.conflicts(name):
|
||||
if order.conflicts():
|
||||
continue
|
||||
if any([conflict in order for conflict in conflicts]):
|
||||
continue
|
||||
|
@ -69,7 +115,9 @@ def recursively_collect_orders(name, ctx, orders=[]):
|
|||
dependency_new_orders = [new_order]
|
||||
for dependency in dependency_set:
|
||||
dependency_new_orders = recursively_collect_orders(
|
||||
dependency, ctx, dependency_new_orders)
|
||||
dependency, ctx, all_inputs, dependency_new_orders,
|
||||
blacklist=blacklist
|
||||
)
|
||||
|
||||
new_orders.extend(dependency_new_orders)
|
||||
|
||||
|
@ -95,22 +143,142 @@ def find_order(graph):
|
|||
bset.discard(result)
|
||||
|
||||
|
||||
def get_recipe_order_and_bootstrap(ctx, names, bs=None):
|
||||
recipes_to_load = set(names)
|
||||
if bs is not None and bs.recipe_depends:
|
||||
recipes_to_load = recipes_to_load.union(set(bs.recipe_depends))
|
||||
def obvious_conflict_checker(ctx, name_tuples, blacklist=None):
|
||||
""" This is a pre-flight check function that will completely ignore
|
||||
recipe order or choosing an actual value in any of the multiple
|
||||
choice tuples/dependencies, and just do a very basic obvious
|
||||
conflict check.
|
||||
"""
|
||||
deps_were_added_by = dict()
|
||||
deps = set()
|
||||
if blacklist is None:
|
||||
blacklist = set()
|
||||
|
||||
possible_orders = []
|
||||
# Add dependencies for all recipes:
|
||||
to_be_added = [(name_tuple, None) for name_tuple in name_tuples]
|
||||
while len(to_be_added) > 0:
|
||||
current_to_be_added = list(to_be_added)
|
||||
to_be_added = []
|
||||
for (added_tuple, adding_recipe) in current_to_be_added:
|
||||
assert(type(added_tuple) == tuple)
|
||||
if len(added_tuple) > 1:
|
||||
# No obvious commitment in what to add, don't check it itself
|
||||
# but throw it into deps for later comparing against
|
||||
# (Remember this function only catches obvious issues)
|
||||
deps.add(added_tuple)
|
||||
continue
|
||||
|
||||
name = added_tuple[0]
|
||||
recipe_conflicts = set()
|
||||
recipe_dependencies = []
|
||||
try:
|
||||
# Get recipe to add and who's ultimately adding it:
|
||||
recipe = Recipe.get_recipe(name, ctx)
|
||||
recipe_conflicts = {c.lower() for c in recipe.conflicts}
|
||||
recipe_dependencies = get_dependency_tuple_list_for_recipe(
|
||||
recipe, blacklist=blacklist
|
||||
)
|
||||
except ValueError:
|
||||
pass
|
||||
adder_first_recipe_name = adding_recipe or name
|
||||
|
||||
# Collect the conflicts:
|
||||
triggered_conflicts = []
|
||||
for dep_tuple_list in deps:
|
||||
# See if the new deps conflict with things added before:
|
||||
if set(dep_tuple_list).intersection(
|
||||
recipe_conflicts) == set(dep_tuple_list):
|
||||
triggered_conflicts.append(dep_tuple_list)
|
||||
continue
|
||||
|
||||
# See if what was added before conflicts with the new deps:
|
||||
if len(dep_tuple_list) > 1:
|
||||
# Not an obvious commitment to a specific recipe/dep
|
||||
# to be added, so we won't check.
|
||||
# (remember this function only catches obvious issues)
|
||||
continue
|
||||
try:
|
||||
dep_recipe = Recipe.get_recipe(dep_tuple_list[0], ctx)
|
||||
except ValueError:
|
||||
continue
|
||||
conflicts = [c.lower() for c in dep_recipe.conflicts]
|
||||
if name in conflicts:
|
||||
triggered_conflicts.append(dep_tuple_list)
|
||||
|
||||
# Throw error on conflict:
|
||||
if triggered_conflicts:
|
||||
# Get first conflict and see who added that one:
|
||||
adder_second_recipe_name = "'||'".join(triggered_conflicts[0])
|
||||
second_recipe_original_adder = deps_were_added_by.get(
|
||||
(adder_second_recipe_name,), None
|
||||
)
|
||||
if second_recipe_original_adder:
|
||||
adder_second_recipe_name = second_recipe_original_adder
|
||||
|
||||
# Prompt error:
|
||||
raise BuildInterruptingException(
|
||||
"Conflict detected: '{}'"
|
||||
" inducing dependencies {}, and '{}'"
|
||||
" inducing conflicting dependencies {}".format(
|
||||
adder_first_recipe_name,
|
||||
(recipe.name,),
|
||||
adder_second_recipe_name,
|
||||
triggered_conflicts[0]
|
||||
))
|
||||
|
||||
# Actually add it to our list:
|
||||
deps.add(added_tuple)
|
||||
deps_were_added_by[added_tuple] = adding_recipe
|
||||
|
||||
# Schedule dependencies to be added
|
||||
to_be_added += [
|
||||
(dep, adder_first_recipe_name or name)
|
||||
for dep in recipe_dependencies
|
||||
if dep not in deps
|
||||
]
|
||||
# If we came here, then there were no obvious conflicts.
|
||||
return None
|
||||
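# Illustrative failure mode of the pre-flight check above: requesting two
# recipes that declare each other as conflicts (recipe names are examples)
# raises BuildInterruptingException before any order graph is built.
try:
    obvious_conflict_checker(ctx, [('python2',), ('python3',)])
except BuildInterruptingException as exc:
    print(exc)   # e.g. "Conflict detected: 'python3' ... and 'python2' ..."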
|
||||
|
||||
def get_recipe_order_and_bootstrap(ctx, names, bs=None, blacklist=None):
|
||||
# Get set of recipe/dependency names, clean up and add bootstrap deps:
|
||||
names = set(names)
|
||||
if bs is not None and bs.recipe_depends:
|
||||
names = names.union(set(bs.recipe_depends))
|
||||
names = fix_deplist([
|
||||
([name] if not isinstance(name, (list, tuple)) else name)
|
||||
for name in names
|
||||
])
|
||||
if blacklist is None:
|
||||
blacklist = set()
|
||||
blacklist = {bitem.lower() for bitem in blacklist}
|
||||
|
||||
# Remove all values that are in the blacklist:
|
||||
names_before_blacklist = list(names)
|
||||
names = []
|
||||
for name in names_before_blacklist:
|
||||
cleaned_up_tuple = tuple([
|
||||
item for item in name if item not in blacklist
|
||||
])
|
||||
if cleaned_up_tuple:
|
||||
names.append(cleaned_up_tuple)
|
||||
|
||||
# Do check for obvious conflicts (that would trigger in any order, and
|
||||
# without committing to any specific choice in a multi-choice tuple of
|
||||
# dependencies):
|
||||
obvious_conflict_checker(ctx, names, blacklist=blacklist)
|
||||
# If we get here, no obvious conflicts!
|
||||
|
||||
# get all possible order graphs, as names may include tuples/lists
|
||||
# of alternative dependencies
|
||||
names = [([name] if not isinstance(name, (list, tuple)) else name)
|
||||
for name in names]
|
||||
possible_orders = []
|
||||
for name_set in product(*names):
|
||||
new_possible_orders = [RecipeOrder(ctx)]
|
||||
for name in name_set:
|
||||
new_possible_orders = recursively_collect_orders(
|
||||
name, ctx, orders=new_possible_orders)
|
||||
name, ctx, name_set, orders=new_possible_orders,
|
||||
blacklist=blacklist
|
||||
)
|
||||
possible_orders.extend(new_possible_orders)
|
||||
|
||||
# turn each order graph into a linear list if possible
|
||||
|
@ -122,23 +290,18 @@ def get_recipe_order_and_bootstrap(ctx, names, bs=None):
|
|||
info('Circular dependency found in graph {}, skipping it.'.format(
|
||||
possible_order))
|
||||
continue
|
||||
except:
|
||||
warning('Failed to import recipe named {}; the recipe exists '
|
||||
'but appears broken.'.format(name))
|
||||
warning('Exception was:')
|
||||
raise
|
||||
orders.append(list(order))
|
||||
|
||||
# prefer python2 and SDL2 if available
|
||||
# prefer python3 and SDL2 if available
|
||||
orders = sorted(orders,
|
||||
key=lambda order: -('python2' in order) - ('sdl2' in order))
|
||||
key=lambda order: -('python3' in order) - ('sdl2' in order))
|
||||
|
||||
if not orders:
|
||||
error('Didn\'t find any valid dependency graphs.')
|
||||
error('This means that some of your requirements pull in '
|
||||
'conflicting dependencies.')
|
||||
error('Exiting.')
|
||||
exit(1)
|
||||
raise BuildInterruptingException(
|
||||
'Didn\'t find any valid dependency graphs. '
|
||||
'This means that some of your '
|
||||
'requirements pull in conflicting dependencies.')
|
||||
|
||||
# It would be better to check against possible orders other
|
||||
# than the first one, but in practice clashes will be rare,
|
||||
# and can be resolved by specifying more parameters
|
||||
|
@ -153,18 +316,26 @@ def get_recipe_order_and_bootstrap(ctx, names, bs=None):
|
|||
|
||||
if bs is None:
|
||||
bs = Bootstrap.get_bootstrap_from_recipes(chosen_order, ctx)
|
||||
if bs is None:
|
||||
# Note: don't remove this without thought, causes infinite loop
|
||||
raise BuildInterruptingException(
|
||||
"Could not find any compatible bootstrap!"
|
||||
)
|
||||
recipes, python_modules, bs = get_recipe_order_and_bootstrap(
|
||||
ctx, chosen_order, bs=bs)
|
||||
ctx, chosen_order, bs=bs, blacklist=blacklist
|
||||
)
|
||||
else:
|
||||
# check if each requirement has a recipe
|
||||
recipes = []
|
||||
python_modules = []
|
||||
for name in chosen_order:
|
||||
try:
|
||||
Recipe.get_recipe(name, ctx)
|
||||
except IOError:
|
||||
recipe = Recipe.get_recipe(name, ctx)
|
||||
python_modules += recipe.python_depends
|
||||
except ValueError:
|
||||
python_modules.append(name)
|
||||
else:
|
||||
recipes.append(name)
|
||||
|
||||
python_modules = list(set(python_modules))
|
||||
return recipes, python_modules, bs
|
||||
|
|
|
@ -44,9 +44,9 @@ class LevelDifferentiatingFormatter(logging.Formatter):
|
|||
|
||||
|
||||
logger = logging.getLogger('p4a')
|
||||
if not hasattr(logger, 'touched'): # Necessary as importlib reloads
|
||||
# this, which would add a second
|
||||
# handler and reset the level
|
||||
# Necessary as importlib reloads this,
|
||||
# which would add a second handler and reset the level
|
||||
if not hasattr(logger, 'touched'):
|
||||
logger.setLevel(logging.INFO)
|
||||
logger.touched = True
|
||||
ch = logging.StreamHandler(stderr)
|
||||
|
@ -148,8 +148,10 @@ def shprint(command, *args, **kwargs):
|
|||
kwargs["_bg"] = True
|
||||
is_critical = kwargs.pop('_critical', False)
|
||||
tail_n = kwargs.pop('_tail', None)
|
||||
full_debug = False
|
||||
if "P4A_FULL_DEBUG" in os.environ:
|
||||
tail_n = 0
|
||||
full_debug = True
|
||||
filter_in = kwargs.pop('_filter', None)
|
||||
filter_out = kwargs.pop('_filterout', None)
|
||||
if len(logger.handlers) > 1:
|
||||
|
@ -177,16 +179,21 @@ def shprint(command, *args, **kwargs):
|
|||
if isinstance(line, bytes):
|
||||
line = line.decode('utf-8', errors='replace')
|
||||
if logger.level > logging.DEBUG:
|
||||
if full_debug:
|
||||
stdout.write(line)
|
||||
stdout.flush()
|
||||
continue
|
||||
msg = line.replace(
|
||||
'\n', ' ').replace(
|
||||
'\t', ' ').replace(
|
||||
'\b', ' ').rstrip()
|
||||
if msg:
|
||||
stdout.write(u'{}\r{}{:<{width}}'.format(
|
||||
Err_Style.RESET_ALL, msg_hdr,
|
||||
shorten_string(msg, msg_width), width=msg_width))
|
||||
stdout.flush()
|
||||
need_closing_newline = True
|
||||
if "CI" not in os.environ:
|
||||
stdout.write(u'{}\r{}{:<{width}}'.format(
|
||||
Err_Style.RESET_ALL, msg_hdr,
|
||||
shorten_string(msg, msg_width), width=msg_width))
|
||||
stdout.flush()
|
||||
need_closing_newline = True
|
||||
else:
|
||||
logger.debug(''.join(['\t', line.rstrip()]))
|
||||
if need_closing_newline:
|
||||
|
|
437
p4a/pythonforandroid/python.py
Executable file
|
@ -0,0 +1,437 @@
|
|||
'''
|
||||
This module is kind of special because it contains the base classes used to
|
||||
build our python3 and python2 recipes and their corresponding hostpython recipes.
|
||||
'''
|
||||
|
||||
from os.path import dirname, exists, join
|
||||
from multiprocessing import cpu_count
|
||||
from shutil import copy2
|
||||
from os import environ
|
||||
import subprocess
|
||||
import glob
|
||||
import sh
|
||||
|
||||
from pythonforandroid.recipe import Recipe, TargetPythonRecipe
|
||||
from pythonforandroid.logger import logger, info, shprint
|
||||
from pythonforandroid.util import (
|
||||
current_directory, ensure_dir, walk_valid_filens,
|
||||
BuildInterruptingException, build_platform)
|
||||
|
||||
|
||||
class GuestPythonRecipe(TargetPythonRecipe):
|
||||
'''
|
||||
Class for target python recipes. Sets ctx.python_recipe to point to itself,
|
||||
so as to know later what kind of Python was built or used.
|
||||
|
||||
This base class is used for our main python recipes (python2 and python3)
|
||||
which share most of the build process.
|
||||
|
||||
.. versionadded:: 0.6.0
|
||||
Refactored from inclement's python3 recipe with a few changes:
|
||||
|
||||
- Splits the python build process into several methods: :meth:`build_arch`
|
||||
and :meth:`get_recipe_env`.
|
||||
- Adds the attribute :attr:`configure_args`, which has been moved from
|
||||
the method :meth:`build_arch` into a static class variable.
|
||||
- Adds some static class variables used to create the python bundle and
|
||||
modifies the method :meth:`create_python_bundle`, to adapt to the new
|
||||
situation. The added static class variables are:
|
||||
:attr:`stdlib_dir_blacklist`, :attr:`stdlib_filen_blacklist`,
|
||||
:attr:`site_packages_dir_blacklist` and
|
||||
:attr:`site_packages_filen_blacklist`.
|
||||
'''
|
||||
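# Minimal, hypothetical sketch of what a concrete subclass supplies; the
# real python3 recipe sets more configure flags and version details, so
# treat this only as an outline of the pattern described above.
class Python3Recipe(GuestPythonRecipe):
    name = 'python3'
    version = '3.7.1'
    configure_args = (
        '--host={android_host}',
        '--build={android_build}',
        '--enable-shared',
        '--without-ensurepip',
        '--prefix={prefix}',
        '--exec-prefix={exec_prefix}',
    )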
|
||||
MIN_NDK_API = 21
|
||||
'''Sets the minimal ndk api number needed to use the recipe.
|
||||
|
||||
.. warning:: This recipe can be built only against API 21+, so it means
|
||||
that any class which inherits from :class:`GuestPythonRecipe` will have
|
||||
this limitation.
|
||||
'''
|
||||
|
||||
from_crystax = False
|
||||
'''True if the python is used from CrystaX, False otherwise (i.e. if
|
||||
it is built by p4a).'''
|
||||
|
||||
configure_args = ()
|
||||
'''The configure arguments needed to build the python recipe. Those are
|
||||
used in method :meth:`build_arch` (if not overwritten like python3crystax's
|
||||
recipe does).
|
||||
|
||||
.. note:: This variable should be properly set in subclass.
|
||||
'''
|
||||
|
||||
stdlib_dir_blacklist = {
|
||||
'__pycache__',
|
||||
'test',
|
||||
'tests',
|
||||
'lib2to3',
|
||||
'ensurepip',
|
||||
'idlelib',
|
||||
'tkinter',
|
||||
}
|
||||
'''The directories that we want to omit for our python bundle'''
|
||||
|
||||
stdlib_filen_blacklist = [
|
||||
'*.py',
|
||||
'*.exe',
|
||||
'*.whl',
|
||||
]
|
||||
'''The file extensions that we want to blacklist for our python bundle'''
|
||||
|
||||
site_packages_dir_blacklist = {
|
||||
'__pycache__',
|
||||
'tests'
|
||||
}
|
||||
'''The directories from site packages dir that we don't want to be included
|
||||
in our python bundle.'''
|
||||
|
||||
site_packages_filen_blacklist = [
|
||||
'*.py'
|
||||
]
|
||||
'''The file extensions from site packages dir that we don't want to be
|
||||
included in our python bundle.'''
|
||||
|
||||
opt_depends = ['sqlite3', 'libffi', 'openssl']
|
||||
'''The optional libraries which we would like our python to be linked against'''
|
||||
|
||||
compiled_extension = '.pyc'
|
||||
'''the default extension for compiled python files.
|
||||
|
||||
.. note:: the default extension for compiled python files has been .pyo for
|
||||
python 2.x-3.4 but as of Python 3.5, the .pyo filename extension is no
|
||||
longer used and has been removed in favour of extension .pyc
|
||||
'''
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._ctx = None
|
||||
super(GuestPythonRecipe, self).__init__(*args, **kwargs)
|
||||
|
||||
def get_recipe_env(self, arch=None, with_flags_in_cc=True):
|
||||
if self.from_crystax:
|
||||
return super(GuestPythonRecipe, self).get_recipe_env(
|
||||
arch=arch, with_flags_in_cc=with_flags_in_cc)
|
||||
|
||||
env = environ.copy()
|
||||
|
||||
android_host = env['HOSTARCH'] = arch.command_prefix
|
||||
toolchain = '{toolchain_prefix}-{toolchain_version}'.format(
|
||||
toolchain_prefix=self.ctx.toolchain_prefix,
|
||||
toolchain_version=self.ctx.toolchain_version)
|
||||
toolchain = join(self.ctx.ndk_dir, 'toolchains',
|
||||
toolchain, 'prebuilt', build_platform)
|
||||
|
||||
env['CC'] = (
|
||||
'{clang} -target {target} -gcc-toolchain {toolchain}').format(
|
||||
clang=join(self.ctx.ndk_dir, 'toolchains', 'llvm', 'prebuilt',
|
||||
build_platform, 'bin', 'clang'),
|
||||
target=arch.target,
|
||||
toolchain=toolchain)
|
||||
env['AR'] = join(toolchain, 'bin', android_host) + '-ar'
|
||||
env['LD'] = join(toolchain, 'bin', android_host) + '-ld'
|
||||
env['RANLIB'] = join(toolchain, 'bin', android_host) + '-ranlib'
|
||||
env['READELF'] = join(toolchain, 'bin', android_host) + '-readelf'
|
||||
env['STRIP'] = join(toolchain, 'bin', android_host) + '-strip'
|
||||
env['STRIP'] += ' --strip-debug --strip-unneeded'
|
||||
|
||||
env['PATH'] = (
|
||||
'{hostpython_dir}:{old_path}').format(
|
||||
hostpython_dir=self.get_recipe(
|
||||
'host' + self.name, self.ctx).get_path_to_python(),
|
||||
old_path=env['PATH'])
|
||||
|
||||
ndk_flags = (
|
||||
'-fPIC --sysroot={ndk_sysroot} -D__ANDROID_API__={android_api} '
|
||||
'-isystem {ndk_android_host} -I{ndk_include}').format(
|
||||
ndk_sysroot=join(self.ctx.ndk_dir, 'sysroot'),
|
||||
android_api=self.ctx.ndk_api,
|
||||
ndk_android_host=join(
|
||||
self.ctx.ndk_dir, 'sysroot', 'usr', 'include', android_host),
|
||||
ndk_include=join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include'))
|
||||
sysroot = self.ctx.ndk_platform
|
||||
env['CFLAGS'] = env.get('CFLAGS', '') + ' ' + ndk_flags
|
||||
env['CPPFLAGS'] = env.get('CPPFLAGS', '') + ' ' + ndk_flags
|
||||
env['LDFLAGS'] = env.get('LDFLAGS', '') + ' --sysroot={} -L{}'.format(
|
||||
sysroot, join(sysroot, 'usr', 'lib'))
|
||||
|
||||
# Manually add the libs directory, and copy some object
|
||||
# files to the current directory otherwise they aren't
|
||||
# picked up. This seems necessary because the --sysroot
|
||||
# setting in LDFLAGS is overridden by the other flags.
|
||||
# TODO: Work out why this doesn't happen in the original
|
||||
# bpo-30386 Makefile system.
|
||||
logger.warning('Doing some hacky stuff to link properly')
|
||||
lib_dir = join(sysroot, 'usr', 'lib')
|
||||
if arch.arch == 'x86_64':
|
||||
lib_dir = join(sysroot, 'usr', 'lib64')
|
||||
env['LDFLAGS'] += ' -L{}'.format(lib_dir)
|
||||
shprint(sh.cp, join(lib_dir, 'crtbegin_so.o'), './')
|
||||
shprint(sh.cp, join(lib_dir, 'crtend_so.o'), './')
|
||||
|
||||
env['SYSROOT'] = sysroot
|
||||
|
||||
if sh.which('lld') is not None:
|
||||
# Note: The -L. is to fix a bug in python 3.7.
|
||||
# https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=234409
|
||||
env["LDFLAGS"] += ' -L. -fuse-ld=lld'
|
||||
else:
|
||||
logger.warning('lld not found, linking without it. ' +
|
||||
'Consider installing lld if linker errors occur.')
|
||||
|
||||
return env
|
||||
|
||||
def set_libs_flags(self, env, arch):
|
||||
'''Takes care to properly link libraries with python depending on our
|
||||
requirements and the attribute :attr:`opt_depends`.
|
||||
'''
|
||||
def add_flags(include_flags, link_dirs, link_libs):
|
||||
env['CPPFLAGS'] = env.get('CPPFLAGS', '') + include_flags
|
||||
env['LDFLAGS'] = env.get('LDFLAGS', '') + link_dirs
|
||||
env['LIBS'] = env.get('LIBS', '') + link_libs
|
||||
|
||||
if 'sqlite3' in self.ctx.recipe_build_order:
|
||||
info('Activating flags for sqlite3')
|
||||
recipe = Recipe.get_recipe('sqlite3', self.ctx)
|
||||
add_flags(' -I' + recipe.get_build_dir(arch.arch),
|
||||
' -L' + recipe.get_lib_dir(arch), ' -lsqlite3')
|
||||
|
||||
if 'libffi' in self.ctx.recipe_build_order:
|
||||
info('Activating flags for libffi')
|
||||
recipe = Recipe.get_recipe('libffi', self.ctx)
|
||||
# In order to force the correct linkage for our libffi library, we
|
||||
# set the following variable to point where is our libffi.pc file,
|
||||
# because the python build system uses pkg-config to configure it.
|
||||
env['PKG_CONFIG_PATH'] = recipe.get_build_dir(arch.arch)
|
||||
add_flags(' -I' + ' -I'.join(recipe.get_include_dirs(arch)),
|
||||
' -L' + join(recipe.get_build_dir(arch.arch), '.libs'),
|
||||
' -lffi')
|
||||
|
||||
if 'openssl' in self.ctx.recipe_build_order:
|
||||
info('Activating flags for openssl')
|
||||
recipe = Recipe.get_recipe('openssl', self.ctx)
|
||||
add_flags(recipe.include_flags(arch),
|
||||
recipe.link_dirs_flags(arch), recipe.link_libs_flags())
|
||||
return env
|
||||
|
||||
def prebuild_arch(self, arch):
|
||||
super(TargetPythonRecipe, self).prebuild_arch(arch)
|
||||
if self.from_crystax and self.ctx.ndk != 'crystax':
|
||||
raise BuildInterruptingException(
|
||||
'The {} recipe can only be built when using the CrystaX NDK. '
|
||||
'Exiting.'.format(self.name))
|
||||
self.ctx.python_recipe = self
|
||||
|
||||
def build_arch(self, arch):
|
||||
if self.ctx.ndk_api < self.MIN_NDK_API:
|
||||
raise BuildInterruptingException(
|
||||
'Target ndk-api is {}, but the python3 recipe supports only'
|
||||
' {}+'.format(self.ctx.ndk_api, self.MIN_NDK_API))
|
||||
|
||||
recipe_build_dir = self.get_build_dir(arch.arch)
|
||||
|
||||
# Create a subdirectory to actually perform the build
|
||||
build_dir = join(recipe_build_dir, 'android-build')
|
||||
ensure_dir(build_dir)
|
||||
|
||||
# TODO: Get these dynamically, like bpo-30386 does
|
||||
sys_prefix = '/usr/local'
|
||||
sys_exec_prefix = '/usr/local'
|
||||
|
||||
with current_directory(build_dir):
|
||||
env = self.get_recipe_env(arch)
|
||||
env = self.set_libs_flags(env, arch)
|
||||
|
||||
android_build = sh.Command(
|
||||
join(recipe_build_dir,
|
||||
'config.guess'))().stdout.strip().decode('utf-8')
|
||||
|
||||
if not exists('config.status'):
|
||||
shprint(
|
||||
sh.Command(join(recipe_build_dir, 'configure')),
|
||||
*(' '.join(self.configure_args).format(
|
||||
android_host=env['HOSTARCH'],
|
||||
android_build=android_build,
|
||||
prefix=sys_prefix,
|
||||
exec_prefix=sys_exec_prefix)).split(' '),
|
||||
_env=env)
|
||||
|
||||
if not exists('python'):
|
||||
py_version = self.major_minor_version_string
|
||||
if self.major_minor_version_string[0] == '3':
|
||||
py_version += 'm'
|
||||
shprint(sh.make, 'all', '-j', str(cpu_count()),
|
||||
'INSTSONAME=libpython{version}.so'.format(
|
||||
version=py_version), _env=env)
|
||||
|
||||
# TODO: Look into passing the path to pyconfig.h in a
|
||||
# better way, although this is probably acceptable
|
||||
sh.cp('pyconfig.h', join(recipe_build_dir, 'Include'))
|
||||
|
||||
def include_root(self, arch_name):
|
||||
return join(self.get_build_dir(arch_name), 'Include')
|
||||
|
||||
def link_root(self, arch_name):
|
||||
return join(self.get_build_dir(arch_name), 'android-build')
|
||||
|
||||
def compile_python_files(self, dir):
|
||||
'''
|
||||
Recursively compile the python files inside
|
||||
a given folder.
|
||||
|
||||
.. note:: python2 compiles the files into extension .pyo, but in
|
||||
python3, and as of Python 3.5, the .pyo filename extension is no
|
||||
longer used; .pyc is used instead (https://www.python.org/dev/peps/pep-0488)
|
||||
'''
|
||||
args = [self.ctx.hostpython]
|
||||
if self.ctx.python_recipe.name == 'python3':
|
||||
args += ['-OO', '-m', 'compileall', '-b', '-f', dir]
|
||||
else:
|
||||
args += ['-OO', '-m', 'compileall', '-f', dir]
|
||||
subprocess.call(args)
|
||||
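# Usage sketch: byte-compile the freshly built stdlib in place, mirroring
# the call made in create_python_bundle() below (recipe and arch come from
# the surrounding build; the path is illustrative).
recipe.compile_python_files(join(recipe.get_build_dir(arch.arch), 'Lib'))
# roughly equivalent to: hostpython -OO -m compileall -b -f <dir>   (python3)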
|
||||
def create_python_bundle(self, dirn, arch):
|
||||
"""
|
||||
Create a packaged python bundle in the target directory, by
|
||||
copying all the modules and standard library to the right
|
||||
place.
|
||||
"""
|
||||
# Todo: find a better way to find the build libs folder
|
||||
modules_build_dir = join(
|
||||
self.get_build_dir(arch.arch),
|
||||
'android-build',
|
||||
'build',
|
||||
'lib.linux{}-{}-{}'.format(
|
||||
'2' if self.version[0] == '2' else '',
|
||||
arch.command_prefix.split('-')[0],
|
||||
self.major_minor_version_string
|
||||
))
|
||||
|
||||
# Compile to *.pyc/*.pyo the python modules
|
||||
self.compile_python_files(modules_build_dir)
|
||||
# Compile to *.pyc/*.pyo the standard python library
|
||||
self.compile_python_files(join(self.get_build_dir(arch.arch), 'Lib'))
|
||||
# Compile to *.pyc/*.pyo the other python packages (site-packages)
|
||||
self.compile_python_files(self.ctx.get_python_install_dir())
|
||||
|
||||
# Bundle compiled python modules to a folder
|
||||
modules_dir = join(dirn, 'modules')
|
||||
c_ext = self.compiled_extension
|
||||
ensure_dir(modules_dir)
|
||||
module_filens = (glob.glob(join(modules_build_dir, '*.so')) +
|
||||
glob.glob(join(modules_build_dir, '*' + c_ext)))
|
||||
info("Copy {} files into the bundle".format(len(module_filens)))
|
||||
for filen in module_filens:
|
||||
info(" - copy {}".format(filen))
|
||||
copy2(filen, modules_dir)
|
||||
|
||||
# zip up the standard library
|
||||
stdlib_zip = join(dirn, 'stdlib.zip')
|
||||
with current_directory(join(self.get_build_dir(arch.arch), 'Lib')):
|
||||
stdlib_filens = list(walk_valid_filens(
|
||||
'.', self.stdlib_dir_blacklist, self.stdlib_filen_blacklist))
|
||||
info("Zip {} files into the bundle".format(len(stdlib_filens)))
|
||||
shprint(sh.zip, stdlib_zip, *stdlib_filens)
|
||||
|
||||
# copy the site-packages into place
|
||||
ensure_dir(join(dirn, 'site-packages'))
|
||||
ensure_dir(self.ctx.get_python_install_dir())
|
||||
# TODO: Improve the API around walking and copying the files
|
||||
with current_directory(self.ctx.get_python_install_dir()):
|
||||
filens = list(walk_valid_filens(
|
||||
'.', self.site_packages_dir_blacklist,
|
||||
self.site_packages_filen_blacklist))
|
||||
info("Copy {} files into the site-packages".format(len(filens)))
|
||||
for filen in filens:
|
||||
info(" - copy {}".format(filen))
|
||||
ensure_dir(join(dirn, 'site-packages', dirname(filen)))
|
||||
copy2(filen, join(dirn, 'site-packages', filen))
|
||||
|
||||
# copy the python .so files into place
|
||||
python_build_dir = join(self.get_build_dir(arch.arch),
|
||||
'android-build')
|
||||
python_lib_name = 'libpython' + self.major_minor_version_string
|
||||
if self.major_minor_version_string[0] == '3':
|
||||
python_lib_name += 'm'
|
||||
shprint(sh.cp, join(python_build_dir, python_lib_name + '.so'),
|
||||
join(self.ctx.dist_dir, self.ctx.dist_name, 'libs', arch.arch))
|
||||
|
||||
info('Renaming .so files to reflect cross-compile')
|
||||
self.reduce_object_file_names(join(dirn, 'site-packages'))
|
||||
|
||||
return join(dirn, 'site-packages')
|
||||
|
||||
|
||||
class HostPythonRecipe(Recipe):
|
||||
'''
|
||||
This is the base class for hostpython3 and hostpython2 recipes. This class
|
||||
takes care of all the work needed to build a hostpython recipe but, be
|
||||
careful, it is intended to be subclassed because some of the vars need to
|
||||
be set:
|
||||
|
||||
- :attr:`name`
|
||||
- :attr:`version`
|
||||
|
||||
.. versionadded:: 0.6.0
|
||||
Refactored from the hostpython3's recipe by inclement
|
||||
'''
|
||||
|
||||
name = ''
|
||||
'''The hostpython's recipe name. This should be ``hostpython2`` or
|
||||
``hostpython3``
|
||||
|
||||
.. warning:: This must be set in inherited class.'''
|
||||
|
||||
version = ''
|
||||
'''The hostpython's recipe version.
|
||||
|
||||
.. warning:: This must be set in inherited class.'''
|
||||
|
||||
build_subdir = 'native-build'
|
||||
'''Specify the sub build directory for the hostpython recipe. Defaults
|
||||
to ``native-build``.'''
|
||||
|
||||
url = 'https://www.python.org/ftp/python/{version}/Python-{version}.tgz'
|
||||
'''The default url to download our host python recipe. This url will
|
||||
change depending on the python version set in attribute :attr:`version`.'''
|
||||
|
||||
def get_build_container_dir(self, arch=None):
|
||||
choices = self.check_recipe_choices()
|
||||
dir_name = '-'.join([self.name] + choices)
|
||||
return join(self.ctx.build_dir, 'other_builds', dir_name, 'desktop')
|
||||
|
||||
def get_build_dir(self, arch=None):
|
||||
'''
|
||||
.. note:: Unlike other recipes, the hostpython build dir doesn't
|
||||
depend on the target arch
|
||||
'''
|
||||
return join(self.get_build_container_dir(), self.name)
|
||||
|
||||
def get_path_to_python(self):
|
||||
return join(self.get_build_dir(), self.build_subdir)
|
||||
|
||||
def build_arch(self, arch):
|
||||
recipe_build_dir = self.get_build_dir(arch.arch)
|
||||
|
||||
# Create a subdirectory to actually perform the build
|
||||
build_dir = join(recipe_build_dir, self.build_subdir)
|
||||
ensure_dir(build_dir)
|
||||
|
||||
if not exists(join(build_dir, 'python')):
|
||||
with current_directory(recipe_build_dir):
|
||||
# Configure the build
|
||||
with current_directory(build_dir):
|
||||
if not exists('config.status'):
|
||||
shprint(
|
||||
sh.Command(join(recipe_build_dir, 'configure')))
|
||||
|
||||
# Create the Setup file. This copying from Setup.dist
|
||||
# seems to be the normal and expected procedure.
|
||||
shprint(sh.cp, join('Modules', 'Setup.dist'),
|
||||
join(build_dir, 'Modules', 'Setup'))
|
||||
|
||||
shprint(sh.make, '-j', str(cpu_count()), '-C', build_dir)
|
||||
else:
|
||||
info('Skipping {name} ({version}) build, as it has already '
|
||||
'been completed'.format(name=self.name, version=self.version))
|
||||
|
||||
self.ctx.hostpython = join(build_dir, 'python')
|
|
@ -1,4 +1,4 @@
|
|||
from os.path import basename, dirname, exists, isdir, isfile, join, realpath
|
||||
from os.path import basename, dirname, exists, isdir, isfile, join, realpath, split
|
||||
import importlib
|
||||
import glob
|
||||
from shutil import rmtree
|
||||
|
@ -12,16 +12,16 @@ import shutil
|
|||
import fnmatch
|
||||
from os import listdir, unlink, environ, mkdir, curdir, walk
|
||||
from sys import stdout
|
||||
import time
|
||||
try:
|
||||
from urlparse import urlparse
|
||||
except ImportError:
|
||||
from urllib.parse import urlparse
|
||||
from pythonforandroid.logger import (logger, info, warning, error, debug, shprint, info_main)
|
||||
from pythonforandroid.util import (urlretrieve, current_directory, ensure_dir)
|
||||
from pythonforandroid.logger import (logger, info, warning, debug, shprint, info_main)
|
||||
from pythonforandroid.util import (urlretrieve, current_directory, ensure_dir,
|
||||
BuildInterruptingException)
|
||||
|
||||
# this import is necessary to keep imp.load_source from complaining :)
|
||||
|
||||
|
||||
if PY2:
|
||||
import imp
|
||||
import_recipe = imp.load_source
|
||||
|
@ -140,13 +140,26 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
else:
|
||||
progression = '{0:.2f}%'.format(
|
||||
index * blksize * 100. / float(size))
|
||||
stdout.write('- Download {}\r'.format(progression))
|
||||
stdout.flush()
|
||||
if "CI" not in environ:
|
||||
stdout.write('- Download {}\r'.format(progression))
|
||||
stdout.flush()
|
||||
|
||||
if exists(target):
|
||||
unlink(target)
|
||||
|
||||
urlretrieve(url, target, report_hook)
|
||||
# Download item with multiple attempts (for bad connections):
|
||||
attempts = 0
|
||||
while True:
|
||||
try:
|
||||
urlretrieve(url, target, report_hook)
|
||||
except OSError as e:
|
||||
attempts += 1
|
||||
if attempts >= 5:
|
||||
raise e
|
||||
stdout.write('Download failed, retrying in a second...')
|
||||
time.sleep(1)
|
||||
continue
|
||||
break
|
||||
return target
|
||||
elif parsed_url.scheme in ('git', 'git+file', 'git+ssh', 'git+http', 'git+https'):
|
||||
if isdir(target):
|
||||
|
@ -167,28 +180,18 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
shprint(sh.git, 'submodule', 'update', '--recursive')
|
||||
return target
|
||||
|
||||
# def get_archive_rootdir(self, filename):
|
||||
# if filename.endswith(".tgz") or filename.endswith(".tar.gz") or \
|
||||
# filename.endswith(".tbz2") or filename.endswith(".tar.bz2"):
|
||||
# archive = tarfile.open(filename)
|
||||
# root = archive.next().path.split("/")
|
||||
# return root[0]
|
||||
# elif filename.endswith(".zip"):
|
||||
# with zipfile.ZipFile(filename) as zf:
|
||||
# return dirname(zf.namelist()[0])
|
||||
# else:
|
||||
# print("Error: cannot detect root directory")
|
||||
# print("Unrecognized extension for {}".format(filename))
|
||||
# raise Exception()
|
||||
|
||||
def apply_patch(self, filename, arch):
|
||||
def apply_patch(self, filename, arch, build_dir=None):
|
||||
"""
|
||||
Apply a patch from the current recipe directory into the current
|
||||
build directory.
|
||||
|
||||
.. versionchanged:: 0.6.0
|
||||
Add ability to apply patch from any dir via kwarg `build_dir`
|
||||
"""
|
||||
info("Applying patch {}".format(filename))
|
||||
build_dir = build_dir if build_dir else self.get_build_dir(arch)
|
||||
filename = join(self.get_recipe_dir(), filename)
|
||||
shprint(sh.patch, "-t", "-d", self.get_build_dir(arch), "-p1",
|
||||
shprint(sh.patch, "-t", "-d", build_dir, "-p1",
|
||||
"-i", filename, _tail=10)
|
||||
|
||||
def copy_file(self, filename, dest):
|
||||
|
@ -206,42 +209,12 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
with open(dest, "ab") as fd:
|
||||
fd.write(data)
|
||||
|
||||
# def has_marker(self, marker):
|
||||
# """
|
||||
# Return True if the current build directory has the marker set
|
||||
# """
|
||||
# return exists(join(self.build_dir, ".{}".format(marker)))
|
||||
|
||||
# def set_marker(self, marker):
|
||||
# """
|
||||
# Set a marker info the current build directory
|
||||
# """
|
||||
# with open(join(self.build_dir, ".{}".format(marker)), "w") as fd:
|
||||
# fd.write("ok")
|
||||
|
||||
# def delete_marker(self, marker):
|
||||
# """
|
||||
# Delete a specific marker
|
||||
# """
|
||||
# try:
|
||||
# unlink(join(self.build_dir, ".{}".format(marker)))
|
||||
# except:
|
||||
# pass
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
'''The name of the recipe, the same as the folder containing it.'''
|
||||
modname = self.__class__.__module__
|
||||
return modname.split(".", 2)[-1]
|
||||
|
||||
# @property
|
||||
# def archive_fn(self):
|
||||
# bfn = basename(self.url.format(version=self.version))
|
||||
# fn = "{}/{}-{}".format(
|
||||
# self.ctx.cache_dir,
|
||||
# self.name, bfn)
|
||||
# return fn
|
||||
|
||||
@property
|
||||
def filtered_archs(self):
|
||||
'''Return archs of self.ctx that are valid build archs
|
||||
|
@ -269,6 +242,12 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
recipes.append(recipe)
|
||||
return sorted(recipes)
|
||||
|
||||
def get_opt_depends_in_list(self, recipes):
|
||||
'''Given a list of recipe names, returns those that are also in
|
||||
self.opt_depends.
|
||||
'''
|
||||
return [recipe for recipe in recipes if recipe in self.opt_depends]
|
||||
|
||||
def get_build_container_dir(self, arch):
|
||||
'''Given the arch name, returns the directory where it will be
|
||||
built.
|
||||
|
@ -277,7 +256,8 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
alternative or optional dependencies are being built.
|
||||
'''
|
||||
dir_name = self.get_dir_name()
|
||||
return join(self.ctx.build_dir, 'other_builds', dir_name, arch)
|
||||
return join(self.ctx.build_dir, 'other_builds',
|
||||
dir_name, '{}__ndk_target_{}'.format(arch, self.ctx.ndk_api))
|
||||
|
||||
def get_dir_name(self):
|
||||
choices = self.check_recipe_choices()
|
||||
|
@ -367,7 +347,7 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
debug('* Expected md5sum: {}'.format(expected_md5))
|
||||
raise ValueError(
|
||||
('Generated md5sum does not match expected md5sum '
|
||||
'for {} recipe').format(self.name))
|
||||
'for {} recipe').format(self.name))
|
||||
else:
|
||||
info('{} download already cached, skipping'.format(self.name))
|
||||
|
||||
|
@ -410,24 +390,20 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
try:
|
||||
sh.unzip(extraction_filename)
|
||||
except (sh.ErrorReturnCode_1, sh.ErrorReturnCode_2):
|
||||
pass # return code 1 means unzipping had
|
||||
# warnings but did complete,
|
||||
# apparently happens sometimes with
|
||||
# github zips
|
||||
# return code 1 means unzipping had
|
||||
# warnings but did complete,
|
||||
# apparently happens sometimes with
|
||||
# github zips
|
||||
pass
|
||||
import zipfile
|
||||
fileh = zipfile.ZipFile(extraction_filename, 'r')
|
||||
root_directory = fileh.filelist[0].filename.split('/')[0]
|
||||
if root_directory != basename(directory_name):
|
||||
shprint(sh.mv, root_directory, directory_name)
|
||||
elif (extraction_filename.endswith('.tar.gz') or
|
||||
extraction_filename.endswith('.tgz') or
|
||||
extraction_filename.endswith('.tar.bz2') or
|
||||
extraction_filename.endswith('.tbz2') or
|
||||
extraction_filename.endswith('.tar.xz') or
|
||||
extraction_filename.endswith('.txz')):
|
||||
elif extraction_filename.endswith(
|
||||
('.tar.gz', '.tgz', '.tar.bz2', '.tbz2', '.tar.xz', '.txz')):
|
||||
sh.tar('xf', extraction_filename)
|
||||
root_directory = shprint(
|
||||
sh.tar, 'tf', extraction_filename).stdout.decode(
|
||||
root_directory = sh.tar('tf', extraction_filename).stdout.decode(
|
||||
'utf-8').split('\n')[0].split('/')[0]
|
||||
if root_directory != directory_name:
|
||||
shprint(sh.mv, root_directory, directory_name)
|
||||
|
@ -450,12 +426,12 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
else:
|
||||
info('{} is already unpacked, skipping'.format(self.name))
|
||||
|
||||
def get_recipe_env(self, arch=None, with_flags_in_cc=True):
|
||||
def get_recipe_env(self, arch=None, with_flags_in_cc=True, clang=False):
|
||||
"""Return the env specialized for the recipe
|
||||
"""
|
||||
if arch is None:
|
||||
arch = self.filtered_archs[0]
|
||||
return arch.get_env(with_flags_in_cc=with_flags_in_cc)
|
||||
return arch.get_env(with_flags_in_cc=with_flags_in_cc, clang=clang)
|
||||
|
||||
def prebuild_arch(self, arch):
|
||||
'''Run any pre-build tasks for the Recipe. By default, this checks if
|
||||
|
@ -471,8 +447,11 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
build_dir = self.get_build_dir(arch.arch)
|
||||
return exists(join(build_dir, '.patched'))
|
||||
|
||||
def apply_patches(self, arch):
|
||||
'''Apply any patches for the Recipe.'''
|
||||
def apply_patches(self, arch, build_dir=None):
|
||||
'''Apply any patches for the Recipe.
|
||||
|
||||
.. versionchanged:: 0.6.0
|
||||
Add ability to apply patches from any dir via kwarg `build_dir`'''
|
||||
if self.patches:
|
||||
info_main('Applying patches for {}[{}]'
|
||||
.format(self.name, arch.arch))
|
||||
|
@ -481,6 +460,7 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
info_main('{} already patched, skipping'.format(self.name))
|
||||
return
|
||||
|
||||
build_dir = build_dir if build_dir else self.get_build_dir(arch.arch)
|
||||
for patch in self.patches:
|
||||
if isinstance(patch, (tuple, list)):
|
||||
patch, patch_check = patch
|
||||
|
@ -489,9 +469,9 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
|
||||
self.apply_patch(
|
||||
patch.format(version=self.version, arch=arch.arch),
|
||||
arch.arch)
|
||||
arch.arch, build_dir=build_dir)
|
||||
|
||||
shprint(sh.touch, join(self.get_build_dir(arch.arch), '.patched'))
|
||||
shprint(sh.touch, join(build_dir, '.patched'))
|
||||
|
||||
def should_build(self, arch):
|
||||
'''Should perform any necessary test and return True only if it needs
|
||||
|
@ -547,8 +527,8 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
if exists(base_dir):
|
||||
dirs.append(base_dir)
|
||||
if not dirs:
|
||||
warning(('Attempted to clean build for {} but found no existing '
|
||||
'build dirs').format(self.name))
|
||||
warning('Attempted to clean build for {} but found no existing '
|
||||
'build dirs'.format(self.name))
|
||||
|
||||
for directory in dirs:
|
||||
if exists(directory):
|
||||
|
@ -595,6 +575,7 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
@classmethod
|
||||
def get_recipe(cls, name, ctx):
|
||||
'''Returns the Recipe with the given name, if it exists.'''
|
||||
name = name.lower()
|
||||
if not hasattr(cls, "recipes"):
|
||||
cls.recipes = {}
|
||||
if name in cls.recipes:
|
||||
|
@ -602,20 +583,28 @@ class Recipe(with_metaclass(RecipeMeta)):
|
|||
|
||||
recipe_file = None
|
||||
for recipes_dir in cls.recipe_dirs(ctx):
|
||||
recipe_file = join(recipes_dir, name, '__init__.py')
|
||||
if exists(recipe_file):
|
||||
if not exists(recipes_dir):
|
||||
continue
|
||||
# Find matching folder (may differ in case):
|
||||
for subfolder in listdir(recipes_dir):
|
||||
if subfolder.lower() == name:
|
||||
recipe_file = join(recipes_dir, subfolder, '__init__.py')
|
||||
if exists(recipe_file):
|
||||
name = subfolder # adapt to actual spelling
|
||||
break
|
||||
recipe_file = None
|
||||
if recipe_file is not None:
|
||||
break
|
||||
recipe_file = None
|
||||
|
||||
if not recipe_file:
|
||||
raise IOError('Recipe does not exist: {}'.format(name))
|
||||
raise ValueError('Recipe does not exist: {}'.format(name))
|
||||
|
||||
mod = import_recipe('pythonforandroid.recipes.{}'.format(name), recipe_file)
|
||||
if len(logger.handlers) > 1:
|
||||
logger.removeHandler(logger.handlers[1])
|
||||
recipe = mod.recipe
|
||||
recipe.ctx = ctx
|
||||
cls.recipes[name] = recipe
|
||||
cls.recipes[name.lower()] = recipe
|
||||
return recipe
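With the case-insensitive folder matching above, differently cased spellings should resolve to the same cached recipe object; a small hypothetical usage (assumes a prepared build context `ctx`):

from pythonforandroid.recipe import Recipe


def load_pillow(ctx):
    # Both spellings hit the same recipe folder and the same cache entry.
    lower = Recipe.get_recipe('pillow', ctx)
    exact = Recipe.get_recipe('Pillow', ctx)
    assert lower is exact
    return exact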
|
||||
|
||||
|
||||
|
@ -626,8 +615,8 @@ class IncludedFilesBehaviour(object):
|
|||
|
||||
def prepare_build_dir(self, arch):
|
||||
if self.src_filename is None:
|
||||
print('IncludedFilesBehaviour failed: no src_filename specified')
|
||||
exit(1)
|
||||
raise BuildInterruptingException(
|
||||
'IncludedFilesBehaviour failed: no src_filename specified')
|
||||
shprint(sh.rm, '-rf', self.get_build_dir(arch))
|
||||
shprint(sh.cp, '-a', join(self.get_recipe_dir(), self.src_filename),
|
||||
self.get_build_dir(arch))
|
||||
|
@ -640,6 +629,9 @@ class BootstrapNDKRecipe(Recipe):
|
|||
|
||||
To build an NDK project which is not part of the bootstrap, see
|
||||
:class:`~pythonforandroid.recipe.NDKRecipe`.
|
||||
|
||||
To link with python, call the method :meth:`get_recipe_env`
|
||||
with the kwarg *with_python=True*.
|
||||
'''
|
||||
|
||||
dir_name = None # The name of the recipe build folder in the jni dir
|
||||
|
@ -656,6 +648,20 @@ class BootstrapNDKRecipe(Recipe):
|
|||
def get_jni_dir(self):
|
||||
return join(self.ctx.bootstrap.build_dir, 'jni')
|
||||
|
||||
def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=False):
|
||||
env = super(BootstrapNDKRecipe, self).get_recipe_env(
|
||||
arch, with_flags_in_cc)
|
||||
if not with_python:
|
||||
return env
|
||||
|
||||
env['PYTHON_INCLUDE_ROOT'] = self.ctx.python_recipe.include_root(arch.arch)
|
||||
env['PYTHON_LINK_ROOT'] = self.ctx.python_recipe.link_root(arch.arch)
|
||||
env['EXTRA_LDLIBS'] = ' -lpython{}'.format(
|
||||
self.ctx.python_recipe.major_minor_version_string)
|
||||
if 'python3' in self.ctx.python_recipe.name:
|
||||
env['EXTRA_LDLIBS'] += 'm'
|
||||
return env
|
||||
|
||||
|
||||
class NDKRecipe(Recipe):
|
||||
'''A recipe class for any NDK project not included in the bootstrap.'''
|
||||
|
@ -682,7 +688,13 @@ class NDKRecipe(Recipe):
|
|||
|
||||
env = self.get_recipe_env(arch)
|
||||
with current_directory(self.get_build_dir(arch.arch)):
|
||||
shprint(sh.ndk_build, 'V=1', 'APP_ABI=' + arch.arch, *extra_args, _env=env)
|
||||
shprint(
|
||||
sh.ndk_build,
|
||||
'V=1',
|
||||
'APP_PLATFORM=android-' + str(self.ctx.ndk_api),
|
||||
'APP_ABI=' + arch.arch,
|
||||
*extra_args, _env=env
|
||||
)
|
||||
|
||||
|
||||
class PythonRecipe(Recipe):
|
||||
|
@ -711,6 +723,13 @@ class PythonRecipe(Recipe):
|
|||
setup_extra_args = []
|
||||
'''List of extra arguments to pass to setup.py'''
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(PythonRecipe, self).__init__(*args, **kwargs)
|
||||
depends = self.depends
|
||||
depends.append(('python2', 'python2legacy', 'python3', 'python3crystax'))
|
||||
depends = list(set(depends))
|
||||
self.depends = depends
|
||||
|
||||
def clean_build(self, arch=None):
|
||||
super(PythonRecipe, self).clean_build(arch=arch)
|
||||
name = self.folder_name
|
||||
|
@ -726,14 +745,12 @@ class PythonRecipe(Recipe):
|
|||
|
||||
@property
|
||||
def real_hostpython_location(self):
|
||||
if 'hostpython2' in self.ctx.recipe_build_order:
|
||||
return join(
|
||||
Recipe.get_recipe('hostpython2', self.ctx).get_build_dir(),
|
||||
'hostpython')
|
||||
elif 'hostpython3crystax' in self.ctx.recipe_build_order:
|
||||
return join(
|
||||
Recipe.get_recipe('hostpython3crystax', self.ctx).get_build_dir(),
|
||||
'hostpython')
|
||||
host_name = 'host{}'.format(self.ctx.python_recipe.name)
|
||||
host_build = Recipe.get_recipe(host_name, self.ctx).get_build_dir()
|
||||
if host_name in ['hostpython2', 'hostpython3']:
|
||||
return join(host_build, 'native-build', 'python')
|
||||
elif host_name in ['hostpython3crystax', 'hostpython2legacy']:
|
||||
return join(host_build, 'hostpython')
|
||||
else:
|
||||
python_recipe = self.ctx.python_recipe
|
||||
return 'python{}'.format(python_recipe.version)
|
||||
|
@ -757,17 +774,28 @@ class PythonRecipe(Recipe):
|
|||
|
||||
env['PYTHONNOUSERSITE'] = '1'
|
||||
|
||||
# Set the LANG, this isn't usually important but is a better default
|
||||
# as it occasionally matters how Python e.g. reads files
|
||||
env['LANG'] = "en_GB.UTF-8"
|
||||
|
||||
if not self.call_hostpython_via_targetpython:
|
||||
# sets python headers/linkages...depending on python's recipe
|
||||
python_name = self.ctx.python_recipe.name
|
||||
python_version = self.ctx.python_recipe.version
|
||||
python_short_version = '.'.join(python_version.split('.')[:2])
|
||||
if 'python2' in self.ctx.recipe_build_order:
|
||||
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
|
||||
env['CFLAGS'] += ' -I' + env[
|
||||
'PYTHON_ROOT'] + '/include/python2.7'
|
||||
env['LDFLAGS'] += ' -L' + env['PYTHON_ROOT'] + '/lib' + \
|
||||
' -lpython2.7'
|
||||
elif self.ctx.python_recipe.from_crystax:
|
||||
if not self.ctx.python_recipe.from_crystax:
|
||||
env['CFLAGS'] += ' -I{}'.format(
|
||||
self.ctx.python_recipe.include_root(arch.arch))
|
||||
env['LDFLAGS'] += ' -L{} -lpython{}'.format(
|
||||
self.ctx.python_recipe.link_root(arch.arch),
|
||||
self.ctx.python_recipe.major_minor_version_string)
|
||||
if python_name == 'python3':
|
||||
env['LDFLAGS'] += 'm'
|
||||
elif python_name == 'python2legacy':
|
||||
env['PYTHON_ROOT'] = join(
|
||||
self.ctx.python_recipe.get_build_dir(
|
||||
arch.arch), 'python-install')
|
||||
else:
|
||||
ndk_dir_python = join(self.ctx.ndk_dir, 'sources',
|
||||
'python', python_version)
|
||||
env['CFLAGS'] += ' -I{} '.format(
|
||||
|
@ -776,26 +804,19 @@ class PythonRecipe(Recipe):
|
|||
env['LDFLAGS'] += ' -L{}'.format(
|
||||
join(ndk_dir_python, 'libs', arch.arch))
|
||||
env['LDFLAGS'] += ' -lpython{}m'.format(python_short_version)
|
||||
elif 'python3' in self.ctx.recipe_build_order:
|
||||
# These headers are unused because the python3 recipe was removed
# TODO: should be reviewed when the python3 recipe is added
|
||||
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
|
||||
env['CFLAGS'] += ' -I' + env[
|
||||
'PYTHON_ROOT'] + '/include/python{}m'.format(
|
||||
python_short_version)
|
||||
env['LDFLAGS'] += ' -L' + env['PYTHON_ROOT'] + '/lib' + \
|
||||
' -lpython{}m'.format(
|
||||
python_short_version)
|
||||
|
||||
hppath = []
|
||||
hppath.append(join(dirname(self.hostpython_location), 'Lib'))
|
||||
hppath.append(join(hppath[0], 'site-packages'))
|
||||
builddir = join(dirname(self.hostpython_location), 'build')
|
||||
hppath += [join(builddir, d) for d in listdir(builddir)
|
||||
if isdir(join(builddir, d))]
|
||||
if 'PYTHONPATH' in env:
|
||||
env['PYTHONPATH'] = ':'.join(hppath + [env['PYTHONPATH']])
|
||||
else:
|
||||
env['PYTHONPATH'] = ':'.join(hppath)
|
||||
if exists(builddir):
|
||||
hppath += [join(builddir, d) for d in listdir(builddir)
|
||||
if isdir(join(builddir, d))]
|
||||
if len(hppath) > 0:
|
||||
if 'PYTHONPATH' in env:
|
||||
env['PYTHONPATH'] = ':'.join(hppath + [env['PYTHONPATH']])
|
||||
else:
|
||||
env['PYTHONPATH'] = ':'.join(hppath)
|
||||
return env
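A standalone sketch of the PYTHONPATH assembly above, with assumed paths for the hostpython location and any pre-existing PYTHONPATH:

from os.path import dirname, join


def hostpython_pythonpath(hostpython_location, existing=None):
    # Mirrors the hppath construction above: hostpython's Lib dir, its
    # site-packages, then any PYTHONPATH that was already set.
    hppath = [join(dirname(hostpython_location), 'Lib')]
    hppath.append(join(hppath[0], 'site-packages'))
    return ':'.join(hppath + [existing]) if existing else ':'.join(hppath)


print(hostpython_pythonpath('/builds/hostpython3/native-build/python',
                            existing='/extra/site-packages'))
# /builds/hostpython3/native-build/Lib:/builds/hostpython3/native-build/Lib/site-packages:/extra/site-packages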
|
||||
|
||||
def should_build(self, arch):
|
||||
|
@ -826,7 +847,7 @@ class PythonRecipe(Recipe):
|
|||
with current_directory(self.get_build_dir(arch.arch)):
|
||||
hostpython = sh.Command(self.hostpython_location)
|
||||
|
||||
if self.ctx.python_recipe.from_crystax:
|
||||
if self.ctx.python_recipe.name != 'python2legacy':
|
||||
hpenv = env.copy()
|
||||
shprint(hostpython, 'setup.py', 'install', '-O2',
|
||||
'--root={}'.format(self.ctx.get_python_install_dir()),
|
||||
|
@ -835,13 +856,11 @@ class PythonRecipe(Recipe):
|
|||
elif self.call_hostpython_via_targetpython:
|
||||
shprint(hostpython, 'setup.py', 'install', '-O2', _env=env,
|
||||
*self.setup_extra_args)
|
||||
else:
|
||||
hppath = join(dirname(self.hostpython_location), 'Lib',
|
||||
'site-packages')
|
||||
else: # python2legacy
|
||||
hppath = join(dirname(self.hostpython_location), 'Lib', 'site-packages')
|
||||
hpenv = env.copy()
|
||||
if 'PYTHONPATH' in hpenv:
|
||||
hpenv['PYTHONPATH'] = ':'.join([hppath] +
|
||||
hpenv['PYTHONPATH'].split(':'))
|
||||
hpenv['PYTHONPATH'] = ':'.join([hppath] + hpenv['PYTHONPATH'].split(':'))
|
||||
else:
|
||||
hpenv['PYTHONPATH'] = hppath
|
||||
shprint(hostpython, 'setup.py', 'install', '-O2',
|
||||
|
@ -920,12 +939,14 @@ class CppCompiledComponentsPythonRecipe(CompiledComponentsPythonRecipe):
|
|||
arch_noeabi=arch.arch.replace('eabi', '')
|
||||
)
|
||||
env['LDSHARED'] = env['CC'] + ' -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions'
|
||||
env['CFLAGS'] += " -I{ctx.ndk_dir}/platforms/android-{ctx.android_api}/arch-{arch_noeabi}/usr/include" \
|
||||
" -I{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/include" \
|
||||
" -I{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/include".format(**keys)
|
||||
env['CFLAGS'] += (
|
||||
" -I{ctx.ndk_dir}/platforms/android-{ctx.android_api}/arch-{arch_noeabi}/usr/include" +
|
||||
" -I{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/include" +
|
||||
" -I{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}/include").format(**keys)
|
||||
env['CXXFLAGS'] = env['CFLAGS'] + ' -frtti -fexceptions'
|
||||
env['LDFLAGS'] += " -L{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}" \
|
||||
" -lgnustl_shared".format(**keys)
|
||||
env['LDFLAGS'] += (
|
||||
" -L{ctx.ndk_dir}/sources/cxx-stl/gnu-libstdc++/{ctx.toolchain_version}/libs/{arch.arch}" +
|
||||
" -lgnustl_shared").format(**keys)
|
||||
|
||||
return env
|
||||
|
||||
|
@ -949,7 +970,7 @@ class CythonRecipe(PythonRecipe):
|
|||
def __init__(self, *args, **kwargs):
|
||||
super(CythonRecipe, self).__init__(*args, **kwargs)
|
||||
depends = self.depends
|
||||
depends.append(('python2', 'python3crystax'))
|
||||
depends.append(('python2', 'python2legacy', 'python3', 'python3crystax'))
|
||||
depends = list(set(depends))
|
||||
self.depends = depends
|
||||
|
||||
|
@ -966,20 +987,10 @@ class CythonRecipe(PythonRecipe):
|
|||
|
||||
env = self.get_recipe_env(arch)
|
||||
|
||||
if self.ctx.python_recipe.from_crystax:
|
||||
command = sh.Command('python{}'.format(self.ctx.python_recipe.version))
|
||||
site_packages_dirs = command(
|
||||
'-c', 'import site; print("\\n".join(site.getsitepackages()))')
|
||||
site_packages_dirs = site_packages_dirs.stdout.decode('utf-8').split('\n')
|
||||
if 'PYTHONPATH' in env:
|
||||
env['PYTHONPATH'] = env['PYTHONPATH'] + ':{}'.format(':'.join(site_packages_dirs))
|
||||
else:
|
||||
env['PYTHONPATH'] = ':'.join(site_packages_dirs)
|
||||
|
||||
with current_directory(self.get_build_dir(arch.arch)):
|
||||
hostpython = sh.Command(self.ctx.hostpython)
|
||||
shprint(hostpython, '-c', 'import sys; print(sys.path)', _env=env)
|
||||
print('cwd is', realpath(curdir))
|
||||
debug('cwd is {}'.format(realpath(curdir)))
|
||||
info('Trying first build of {} to get cython files: this is '
|
||||
'expected to fail'.format(self.name))
|
||||
|
||||
|
@ -1000,14 +1011,19 @@ class CythonRecipe(PythonRecipe):
|
|||
info('First build appeared to complete correctly, skipping manual '
|
||||
'cythonising.')
|
||||
|
||||
if 'python2' in self.ctx.recipe_build_order:
|
||||
self.strip_object_files(arch, env)
|
||||
|
||||
def strip_object_files(self, arch, env, build_dir=None):
|
||||
if build_dir is None:
|
||||
build_dir = self.get_build_dir(arch.arch)
|
||||
with current_directory(build_dir):
|
||||
info('Stripping object files')
|
||||
if self.ctx.python_recipe.name == 'python2legacy':
|
||||
info('Stripping object files')
|
||||
build_lib = glob.glob('./build/lib*')
|
||||
shprint(sh.find, build_lib[0], '-name', '*.o', '-exec',
|
||||
env['STRIP'], '{}', ';', _env=env)
|
||||
|
||||
if 'python3crystax' in self.ctx.recipe_build_order:
|
||||
info('Stripping object files')
|
||||
else:
|
||||
shprint(sh.find, '.', '-iname', '*.so', '-exec',
|
||||
'/usr/bin/echo', '{}', ';', _env=env)
|
||||
shprint(sh.find, '.', '-iname', '*.so', '-exec',
|
||||
|
@ -1050,11 +1066,11 @@ class CythonRecipe(PythonRecipe):
|
|||
if self.ctx.python_recipe.from_crystax:
|
||||
env['LDFLAGS'] = (env['LDFLAGS'] +
|
||||
' -L{}'.format(join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)))
|
||||
# ' -L/home/asandy/.local/share/python-for-android/build/bootstrap_builds/sdl2/libs/armeabi '
|
||||
if self.ctx.python_recipe.from_crystax:
|
||||
env['LDSHARED'] = env['CC'] + ' -shared'
|
||||
else:
|
||||
|
||||
if self.ctx.python_recipe.name == 'python2legacy':
|
||||
env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink.sh')
|
||||
else:
|
||||
env['LDSHARED'] = env['CC'] + ' -shared'
|
||||
# shprint(sh.whereis, env['LDSHARED'], _env=env)
|
||||
env['LIBLINK'] = 'NOTNONE'
|
||||
env['NDKPLATFORM'] = self.ctx.ndk_platform
|
||||
|
@ -1068,6 +1084,24 @@ class CythonRecipe(PythonRecipe):
|
|||
env['LIBLINK_PATH'] = liblink_path
|
||||
ensure_dir(liblink_path)
|
||||
|
||||
# Add crystax-specific site packages:
|
||||
if self.ctx.python_recipe.from_crystax:
|
||||
command = sh.Command('python{}'.format(self.ctx.python_recipe.version))
|
||||
site_packages_dirs = command(
|
||||
'-c', 'import site; print("\\n".join(site.getsitepackages()))')
|
||||
site_packages_dirs = site_packages_dirs.stdout.decode('utf-8').split('\n')
|
||||
if 'PYTHONPATH' in env:
|
||||
env['PYTHONPATH'] = env['PYTHONPATH'] +\
|
||||
':{}'.format(':'.join(site_packages_dirs))
|
||||
else:
|
||||
env['PYTHONPATH'] = ':'.join(site_packages_dirs)
|
||||
while env['PYTHONPATH'].find("::") > 0:
|
||||
env['PYTHONPATH'] = env['PYTHONPATH'].replace("::", ":")
|
||||
if env['PYTHONPATH'].endswith(":"):
|
||||
env['PYTHONPATH'] = env['PYTHONPATH'][:-1]
|
||||
if env['PYTHONPATH'].startswith(":"):
|
||||
env['PYTHONPATH'] = env['PYTHONPATH'][1:]
|
||||
|
||||
return env
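The PYTHONPATH clean-up above can be mirrored in isolation; the sample value below is assumed:

def normalise_pythonpath(pythonpath):
    # Collapse '::' runs and strip a single leading/trailing ':', exactly
    # as the loop above does.
    while pythonpath.find('::') > 0:
        pythonpath = pythonpath.replace('::', ':')
    if pythonpath.endswith(':'):
        pythonpath = pythonpath[:-1]
    if pythonpath.startswith(':'):
        pythonpath = pythonpath[1:]
    return pythonpath


print(normalise_pythonpath('/a/site-packages::/b/site-packages:'))
# /a/site-packages:/b/site-packages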
|
||||
|
||||
|
||||
|
@ -1086,19 +1120,44 @@ class TargetPythonRecipe(Recipe):
|
|||
def prebuild_arch(self, arch):
|
||||
super(TargetPythonRecipe, self).prebuild_arch(arch)
|
||||
if self.from_crystax and self.ctx.ndk != 'crystax':
|
||||
error('The {} recipe can only be built when '
|
||||
'using the CrystaX NDK. Exiting.'.format(self.name))
|
||||
exit(1)
|
||||
raise BuildInterruptingException(
|
||||
'The {} recipe can only be built when '
|
||||
'using the CrystaX NDK. Exiting.'.format(self.name))
|
||||
self.ctx.python_recipe = self
|
||||
|
||||
# @property
|
||||
# def ctx(self):
|
||||
# return self._ctx
|
||||
def include_root(self, arch):
|
||||
'''The root directory from which to include headers.'''
|
||||
raise NotImplementedError('Not implemented in TargetPythonRecipe')
|
||||
|
||||
# @ctx.setter
|
||||
# def ctx(self, ctx):
|
||||
# self._ctx = ctx
|
||||
# ctx.python_recipe = self
|
||||
def link_root(self):
|
||||
raise NotImplementedError('Not implemented in TargetPythonRecipe')
|
||||
|
||||
@property
|
||||
def major_minor_version_string(self):
|
||||
from distutils.version import LooseVersion
|
||||
return '.'.join([str(v) for v in LooseVersion(self.version).version[:2]])
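For reference, the property above reduces a full version string to its major.minor part; the sample values below are assumed:

from distutils.version import LooseVersion


def major_minor(version):
    # Same expression as the property above.
    return '.'.join(str(v) for v in LooseVersion(version).version[:2])


print(major_minor('3.7.1'))   # 3.7
print(major_minor('2.7.15'))  # 2.7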
|
||||
|
||||
def create_python_bundle(self, dirn, arch):
|
||||
"""
|
||||
Create a packaged python bundle in the target directory, by
|
||||
copying all the modules and standard library to the right
|
||||
place.
|
||||
"""
|
||||
raise NotImplementedError('{} does not implement create_python_bundle'.format(self))
|
||||
|
||||
def reduce_object_file_names(self, dirn):
|
||||
"""Recursively renames all files named XXX.cpython-...-linux-gnu.so"
|
||||
to "XXX.so", i.e. removing the erroneous architecture name
|
||||
coming from the local system.
|
||||
"""
|
||||
py_so_files = shprint(sh.find, dirn, '-iname', '*.so')
|
||||
filens = py_so_files.stdout.decode('utf-8').split('\n')[:-1]
|
||||
for filen in filens:
|
||||
file_dirname, file_basename = split(filen)
|
||||
parts = file_basename.split('.')
|
||||
if len(parts) <= 2:
|
||||
continue
|
||||
shprint(sh.mv, filen, join(file_dirname, parts[0] + '.so'))
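The renaming rule above, mirrored as a standalone helper with an assumed example filename:

from os.path import join, split


def reduced_so_name(path):
    # Strip the host-specific suffix so that, for example,
    # '_sqlite3.cpython-36m-x86_64-linux-gnu.so' becomes '_sqlite3.so'.
    file_dirname, file_basename = split(path)
    parts = file_basename.split('.')
    if len(parts) <= 2:
        return path  # plain 'name.so' files are left untouched above too
    return join(file_dirname, parts[0] + '.so')


print(reduced_so_name('site-packages/_sqlite3.cpython-36m-x86_64-linux-gnu.so'))
# site-packages/_sqlite3.so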
|
||||
|
||||
|
||||
def md5sum(filen):
|
||||
|
|
59
p4a/pythonforandroid/recipes/Pillow/__init__.py
Normal file
|
@ -0,0 +1,59 @@
|
|||
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
from os.path import join


class PillowRecipe(CompiledComponentsPythonRecipe):

    version = '5.2.0'
    url = 'https://github.com/python-pillow/Pillow/archive/{version}.tar.gz'
    site_packages_name = 'Pillow'
    depends = ['png', 'jpeg', 'freetype', 'setuptools']
    patches = [join('patches', 'fix-docstring.patch'),
               join('patches', 'fix-setup.patch')]

    call_hostpython_via_targetpython = False

    def get_recipe_env(self, arch=None, with_flags_in_cc=True):
        env = super(PillowRecipe, self).get_recipe_env(arch, with_flags_in_cc)

        env['ANDROID_ROOT'] = join(self.ctx.ndk_platform, 'usr')
        ndk_lib_dir = join(self.ctx.ndk_platform, 'usr', 'lib')
        ndk_include_dir = join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include')

        png = self.get_recipe('png', self.ctx)
        png_lib_dir = png.get_lib_dir(arch)
        png_jni_dir = png.get_jni_dir(arch)

        jpeg = self.get_recipe('jpeg', self.ctx)
        jpeg_inc_dir = jpeg_lib_dir = jpeg.get_build_dir(arch.arch)

        freetype = self.get_recipe('freetype', self.ctx)
        free_lib_dir = join(freetype.get_build_dir(arch.arch), 'objs', '.libs')
        free_inc_dir = join(freetype.get_build_dir(arch.arch), 'include')

        # harfbuzz is a direct dependency of freetype and we need the proper
        # flags to successfully build the Pillow recipe, so we add them here.
        harfbuzz = self.get_recipe('harfbuzz', self.ctx)
        harf_lib_dir = join(harfbuzz.get_build_dir(arch.arch), 'src', '.libs')
        harf_inc_dir = harfbuzz.get_build_dir(arch.arch)

        env['JPEG_ROOT'] = '{}|{}'.format(jpeg_lib_dir, jpeg_inc_dir)
        env['FREETYPE_ROOT'] = '{}|{}'.format(free_lib_dir, free_inc_dir)
        env['ZLIB_ROOT'] = '{}|{}'.format(ndk_lib_dir, ndk_include_dir)

        cflags = ' -I{}'.format(png_jni_dir)
        cflags += ' -I{} -I{}'.format(harf_inc_dir, join(harf_inc_dir, 'src'))
        cflags += ' -I{}'.format(free_inc_dir)
        cflags += ' -I{}'.format(jpeg_inc_dir)
        cflags += ' -I{}'.format(ndk_include_dir)

        env['LIBS'] = ' -lpng -lfreetype -lharfbuzz -ljpeg -lturbojpeg'

        env['LDFLAGS'] += ' -L{} -L{} -L{} -L{}'.format(
            png_lib_dir, harf_lib_dir, jpeg_lib_dir, ndk_lib_dir)
        if cflags not in env['CFLAGS']:
            env['CFLAGS'] += cflags
        return env


recipe = PillowRecipe()
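The `*_ROOT` variables above use a `lib_dir|include_dir` convention that the patched setup.py (see fix-setup.patch further below) splits back into a tuple; a hypothetical round trip with assumed paths:

import os

# Set the way the recipe's get_recipe_env() does it:
os.environ['JPEG_ROOT'] = '/build/jpeg/libs|/build/jpeg/include'

# Read back the way the patched setup.py does it:
JPEG_ROOT = tuple(os.environ['JPEG_ROOT'].split('|'))
assert JPEG_ROOT == ('/build/jpeg/libs', '/build/jpeg/include')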
|
|
@ -0,0 +1,13 @@
|
|||
diff --git a/src/PIL/__init__.py b/src/PIL/__init__.py
|
||||
index a07280e..6b9fe99 100644
|
||||
--- a/src/PIL/__init__.py
|
||||
+++ b/src/PIL/__init__.py
|
||||
@@ -24,7 +24,7 @@ PILLOW_VERSION = __version__ = _version.__version__
|
||||
|
||||
del _version
|
||||
|
||||
-__doc__ = __doc__.format(__version__) # include version in docstring
|
||||
+__doc__ = ''
|
||||
|
||||
|
||||
_plugins = ['BlpImagePlugin',
|
148
p4a/pythonforandroid/recipes/Pillow/patches/fix-setup.patch
Normal file
|
@ -0,0 +1,148 @@
|
|||
diff --git a/setup.py b/setup.py
|
||||
index 761d552..4ddc598 100755
|
||||
--- a/setup.py
|
||||
+++ b/setup.py
|
||||
@@ -136,12 +136,12 @@ except (ImportError, OSError):
|
||||
|
||||
NAME = 'Pillow'
|
||||
PILLOW_VERSION = get_version()
|
||||
-JPEG_ROOT = None
|
||||
+JPEG_ROOT = tuple(os.environ['JPEG_ROOT'].split('|')) if 'JPEG_ROOT' in os.environ else None
|
||||
JPEG2K_ROOT = None
|
||||
-ZLIB_ROOT = None
|
||||
+ZLIB_ROOT = tuple(os.environ['ZLIB_ROOT'].split('|')) if 'ZLIB_ROOT' in os.environ else None
|
||||
IMAGEQUANT_ROOT = None
|
||||
TIFF_ROOT = None
|
||||
-FREETYPE_ROOT = None
|
||||
+FREETYPE_ROOT = tuple(os.environ['FREETYPE_ROOT'].split('|')) if 'FREETYPE_ROOT' in os.environ else None
|
||||
LCMS_ROOT = None
|
||||
|
||||
|
||||
@@ -194,7 +194,7 @@ class pil_build_ext(build_ext):
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
- self.disable_platform_guessing = None
|
||||
+ self.disable_platform_guessing = True
|
||||
build_ext.initialize_options(self)
|
||||
for x in self.feature:
|
||||
setattr(self, 'disable_%s' % x, None)
|
||||
@@ -466,61 +466,6 @@ class pil_build_ext(build_ext):
|
||||
feature.jpeg = "libjpeg" # alternative name
|
||||
|
||||
feature.openjpeg_version = None
|
||||
- if feature.want('jpeg2000'):
|
||||
- _dbg('Looking for jpeg2000')
|
||||
- best_version = None
|
||||
- best_path = None
|
||||
-
|
||||
- # Find the best version
|
||||
- for directory in self.compiler.include_dirs:
|
||||
- _dbg('Checking for openjpeg-#.# in %s', directory)
|
||||
- try:
|
||||
- listdir = os.listdir(directory)
|
||||
- except Exception:
|
||||
- # WindowsError, FileNotFoundError
|
||||
- continue
|
||||
- for name in listdir:
|
||||
- if name.startswith('openjpeg-') and \
|
||||
- os.path.isfile(os.path.join(directory, name,
|
||||
- 'openjpeg.h')):
|
||||
- _dbg('Found openjpeg.h in %s/%s', (directory, name))
|
||||
- version = tuple(int(x) for x in name[9:].split('.'))
|
||||
- if best_version is None or version > best_version:
|
||||
- best_version = version
|
||||
- best_path = os.path.join(directory, name)
|
||||
- _dbg('Best openjpeg version %s so far in %s',
|
||||
- (best_version, best_path))
|
||||
-
|
||||
- if best_version and _find_library_file(self, 'openjp2'):
|
||||
- # Add the directory to the include path so we can include
|
||||
- # <openjpeg.h> rather than having to cope with the versioned
|
||||
- # include path
|
||||
- # FIXME (melvyn-sopacua):
|
||||
- # At this point it's possible that best_path is already in
|
||||
- # self.compiler.include_dirs. Should investigate how that is
|
||||
- # possible.
|
||||
- _add_directory(self.compiler.include_dirs, best_path, 0)
|
||||
- feature.jpeg2000 = 'openjp2'
|
||||
- feature.openjpeg_version = '.'.join(str(x) for x in best_version)
|
||||
-
|
||||
- if feature.want('imagequant'):
|
||||
- _dbg('Looking for imagequant')
|
||||
- if _find_include_file(self, 'libimagequant.h'):
|
||||
- if _find_library_file(self, "imagequant"):
|
||||
- feature.imagequant = "imagequant"
|
||||
- elif _find_library_file(self, "libimagequant"):
|
||||
- feature.imagequant = "libimagequant"
|
||||
-
|
||||
- if feature.want('tiff'):
|
||||
- _dbg('Looking for tiff')
|
||||
- if _find_include_file(self, 'tiff.h'):
|
||||
- if _find_library_file(self, "tiff"):
|
||||
- feature.tiff = "tiff"
|
||||
- if sys.platform == "win32" and _find_library_file(self, "libtiff"):
|
||||
- feature.tiff = "libtiff"
|
||||
- if (sys.platform == "darwin" and
|
||||
- _find_library_file(self, "libtiff")):
|
||||
- feature.tiff = "libtiff"
|
||||
|
||||
if feature.want('freetype'):
|
||||
_dbg('Looking for freetype')
|
||||
@@ -546,36 +491,6 @@ class pil_build_ext(build_ext):
|
||||
if subdir:
|
||||
_add_directory(self.compiler.include_dirs, subdir, 0)
|
||||
|
||||
- if feature.want('lcms'):
|
||||
- _dbg('Looking for lcms')
|
||||
- if _find_include_file(self, "lcms2.h"):
|
||||
- if _find_library_file(self, "lcms2"):
|
||||
- feature.lcms = "lcms2"
|
||||
- elif _find_library_file(self, "lcms2_static"):
|
||||
- # alternate Windows name.
|
||||
- feature.lcms = "lcms2_static"
|
||||
-
|
||||
- if feature.want('webp'):
|
||||
- _dbg('Looking for webp')
|
||||
- if (_find_include_file(self, "webp/encode.h") and
|
||||
- _find_include_file(self, "webp/decode.h")):
|
||||
- # In Google's precompiled zip it is call "libwebp":
|
||||
- if _find_library_file(self, "webp"):
|
||||
- feature.webp = "webp"
|
||||
- elif _find_library_file(self, "libwebp"):
|
||||
- feature.webp = "libwebp"
|
||||
-
|
||||
- if feature.want('webpmux'):
|
||||
- _dbg('Looking for webpmux')
|
||||
- if (_find_include_file(self, "webp/mux.h") and
|
||||
- _find_include_file(self, "webp/demux.h")):
|
||||
- if (_find_library_file(self, "webpmux") and
|
||||
- _find_library_file(self, "webpdemux")):
|
||||
- feature.webpmux = "webpmux"
|
||||
- if (_find_library_file(self, "libwebpmux") and
|
||||
- _find_library_file(self, "libwebpdemux")):
|
||||
- feature.webpmux = "libwebpmux"
|
||||
-
|
||||
for f in feature:
|
||||
if not getattr(feature, f) and feature.require(f):
|
||||
if f in ('jpeg', 'zlib'):
|
||||
@@ -612,8 +527,6 @@ class pil_build_ext(build_ext):
|
||||
defs.append(("HAVE_LIBTIFF", None))
|
||||
if sys.platform == "win32":
|
||||
libs.extend(["kernel32", "user32", "gdi32"])
|
||||
- if struct.unpack("h", "\0\1".encode('ascii'))[0] == 1:
|
||||
- defs.append(("WORDS_BIGENDIAN", None))
|
||||
|
||||
if sys.platform == "win32" and not (PLATFORM_PYPY or PLATFORM_MINGW):
|
||||
defs.append(("PILLOW_VERSION", '"\\"%s\\""' % PILLOW_VERSION))
|
||||
@@ -658,10 +571,6 @@ class pil_build_ext(build_ext):
|
||||
define_macros=defs))
|
||||
|
||||
tk_libs = ['psapi'] if sys.platform == 'win32' else []
|
||||
- exts.append(Extension("PIL._imagingtk",
|
||||
- ["src/_imagingtk.c", "src/Tk/tkImaging.c"],
|
||||
- include_dirs=['src/Tk'],
|
||||
- libraries=tk_libs))
|
||||
|
||||
exts.append(Extension("PIL._imagingmath", ["src/_imagingmath.c"]))
|
||||
exts.append(Extension("PIL._imagingmorph", ["src/_imagingmorph.c"]))
|
|
@ -1,3 +1,4 @@
|
|||
from __future__ import unicode_literals
|
||||
from pythonforandroid.recipe import CythonRecipe, IncludedFilesBehaviour
|
||||
from pythonforandroid.util import current_directory
|
||||
from pythonforandroid.patching import will_build
|
||||
|
@ -13,7 +14,8 @@ class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe):
|
|||
|
||||
src_filename = 'src'
|
||||
|
||||
depends = [('pygame', 'sdl2', 'genericndkbuild'), ('python2', 'python3crystax')]
|
||||
depends = [('pygame', 'sdl2', 'genericndkbuild'),
|
||||
'pyjnius']
|
||||
|
||||
config_env = {}
|
||||
|
||||
|
@ -24,26 +26,35 @@ class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe):
|
|||
|
||||
def prebuild_arch(self, arch):
|
||||
super(AndroidRecipe, self).prebuild_arch(arch)
|
||||
ctx_bootstrap = self.ctx.bootstrap.name
|
||||
|
||||
# define macros for Cython, C, Python
|
||||
tpxi = 'DEF {} = {}\n'
|
||||
th = '#define {} {}\n'
|
||||
tpy = '{} = {}\n'
|
||||
|
||||
bootstrap = bootstrap_name = self.ctx.bootstrap.name
|
||||
is_sdl2 = bootstrap_name in ('sdl2', 'sdl2python3')
|
||||
# make sure bootstrap name is in unicode
|
||||
if isinstance(ctx_bootstrap, bytes):
|
||||
ctx_bootstrap = ctx_bootstrap.decode('utf-8')
|
||||
bootstrap = bootstrap_name = ctx_bootstrap
|
||||
|
||||
is_sdl2 = bootstrap_name in ('sdl2', 'sdl2python3', 'sdl2_gradle')
|
||||
is_pygame = bootstrap_name in ('pygame',)
|
||||
is_webview = bootstrap_name in ('webview',)
|
||||
|
||||
if is_sdl2 or is_webview:
|
||||
if is_sdl2:
|
||||
bootstrap = 'sdl2'
|
||||
java_ns = 'org.kivy.android'
|
||||
jni_ns = 'org/kivy/android'
|
||||
java_ns = u'org.kivy.android'
|
||||
jni_ns = u'org/kivy/android'
|
||||
elif is_pygame:
|
||||
java_ns = 'org.renpy.android'
|
||||
jni_ns = 'org/renpy/android'
|
||||
java_ns = u'org.renpy.android'
|
||||
jni_ns = u'org/renpy/android'
|
||||
else:
|
||||
logger.error('unsupported bootstrap for android recipe: {}'.format(bootstrap_name))
|
||||
logger.error((
|
||||
'unsupported bootstrap for android recipe: {}'
|
||||
''.format(bootstrap_name)
|
||||
))
|
||||
exit(1)
|
||||
|
||||
config = {
|
||||
|
@ -55,22 +66,30 @@ class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe):
|
|||
'JNI_NAMESPACE': jni_ns,
|
||||
}
|
||||
|
||||
with current_directory(self.get_build_dir(arch.arch)):
|
||||
with open(join('android', 'config.pxi'), 'w') as fpxi:
|
||||
with open(join('android', 'config.h'), 'w') as fh:
|
||||
with open(join('android', 'config.py'), 'w') as fpy:
|
||||
for key, value in config.items():
|
||||
fpxi.write(tpxi.format(key, repr(value)))
|
||||
fpy.write(tpy.format(key, repr(value)))
|
||||
fh.write(th.format(key, value if isinstance(value, int)
|
||||
else '"{}"'.format(value)))
|
||||
self.config_env[key] = str(value)
|
||||
# create config files for Cython, C and Python
|
||||
with (
|
||||
current_directory(self.get_build_dir(arch.arch))), (
|
||||
open(join('android', 'config.pxi'), 'w')) as fpxi, (
|
||||
open(join('android', 'config.h'), 'w')) as fh, (
|
||||
open(join('android', 'config.py'), 'w')) as fpy:
|
||||
|
||||
if is_sdl2:
|
||||
fh.write('JNIEnv *SDL_AndroidGetJNIEnv(void);\n')
|
||||
fh.write('#define SDL_ANDROID_GetJNIEnv SDL_AndroidGetJNIEnv\n')
|
||||
elif is_pygame:
|
||||
fh.write('JNIEnv *SDL_ANDROID_GetJNIEnv(void);\n')
|
||||
for key, value in config.items():
|
||||
fpxi.write(tpxi.format(key, repr(value)))
|
||||
fpy.write(tpy.format(key, repr(value)))
|
||||
|
||||
fh.write(th.format(
|
||||
key,
|
||||
value if isinstance(value, int) else '"{}"'.format(value)
|
||||
))
|
||||
self.config_env[key] = str(value)
|
||||
|
||||
if is_sdl2:
|
||||
fh.write('JNIEnv *SDL_AndroidGetJNIEnv(void);\n')
|
||||
fh.write(
|
||||
'#define SDL_ANDROID_GetJNIEnv SDL_AndroidGetJNIEnv\n'
|
||||
)
|
||||
elif is_pygame:
|
||||
fh.write('JNIEnv *SDL_ANDROID_GetJNIEnv(void);\n')
|
||||
|
||||
|
||||
recipe = AndroidRecipe()
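The loop above writes the same key/value set in three syntaxes (Cython, C and Python); an illustrative standalone reproduction with assumed config values, printing to stdout instead of the android/config.* files:

tpxi = 'DEF {} = {}\n'
th = '#define {} {}\n'
tpy = '{} = {}\n'

config = {'BOOTSTRAP': 'sdl2', 'JAVA_NAMESPACE': 'org.kivy.android'}

for key, value in config.items():
    print(tpxi.format(key, repr(value)), end='')   # android/config.pxi line
    print(tpy.format(key, repr(value)), end='')    # android/config.py line
    print(th.format(key, value if isinstance(value, int)
                    else '"{}"'.format(value)), end='')  # android/config.h line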
|
||||
|
|
|
@ -5,4 +5,4 @@ Android module
|
|||
'''
|
||||
|
||||
# legacy import
|
||||
from android._android import *
|
||||
from android._android import * # noqa: F401, F403
|
||||
|
|
|
@ -175,13 +175,13 @@ api_version = autoclass('android.os.Build$VERSION').SDK_INT
|
|||
version_codes = autoclass('android.os.Build$VERSION_CODES')
|
||||
|
||||
|
||||
python_act = autoclass(JAVA_NAMESPACE + '.PythonActivity')
|
||||
Rect = autoclass('android.graphics.Rect')
|
||||
python_act = autoclass(JAVA_NAMESPACE + u'.PythonActivity')
|
||||
Rect = autoclass(u'android.graphics.Rect')
|
||||
mActivity = python_act.mActivity
|
||||
if mActivity:
|
||||
# PyGame backend already has the listener so adding
|
||||
# one here leads to a crash/too much cpu usage.
|
||||
# SDL2 now does noe need the listener so there is
|
||||
# SDL2 now does not need the listener so there is
|
||||
# no point adding a processor-intensive layout listener here.
|
||||
height = 0
|
||||
def get_keyboard_height():
|
||||
|
@ -332,7 +332,7 @@ class AndroidBrowser(object):
|
|||
return open_url(url)
|
||||
|
||||
import webbrowser
|
||||
webbrowser.register('android', AndroidBrowser, None, -1)
|
||||
webbrowser.register('android', AndroidBrowser)
|
||||
|
||||
cdef extern void android_start_service(char *, char *, char *)
|
||||
def start_service(title=None, description=None, arg=None):
|
||||
|
|
|
@ -1,11 +1,13 @@
|
|||
from jnius import PythonJavaClass, java_method, autoclass, cast
|
||||
from jnius import PythonJavaClass, autoclass, java_method
|
||||
from android.config import JAVA_NAMESPACE, JNI_NAMESPACE
|
||||
|
||||
_activity = autoclass(JAVA_NAMESPACE + '.PythonActivity').mActivity
|
||||
|
||||
_callbacks = {
|
||||
'on_new_intent': [],
|
||||
'on_activity_result': [] }
|
||||
'on_activity_result': [],
|
||||
}
|
||||
|
||||
|
||||
class NewIntentListener(PythonJavaClass):
|
||||
__javainterfaces__ = [JNI_NAMESPACE + '/PythonActivity$NewIntentListener']
|
||||
|
@ -46,6 +48,7 @@ def bind(**kwargs):
|
|||
_activity.registerActivityResultListener(listener)
|
||||
_callbacks[event].append(listener)
|
||||
|
||||
|
||||
def unbind(**kwargs):
|
||||
for event, callback in kwargs.items():
|
||||
if event not in _callbacks:
|
||||
|
@ -58,4 +61,3 @@ def unbind(**kwargs):
|
|||
_activity.unregisterNewIntentListener(listener)
|
||||
elif event == 'on_activity_result':
|
||||
_activity.unregisterActivityResultListener(listener)
|
||||
|
||||
|
|
|
@ -3,5 +3,3 @@ Android Billing API
|
|||
===================
|
||||
|
||||
'''
|
||||
|
||||
from android._android_billing import *
|
||||
|
|
|
@ -28,7 +28,7 @@ class BroadcastReceiver(object):
|
|||
|
||||
def _expand_partial_name(partial_name):
|
||||
if '.' in partial_name:
|
||||
return partial_name # Its actually a full dotted name
|
||||
return partial_name # Its actually a full dotted name
|
||||
else:
|
||||
name = 'ACTION_{}'.format(partial_name.upper())
|
||||
if not hasattr(Intent, name):
|
||||
|
@ -61,8 +61,8 @@ class BroadcastReceiver(object):
|
|||
Handler = autoclass('android.os.Handler')
|
||||
self.handlerthread.start()
|
||||
self.handler = Handler(self.handlerthread.getLooper())
|
||||
self.context.registerReceiver(self.receiver, self.receiver_filter, None,
|
||||
self.handler)
|
||||
self.context.registerReceiver(
|
||||
self.receiver, self.receiver_filter, None, self.handler)
|
||||
|
||||
def stop(self):
|
||||
self.context.unregisterReceiver(self.receiver)
|
||||
|
@ -76,4 +76,3 @@ class BroadcastReceiver(object):
|
|||
return PythonService.mService
|
||||
PythonActivity = autoclass(JAVA_NAMESPACE + '.PythonActivity')
|
||||
return PythonActivity.mActivity
|
||||
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
|
||||
from jnius import autoclass
|
||||
|
||||
|
||||
def hide_loading_screen():
|
||||
python_activity = autoclass('org.kivy.android.PythonActivity')
|
||||
python_activity.removeLoadingScreen()
|
|
@ -8,36 +8,45 @@ import os
|
|||
|
||||
condition = threading.Condition()
|
||||
|
||||
|
||||
def periodic():
|
||||
for i in range(0, num_channels):
|
||||
if i in channels:
|
||||
channels[i].periodic()
|
||||
|
||||
|
||||
num_channels = 8
|
||||
reserved_channels = 0
|
||||
|
||||
|
||||
def init(frequency=22050, size=-16, channels=2, buffer=4096):
|
||||
return None
|
||||
|
||||
|
||||
def pre_init(frequency=22050, size=-16, channels=2, buffersize=4096):
|
||||
return None
|
||||
|
||||
|
||||
def quit():
|
||||
stop()
|
||||
return None
|
||||
|
||||
|
||||
def stop():
|
||||
for i in range(0, num_channels):
|
||||
sound.stop(i)
|
||||
|
||||
|
||||
def pause():
|
||||
for i in range(0, num_channels):
|
||||
sound.pause(i)
|
||||
|
||||
|
||||
def unpause():
|
||||
for i in range(0, num_channels):
|
||||
sound.unpause(i)
|
||||
|
||||
|
||||
def get_busy():
|
||||
for i in range(0, num_channels):
|
||||
if sound.busy(i):
|
||||
|
@ -45,28 +54,33 @@ def get_busy():
|
|||
|
||||
return False
|
||||
|
||||
|
||||
def fadeout(time):
|
||||
# Fadeout doesn't work - it just immediately stops playback.
|
||||
stop()
|
||||
|
||||
|
||||
# A map from channel number to Channel object.
|
||||
channels = { }
|
||||
channels = {}
|
||||
|
||||
|
||||
def set_num_channels(count):
|
||||
global num_channels
|
||||
num_channels = count
|
||||
|
||||
|
||||
def get_num_channels(count):
|
||||
return num_channels
|
||||
|
||||
|
||||
def set_reserved(count):
|
||||
global reserved_channels
|
||||
reserved_channels = count
|
||||
|
||||
|
||||
def find_channel(force=False):
|
||||
|
||||
busy = [ ]
|
||||
busy = []
|
||||
|
||||
for i in range(reserved_channels, num_channels):
|
||||
c = Channel(i)
|
||||
|
@ -79,10 +93,11 @@ def find_channel(force=False):
|
|||
if not force:
|
||||
return None
|
||||
|
||||
busy.sort(key=lambda x : x.play_time)
|
||||
busy.sort(key=lambda x: x.play_time)
|
||||
|
||||
return busy[0]
|
||||
|
||||
|
||||
class ChannelImpl(object):
|
||||
|
||||
def __init__(self, id):
|
||||
|
@ -101,7 +116,6 @@ class ChannelImpl(object):
|
|||
if self.loop is not None and sound.queue_depth(self.id) < 2:
|
||||
self.queue(self.loop, loops=1)
|
||||
|
||||
|
||||
def play(self, s, loops=0, maxtime=0, fade_ms=0):
|
||||
if loops:
|
||||
self.loop = s
|
||||
|
@ -181,7 +195,8 @@ def Channel(n):
|
|||
|
||||
|
||||
sound_serial = 0
|
||||
sounds = { }
|
||||
sounds = {}
|
||||
|
||||
|
||||
class Sound(object):
|
||||
|
||||
|
@ -196,10 +211,10 @@ class Sound(object):
|
|||
self.serial = str(sound_serial)
|
||||
sound_serial += 1
|
||||
|
||||
if isinstance(what, file):
|
||||
if isinstance(what, file): # noqa F821
|
||||
self.file = what
|
||||
else:
|
||||
self.file = file(os.path.abspath(what), "rb")
|
||||
self.file = file(os.path.abspath(what), "rb") # noqa F821
|
||||
|
||||
sounds[self.serial] = self
|
||||
|
||||
|
@ -214,7 +229,6 @@ class Sound(object):
|
|||
channel.play(self, loops=loops)
|
||||
return channel
|
||||
|
||||
|
||||
def stop(self):
|
||||
for i in range(0, num_channels):
|
||||
if Channel(i).get_sound() is self:
|
||||
|
@ -244,9 +258,11 @@ class Sound(object):
|
|||
def get_length(self):
|
||||
return 1.0
|
||||
|
||||
|
||||
music_channel = Channel(256)
|
||||
music_sound = None
|
||||
|
||||
|
||||
class music(object):
|
||||
|
||||
@staticmethod
|
||||
|
@ -306,6 +322,3 @@ class music(object):
|
|||
@staticmethod
|
||||
def queue(filename):
|
||||
return music_channel.queue(Sound(filename))
|
||||
|
||||
|
||||
|
||||
|
|
438
p4a/pythonforandroid/recipes/android/src/android/permissions.py
Normal file
|
@ -0,0 +1,438 @@
|
|||
|
||||
try:
|
||||
from jnius import autoclass
|
||||
except ImportError:
|
||||
# To allow importing by build/manifest-creating code without
|
||||
# pyjnius being present:
|
||||
def autoclass(item):
|
||||
raise RuntimeError("pyjnius not available")
|
||||
|
||||
|
||||
class Permission:
|
||||
ACCEPT_HANDOVER = "android.permission.ACCEPT_HANDOVER"
|
||||
ACCESS_COARSE_LOCATION = "android.permission.ACCESS_COARSE_LOCATION"
|
||||
ACCESS_LOCATION_EXTRA_COMMANDS = (
|
||||
"android.permission.ACCESS_LOCATION_EXTRA_COMMANDS"
|
||||
)
|
||||
ACCESS_NETWORK_STATE = "android.permission.ACCESS_NETWORK_STATE"
|
||||
ACCESS_NOTIFICATION_POLICY = (
|
||||
"android.permission.ACCESS_NOTIFICATION_POLICY"
|
||||
)
|
||||
ACCESS_WIFI_STATE = "android.permission.ACCESS_WIFI_STATE"
|
||||
ADD_VOICEMAIL = "com.android.voicemail.permission.ADD_VOICEMAIL"
|
||||
ANSWER_PHONE_CALLS = "android.permission.ANSWER_PHONE_CALLS"
|
||||
BATTERY_STATS = "android.permission.BATTERY_STATS"
|
||||
BIND_ACCESSIBILITY_SERVICE = (
|
||||
"android.permission.BIND_ACCESSIBILITY_SERVICE"
|
||||
)
|
||||
BIND_AUTOFILL_SERVICE = "android.permission.BIND_AUTOFILL_SERVICE"
|
||||
BIND_CARRIER_MESSAGING_SERVICE = ( # note: deprecated in api 23+
|
||||
"android.permission.BIND_CARRIER_MESSAGING_SERVICE"
|
||||
)
|
||||
BIND_CARRIER_SERVICES = ( # replaces BIND_CARRIER_MESSAGING_SERVICE
|
||||
"android.permission.BIND_CARRIER_SERVICES"
|
||||
)
|
||||
BIND_CHOOSER_TARGET_SERVICE = (
|
||||
"android.permission.BIND_CHOOSER_TARGET_SERVICE"
|
||||
)
|
||||
BIND_CONDITION_PROVIDER_SERVICE = (
|
||||
"android.permission.BIND_CONDITION_PROVIDER_SERVICE"
|
||||
)
|
||||
BIND_DEVICE_ADMIN = "android.permission.BIND_DEVICE_ADMIN"
|
||||
BIND_DREAM_SERVICE = "android.permission.BIND_DREAM_SERVICE"
|
||||
BIND_INCALL_SERVICE = "android.permission.BIND_INCALL_SERVICE"
|
||||
BIND_INPUT_METHOD = (
|
||||
"android.permission.BIND_INPUT_METHOD"
|
||||
)
|
||||
BIND_MIDI_DEVICE_SERVICE = (
|
||||
"android.permission.BIND_MIDI_DEVICE_SERVICE"
|
||||
)
|
||||
BIND_NFC_SERVICE = (
|
||||
"android.permission.BIND_NFC_SERVICE"
|
||||
)
|
||||
BIND_NOTIFICATION_LISTENER_SERVICE = (
|
||||
"android.permission.BIND_NOTIFICATION_LISTENER_SERVICE"
|
||||
)
|
||||
BIND_PRINT_SERVICE = (
|
||||
"android.permission.BIND_PRINT_SERVICE"
|
||||
)
|
||||
BIND_QUICK_SETTINGS_TILE = (
|
||||
"android.permission.BIND_QUICK_SETTINGS_TILE"
|
||||
)
|
||||
BIND_REMOTEVIEWS = (
|
||||
"android.permission.BIND_REMOTEVIEWS"
|
||||
)
|
||||
BIND_SCREENING_SERVICE = (
|
||||
"android.permission.BIND_SCREENING_SERVICE"
|
||||
)
|
||||
BIND_TELECOM_CONNECTION_SERVICE = (
|
||||
"android.permission.BIND_TELECOM_CONNECTION_SERVICE"
|
||||
)
|
||||
BIND_TEXT_SERVICE = (
|
||||
"android.permission.BIND_TEXT_SERVICE"
|
||||
)
|
||||
BIND_TV_INPUT = (
|
||||
"android.permission.BIND_TV_INPUT"
|
||||
)
|
||||
BIND_VISUAL_VOICEMAIL_SERVICE = (
|
||||
"android.permission.BIND_VISUAL_VOICEMAIL_SERVICE"
|
||||
)
|
||||
BIND_VOICE_INTERACTION = (
|
||||
"android.permission.BIND_VOICE_INTERACTION"
|
||||
)
|
||||
BIND_VPN_SERVICE = (
|
||||
"android.permission.BIND_VPN_SERVICE"
|
||||
)
|
||||
BIND_VR_LISTENER_SERVICE = (
|
||||
"android.permission.BIND_VR_LISTENER_SERVICE"
|
||||
)
|
||||
BIND_WALLPAPER = (
|
||||
"android.permission.BIND_WALLPAPER"
|
||||
)
|
||||
BLUETOOTH = (
|
||||
"android.permission.BLUETOOTH"
|
||||
)
|
||||
BLUETOOTH_ADMIN = (
|
||||
"android.permission.BLUETOOTH_ADMIN"
|
||||
)
|
||||
BODY_SENSORS = (
|
||||
"android.permission.BODY_SENSORS"
|
||||
)
|
||||
BROADCAST_PACKAGE_REMOVED = (
|
||||
"android.permission.BROADCAST_PACKAGE_REMOVED"
|
||||
)
|
||||
BROADCAST_STICKY = (
|
||||
"android.permission.BROADCAST_STICKY"
|
||||
)
|
||||
CALL_PHONE = (
|
||||
"android.permission.CALL_PHONE"
|
||||
)
|
||||
CALL_PRIVILEGED = (
|
||||
"android.permission.CALL_PRIVILEGED"
|
||||
)
|
||||
CAMERA = (
|
||||
"android.permission.CAMERA"
|
||||
)
|
||||
CAPTURE_AUDIO_OUTPUT = (
|
||||
"android.permission.CAPTURE_AUDIO_OUTPUT"
|
||||
)
|
||||
CAPTURE_SECURE_VIDEO_OUTPUT = (
|
||||
"android.permission.CAPTURE_SECURE_VIDEO_OUTPUT"
|
||||
)
|
||||
CAPTURE_VIDEO_OUTPUT = (
|
||||
"android.permission.CAPTURE_VIDEO_OUTPUT"
|
||||
)
|
||||
CHANGE_COMPONENT_ENABLED_STATE = (
|
||||
"android.permission.CHANGE_COMPONENT_ENABLED_STATE"
|
||||
)
|
||||
CHANGE_CONFIGURATION = (
|
||||
"android.permission.CHANGE_CONFIGURATION"
|
||||
)
|
||||
CHANGE_NETWORK_STATE = (
|
||||
"android.permission.CHANGE_NETWORK_STATE"
|
||||
)
|
||||
CHANGE_WIFI_MULTICAST_STATE = (
|
||||
"android.permission.CHANGE_WIFI_MULTICAST_STATE"
|
||||
)
|
||||
CHANGE_WIFI_STATE = (
|
||||
"android.permission.CHANGE_WIFI_STATE"
|
||||
)
|
||||
CLEAR_APP_CACHE = (
|
||||
"android.permission.CLEAR_APP_CACHE"
|
||||
)
|
||||
CONTROL_LOCATION_UPDATES = (
|
||||
"android.permission.CONTROL_LOCATION_UPDATES"
|
||||
)
|
||||
DELETE_CACHE_FILES = (
|
||||
"android.permission.DELETE_CACHE_FILES"
|
||||
)
|
||||
DELETE_PACKAGES = (
|
||||
"android.permission.DELETE_PACKAGES"
|
||||
)
|
||||
DIAGNOSTIC = (
|
||||
"android.permission.DIAGNOSTIC"
|
||||
)
|
||||
DISABLE_KEYGUARD = (
|
||||
"android.permission.DISABLE_KEYGUARD"
|
||||
)
|
||||
DUMP = (
|
||||
"android.permission.DUMP"
|
||||
)
|
||||
EXPAND_STATUS_BAR = (
|
||||
"android.permission.EXPAND_STATUS_BAR"
|
||||
)
|
||||
FACTORY_TEST = (
|
||||
"android.permission.FACTORY_TEST"
|
||||
)
|
||||
FOREGROUND_SERVICE = (
|
||||
"android.permission.FOREGROUND_SERVICE"
|
||||
)
|
||||
GET_ACCOUNTS = (
|
||||
"android.permission.GET_ACCOUNTS"
|
||||
)
|
||||
GET_ACCOUNTS_PRIVILEGED = (
|
||||
"android.permission.GET_ACCOUNTS_PRIVILEGED"
|
||||
)
|
||||
GET_PACKAGE_SIZE = (
|
||||
"android.permission.GET_PACKAGE_SIZE"
|
||||
)
|
||||
GET_TASKS = (
|
||||
"android.permission.GET_TASKS"
|
||||
)
|
||||
GLOBAL_SEARCH = (
|
||||
"android.permission.GLOBAL_SEARCH"
|
||||
)
|
||||
INSTALL_LOCATION_PROVIDER = (
|
||||
"android.permission.INSTALL_LOCATION_PROVIDER"
|
||||
)
|
||||
INSTALL_PACKAGES = (
|
||||
"android.permission.INSTALL_PACKAGES"
|
||||
)
|
||||
INSTALL_SHORTCUT = (
|
||||
"com.android.launcher.permission.INSTALL_SHORTCUT"
|
||||
)
|
||||
INSTANT_APP_FOREGROUND_SERVICE = (
|
||||
"android.permission.INSTANT_APP_FOREGROUND_SERVICE"
|
||||
)
|
||||
INTERNET = (
|
||||
"android.permission.INTERNET"
|
||||
)
|
||||
KILL_BACKGROUND_PROCESSES = (
|
||||
"android.permission.KILL_BACKGROUND_PROCESSES"
|
||||
)
|
||||
LOCATION_HARDWARE = (
|
||||
"android.permission.LOCATION_HARDWARE"
|
||||
)
|
||||
MANAGE_DOCUMENTS = (
|
||||
"android.permission.MANAGE_DOCUMENTS"
|
||||
)
|
||||
MANAGE_OWN_CALLS = (
|
||||
"android.permission.MANAGE_OWN_CALLS"
|
||||
)
|
||||
MASTER_CLEAR = (
|
||||
"android.permission.MASTER_CLEAR"
|
||||
)
|
||||
MEDIA_CONTENT_CONTROL = (
|
||||
"android.permission.MEDIA_CONTENT_CONTROL"
|
||||
)
|
||||
MODIFY_AUDIO_SETTINGS = (
|
||||
"android.permission.MODIFY_AUDIO_SETTINGS"
|
||||
)
|
||||
MODIFY_PHONE_STATE = (
|
||||
"android.permission.MODIFY_PHONE_STATE"
|
||||
)
|
||||
MOUNT_FORMAT_FILESYSTEMS = (
|
||||
"android.permission.MOUNT_FORMAT_FILESYSTEMS"
|
||||
)
|
||||
MOUNT_UNMOUNT_FILESYSTEMS = (
|
||||
"android.permission.MOUNT_UNMOUNT_FILESYSTEMS"
|
||||
)
|
||||
NFC = (
|
||||
"android.permission.NFC"
|
||||
)
|
||||
NFC_TRANSACTION_EVENT = (
|
||||
"android.permission.NFC_TRANSACTION_EVENT"
|
||||
)
|
||||
PACKAGE_USAGE_STATS = (
|
||||
"android.permission.PACKAGE_USAGE_STATS"
|
||||
)
|
||||
PERSISTENT_ACTIVITY = (
|
||||
"android.permission.PERSISTENT_ACTIVITY"
|
||||
)
|
||||
PROCESS_OUTGOING_CALLS = (
|
||||
"android.permission.PROCESS_OUTGOING_CALLS"
|
||||
)
|
||||
READ_CALENDAR = (
|
||||
"android.permission.READ_CALENDAR"
|
||||
)
|
||||
READ_CALL_LOG = (
|
||||
"android.permission.READ_CALL_LOG"
|
||||
)
|
||||
READ_CONTACTS = (
|
||||
"android.permission.READ_CONTACTS"
|
||||
)
|
||||
READ_EXTERNAL_STORAGE = (
|
||||
"android.permission.READ_EXTERNAL_STORAGE"
|
||||
)
|
||||
READ_FRAME_BUFFER = (
|
||||
"android.permission.READ_FRAME_BUFFER"
|
||||
)
|
||||
READ_INPUT_STATE = (
|
||||
"android.permission.READ_INPUT_STATE"
|
||||
)
|
||||
READ_LOGS = (
|
||||
"android.permission.READ_LOGS"
|
||||
)
|
||||
READ_PHONE_NUMBERS = (
|
||||
"android.permission.READ_PHONE_NUMBERS"
|
||||
)
|
||||
READ_PHONE_STATE = (
|
||||
"android.permission.READ_PHONE_STATE"
|
||||
)
|
||||
READ_SMS = (
|
||||
"android.permission.READ_SMS"
|
||||
)
|
||||
READ_SYNC_SETTINGS = (
|
||||
"android.permission.READ_SYNC_SETTINGS"
|
||||
)
|
||||
READ_SYNC_STATS = (
|
||||
"android.permission.READ_SYNC_STATS"
|
||||
)
|
||||
READ_VOICEMAIL = (
|
||||
"com.android.voicemail.permission.READ_VOICEMAIL"
|
||||
)
|
||||
REBOOT = (
|
||||
"android.permission.REBOOT"
|
||||
)
|
||||
RECEIVE_BOOT_COMPLETED = (
|
||||
"android.permission.RECEIVE_BOOT_COMPLETED"
|
||||
)
|
||||
RECEIVE_MMS = (
|
||||
"android.permission.RECEIVE_MMS"
|
||||
)
|
||||
RECEIVE_SMS = (
|
||||
"android.permission.RECEIVE_SMS"
|
||||
)
|
||||
RECEIVE_WAP_PUSH = (
|
||||
"android.permission.RECEIVE_WAP_PUSH"
|
||||
)
|
||||
RECORD_AUDIO = (
|
||||
"android.permission.RECORD_AUDIO"
|
||||
)
|
||||
REORDER_TASKS = (
|
||||
"android.permission.REORDER_TASKS"
|
||||
)
|
||||
REQUEST_COMPANION_RUN_IN_BACKGROUND = (
|
||||
"android.permission.REQUEST_COMPANION_RUN_IN_BACKGROUND"
|
||||
)
|
||||
REQUEST_COMPANION_USE_DATA_IN_BACKGROUND = (
|
||||
"android.permission.REQUEST_COMPANION_USE_DATA_IN_BACKGROUND"
|
||||
)
|
||||
REQUEST_DELETE_PACKAGES = (
|
||||
"android.permission.REQUEST_DELETE_PACKAGES"
|
||||
)
|
||||
REQUEST_IGNORE_BATTERY_OPTIMIZATIONS = (
|
||||
"android.permission.REQUEST_IGNORE_BATTERY_OPTIMIZATIONS"
|
||||
)
|
||||
REQUEST_INSTALL_PACKAGES = (
|
||||
"android.permission.REQUEST_INSTALL_PACKAGES"
|
||||
)
|
||||
RESTART_PACKAGES = (
|
||||
"android.permission.RESTART_PACKAGES"
|
||||
)
|
||||
SEND_RESPOND_VIA_MESSAGE = (
|
||||
"android.permission.SEND_RESPOND_VIA_MESSAGE"
|
||||
)
|
||||
SEND_SMS = (
|
||||
"android.permission.SEND_SMS"
|
||||
)
|
||||
SET_ALARM = (
|
||||
"com.android.alarm.permission.SET_ALARM"
|
||||
)
|
||||
SET_ALWAYS_FINISH = (
|
||||
"android.permission.SET_ALWAYS_FINISH"
|
||||
)
|
||||
SET_ANIMATION_SCALE = (
|
||||
"android.permission.SET_ANIMATION_SCALE"
|
||||
)
|
||||
SET_DEBUG_APP = (
|
||||
"android.permission.SET_DEBUG_APP"
|
||||
)
|
||||
SET_PREFERRED_APPLICATIONS = (
|
||||
"android.permission.SET_PREFERRED_APPLICATIONS"
|
||||
)
|
||||
SET_PROCESS_LIMIT = (
|
||||
"android.permission.SET_PROCESS_LIMIT"
|
||||
)
|
||||
SET_TIME = (
|
||||
"android.permission.SET_TIME"
|
||||
)
|
||||
SET_TIME_ZONE = (
|
||||
"android.permission.SET_TIME_ZONE"
|
||||
)
|
||||
SET_WALLPAPER = (
|
||||
"android.permission.SET_WALLPAPER"
|
||||
)
|
||||
SET_WALLPAPER_HINTS = (
|
||||
"android.permission.SET_WALLPAPER_HINTS"
|
||||
)
|
||||
SIGNAL_PERSISTENT_PROCESSES = (
|
||||
"android.permission.SIGNAL_PERSISTENT_PROCESSES"
|
||||
)
|
||||
STATUS_BAR = (
|
||||
"android.permission.STATUS_BAR"
|
||||
)
|
||||
SYSTEM_ALERT_WINDOW = (
|
||||
"android.permission.SYSTEM_ALERT_WINDOW"
|
||||
)
|
||||
TRANSMIT_IR = (
|
||||
"android.permission.TRANSMIT_IR"
|
||||
)
|
||||
UNINSTALL_SHORTCUT = (
|
||||
"com.android.launcher.permission.UNINSTALL_SHORTCUT"
|
||||
)
|
||||
UPDATE_DEVICE_STATS = (
|
||||
"android.permission.UPDATE_DEVICE_STATS"
|
||||
)
|
||||
USE_BIOMETRIC = (
|
||||
"android.permission.USE_BIOMETRIC"
|
||||
)
|
||||
USE_FINGERPRINT = (
|
||||
"android.permission.USE_FINGERPRINT"
|
||||
)
|
||||
USE_SIP = (
|
||||
"android.permission.USE_SIP"
|
||||
)
|
||||
VIBRATE = (
|
||||
"android.permission.VIBRATE"
|
||||
)
|
||||
WAKE_LOCK = (
|
||||
"android.permission.WAKE_LOCK"
|
||||
)
|
||||
WRITE_APN_SETTINGS = (
|
||||
"android.permission.WRITE_APN_SETTINGS"
|
||||
)
|
||||
WRITE_CALENDAR = (
|
||||
"android.permission.WRITE_CALENDAR"
|
||||
)
|
||||
WRITE_CALL_LOG = (
|
||||
"android.permission.WRITE_CALL_LOG"
|
||||
)
|
||||
WRITE_CONTACTS = (
|
||||
"android.permission.WRITE_CONTACTS"
|
||||
)
|
||||
WRITE_EXTERNAL_STORAGE = (
|
||||
"android.permission.WRITE_EXTERNAL_STORAGE"
|
||||
)
|
||||
WRITE_GSERVICES = (
|
||||
"android.permission.WRITE_GSERVICES"
|
||||
)
|
||||
WRITE_SECURE_SETTINGS = (
|
||||
"android.permission.WRITE_SECURE_SETTINGS"
|
||||
)
|
||||
WRITE_SETTINGS = (
|
||||
"android.permission.WRITE_SETTINGS"
|
||||
)
|
||||
WRITE_SYNC_SETTINGS = (
|
||||
"android.permission.WRITE_SYNC_SETTINGS"
|
||||
)
|
||||
WRITE_VOICEMAIL = (
|
||||
"com.android.voicemail.permission.WRITE_VOICEMAIL"
|
||||
)
|
||||


def request_permissions(permissions):
    python_activity = autoclass('org.kivy.android.PythonActivity')
    python_activity.requestPermissions(permissions)


def request_permission(permission):
    request_permissions([permission])


def check_permission(permission):
    python_activity = autoclass('org.kivy.android.PythonActivity')
    result = bool(python_activity.checkCurrentPermission(
        permission + ""
    ))
    return result
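A minimal usage sketch for the permission helpers above. The import path and bare constant names are assumptions for illustration (in a python-for-android app this module is typically reachable as android.permissions); only the call pattern comes from the code in this diff.

# Hypothetical usage; import path assumed, not part of this diff.
from android.permissions import (
    request_permissions, check_permission, RECORD_AUDIO)

if not check_permission(RECORD_AUDIO):
    # PythonActivity.requestPermissions() shows the system dialog
    # asynchronously; the result arrives via the usual Android callback.
    request_permissions([RECORD_AUDIO])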
|
|
@ -33,12 +33,13 @@ class Runnable(PythonJavaClass):
|
|||
def run(self):
|
||||
try:
|
||||
self.func(*self.args, **self.kwargs)
|
||||
except:
|
||||
except: # noqa E722
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
Runnable.__runnables__.remove(self)
|
||||
|
||||
|
||||
def run_on_ui_thread(f):
|
||||
'''Decorator to create automatically a :class:`Runnable` object with the
|
||||
function. The function will be delayed and call into the Activity thread.
|
||||
|
|
|
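For reference, a hypothetical example of the run_on_ui_thread decorator documented above; the android.runnable import path and the Activity method used are illustrative assumptions, only the decorator behaviour is taken from this diff.

# Hypothetical usage sketch; runs only inside a p4a/Kivy app.
from android.runnable import run_on_ui_thread

@run_on_ui_thread
def set_title(activity, title):
    # Executed on the Android UI thread via a posted Runnable.
    activity.setTitle(title)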
@ -6,7 +6,7 @@ lib_dict = {
|
|||
'pygame': ['sdl'],
|
||||
'sdl2': ['SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf']
|
||||
}
|
||||
sdl_libs = lib_dict[os.environ['BOOTSTRAP']] if os.environ['BOOTSTRAP'] == 'sdl2' else []
|
||||
sdl_libs = lib_dict.get(os.environ['BOOTSTRAP'], [])
|
||||
|
||||
renpy_sound = Extension('android._android_sound',
|
||||
['android/_android_sound.c', 'android/_android_sound_jni.c', ],
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
from pythonforandroid.toolchain import PythonRecipe, shprint, shutil, current_directory
|
||||
from os.path import join, exists
|
||||
from pythonforandroid.recipe import PythonRecipe
|
||||
from pythonforandroid.toolchain import current_directory, shprint
|
||||
import sh
|
||||
|
||||
|
||||
class ApswRecipe(PythonRecipe):
|
||||
version = '3.15.0-r1'
|
||||
url = 'https://github.com/rogerbinns/apsw/archive/{version}.tar.gz'
|
||||
depends = ['sqlite3', 'hostpython2', 'python2', 'setuptools']
|
||||
depends = ['sqlite3', ('python2', 'python3'), 'setuptools']
|
||||
call_hostpython_via_targetpython = False
|
||||
site_packages_name = 'apsw'
|
||||
|
||||
|
@ -17,21 +18,17 @@ class ApswRecipe(PythonRecipe):
|
|||
shprint(hostpython,
|
||||
'setup.py',
|
||||
'build_ext',
|
||||
'--enable=fts4'
|
||||
, _env=env)
|
||||
'--enable=fts4', _env=env)
|
||||
# Install python bindings
|
||||
super(ApswRecipe, self).build_arch(arch)
|
||||
|
||||
def get_recipe_env(self, arch):
|
||||
env = super(ApswRecipe, self).get_recipe_env(arch)
|
||||
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
|
||||
env['CFLAGS'] += ' -I' + env['PYTHON_ROOT'] + '/include/python2.7' + \
|
||||
' -I' + self.get_recipe('sqlite3', self.ctx).get_build_dir(arch.arch)
|
||||
# Set linker to use the correct gcc
|
||||
env['LDSHARED'] = env['CC'] + ' -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions'
|
||||
env['LDFLAGS'] += ' -L' + env['PYTHON_ROOT'] + '/lib' + \
|
||||
' -lpython2.7' + \
|
||||
' -lsqlite3'
|
||||
sqlite_recipe = self.get_recipe('sqlite3', self.ctx)
|
||||
env['CFLAGS'] += ' -I' + sqlite_recipe.get_build_dir(arch.arch)
|
||||
env['LDFLAGS'] += ' -L' + sqlite_recipe.get_lib_dir(arch)
|
||||
env['LIBS'] = env.get('LIBS', '') + ' -lsqlite3'
|
||||
return env
|
||||
|
||||
|
||||
recipe = ApswRecipe()
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe
|
||||
|
||||
|
||||
class AtomRecipe(CppCompiledComponentsPythonRecipe):
|
||||
site_packages_name = 'atom'
|
||||
version = '0.3.10'
|
||||
url = 'https://github.com/nucleic/atom/archive/master.zip'
|
||||
depends = ['python2','setuptools']
|
||||
|
||||
depends = ['setuptools']
|
||||
|
||||
|
||||
recipe = AtomRecipe()
|
||||
|
|
|
@ -1,36 +1,32 @@
|
|||
|
||||
from pythonforandroid.toolchain import CythonRecipe, shprint, current_directory, info
|
||||
import sh
|
||||
import glob
|
||||
from os.path import join, exists
|
||||
from pythonforandroid.recipe import CythonRecipe
|
||||
from os.path import join
|
||||
|
||||
|
||||
class AudiostreamRecipe(CythonRecipe):
|
||||
version = 'master'
|
||||
version = 'master'
|
||||
url = 'https://github.com/kivy/audiostream/archive/{version}.zip'
|
||||
name = 'audiostream'
|
||||
depends = ['python2', ('sdl', 'sdl2'), 'pyjnius']
|
||||
depends = [('python2', 'python3'), ('sdl', 'sdl2'), 'pyjnius']
|
||||
|
||||
def get_recipe_env(self, arch):
|
||||
env = super(AudiostreamRecipe, self).get_recipe_env(arch)
|
||||
if 'sdl' in self.ctx.recipe_build_order:
|
||||
sdl_include = 'sdl'
|
||||
sdl_mixer_include = 'sdl_mixer'
|
||||
elif 'sdl2' in self.ctx.recipe_build_order:
|
||||
sdl_include = 'SDL'
|
||||
sdl_include = 'SDL2'
|
||||
sdl_mixer_include = 'SDL2_mixer'
|
||||
|
||||
#note: audiostream library is not yet able to judge whether it is being used with sdl or with sdl2.
|
||||
#this causes linking to fail against SDL2 (compiling against SDL2 works)
|
||||
#need to find a way to fix this in audiostream's setup.py
|
||||
raise RuntimeError('Audiostream library is not yet able to configure itself to link against SDL2. Patch on audiostream library needed - any help much appreciated!')
|
||||
env['USE_SDL2'] = 'True'
|
||||
env['SDL2_INCLUDE_DIR'] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include')
|
||||
|
||||
env = super(AudiostreamRecipe, self).get_recipe_env(arch)
|
||||
env['CFLAGS'] += ' -I{jni_path}/{sdl_include}/include -I{jni_path}/{sdl_mixer_include}'.format(
|
||||
jni_path = join(self.ctx.bootstrap.build_dir, 'jni'),
|
||||
sdl_include = sdl_include,
|
||||
sdl_mixer_include = sdl_mixer_include)
|
||||
jni_path=join(self.ctx.bootstrap.build_dir, 'jni'),
|
||||
sdl_include=sdl_include,
|
||||
sdl_mixer_include=sdl_mixer_include)
|
||||
env['NDKPLATFORM'] = self.ctx.ndk_platform
|
||||
env['LIBLINK'] = 'NOTNONE' # Hacky fix. Needed by audiostream setup.py
|
||||
return env
|
||||
|
||||
|
||||
|
||||
recipe = AudiostreamRecipe()
|
||||
|
|
|
@ -2,14 +2,14 @@ from pythonforandroid.recipe import PythonRecipe
|
|||
|
||||
|
||||
class BabelRecipe(PythonRecipe):
|
||||
name = 'babel'
|
||||
version = '2.1.1'
|
||||
url = 'https://pypi.python.org/packages/source/B/Babel/Babel-{version}.tar.gz'
|
||||
name = 'babel'
|
||||
version = '2.2.0'
|
||||
url = 'https://pypi.python.org/packages/source/B/Babel/Babel-{version}.tar.gz'
|
||||
|
||||
depends = [('python2', 'python3crystax'), 'setuptools', 'pytz']
|
||||
depends = ['setuptools', 'pytz']
|
||||
|
||||
call_hostpython_via_targetpython = False
|
||||
install_in_hostpython = True
|
||||
call_hostpython_via_targetpython = False
|
||||
install_in_hostpython = True
|
||||
|
||||
|
||||
recipe = BabelRecipe()
|
||||
|
|
|
@ -1,17 +1,45 @@
|
|||
from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
|
||||
from os.path import join, exists
|
||||
from os import environ
|
||||
import sh
|
||||
|
||||
"""
|
||||
This recipe creates a custom toolchain and bootstraps Boost from source to build Boost.Build
|
||||
including python bindings
|
||||
"""
|
||||
|
||||
|
||||
class BoostRecipe(Recipe):
|
||||
version = '1.60.0'
|
||||
# Don't forget to change the URL when changing the version
|
||||
url = 'http://downloads.sourceforge.net/project/boost/boost/{version}/boost_1_60_0.tar.bz2'
|
||||
depends = ['python2']
|
||||
patches = ['disable-so-version.patch', 'use-android-libs.patch']
|
||||
# Todo: make recipe compatible with all p4a architectures
|
||||
'''
|
||||
.. note:: This recipe can be built only against API 21+ and arch armeabi-v7a
|
||||
|
||||
.. versionchanged:: 0.6.0
|
||||
Rewrote recipe to support clang's build. The following changes have
|
||||
been made:
|
||||
|
||||
- Bumped version number to 1.68.0
|
||||
- Better version handling for url
|
||||
- Added python 3 compatibility
|
||||
- Default compiler for ndk's toolchain set to clang
|
||||
- Python version will be detected via user-config.jam
|
||||
- Changed stl's lib from ``gnustl_shared`` to ``c++_shared``
|
||||
'''
|
||||
version = '1.68.0'
|
||||
url = 'http://downloads.sourceforge.net/project/boost/' \
|
||||
'boost/{version}/boost_{version_underscore}.tar.bz2'
|
||||
depends = [('python2', 'python3')]
|
||||
patches = ['disable-so-version.patch',
|
||||
'use-android-libs.patch',
|
||||
'fix-android-issues.patch']
|
||||
|
||||
@property
|
||||
def versioned_url(self):
|
||||
if self.url is None:
|
||||
return None
|
||||
return self.url.format(
|
||||
version=self.version,
|
||||
version_underscore=self.version.replace('.', '_'))
|
||||
|
||||
def should_build(self, arch):
|
||||
return not exists(join(self.get_build_dir(arch.arch), 'b2'))
|
||||
|
@ -26,9 +54,11 @@ class BoostRecipe(Recipe):
|
|||
shprint(bash, join(self.ctx.ndk_dir, 'build/tools/make-standalone-toolchain.sh'),
|
||||
'--arch=' + env['ARCH'],
|
||||
'--platform=android-' + str(self.ctx.android_api),
|
||||
'--toolchain=' + env['CROSSHOST'] + '-' + env['TOOLCHAIN_VERSION'],
|
||||
'--toolchain=' + env['CROSSHOST'] + '-' + self.ctx.toolchain_version + ':-llvm',
|
||||
'--use-llvm',
|
||||
'--stl=libc++',
|
||||
'--install-dir=' + env['CROSSHOME']
|
||||
)
|
||||
)
|
||||
# Set custom configuration
|
||||
shutil.copyfile(join(self.get_recipe_dir(), 'user-config.jam'),
|
||||
join(env['BOOST_BUILD_PATH'], 'user-config.jam'))
|
||||
|
@ -36,32 +66,39 @@ class BoostRecipe(Recipe):
|
|||
def build_arch(self, arch):
|
||||
super(BoostRecipe, self).build_arch(arch)
|
||||
env = self.get_recipe_env(arch)
|
||||
env['PYTHON_HOST'] = self.ctx.hostpython
|
||||
with current_directory(self.get_build_dir(arch.arch)):
|
||||
# Compile Boost.Build engine with this custom toolchain
|
||||
bash = sh.Command('bash')
|
||||
shprint(bash, 'bootstrap.sh',
|
||||
'--with-python=' + join(env['PYTHON_ROOT'], 'bin/python.host'),
|
||||
'--with-python-version=2.7',
|
||||
'--with-python-root=' + env['PYTHON_ROOT']
|
||||
) # Do not pass env
|
||||
shprint(bash, 'bootstrap.sh') # Do not pass env
|
||||
# Install app stl
|
||||
shutil.copyfile(join(env['CROSSHOME'], env['CROSSHOST'], 'lib/libgnustl_shared.so'),
|
||||
join(self.ctx.get_libs_dir(arch.arch), 'libgnustl_shared.so'))
|
||||
shutil.copyfile(
|
||||
join(self.ctx.ndk_dir, 'sources/cxx-stl/llvm-libc++/libs/'
|
||||
'armeabi-v7a/libc++_shared.so'),
|
||||
join(self.ctx.get_libs_dir(arch.arch), 'libc++_shared.so'))
|
||||
|
||||
def select_build_arch(self, arch):
|
||||
return arch.arch.replace('eabi-v7a', '').replace('eabi', '')
|
||||
|
||||
def get_recipe_env(self, arch):
|
||||
env = super(BoostRecipe, self).get_recipe_env(arch)
|
||||
# We don't use the normal env because we
|
||||
# are building with a standalone toolchain
|
||||
env = environ.copy()
|
||||
|
||||
env['BOOST_BUILD_PATH'] = self.get_build_dir(arch.arch) # find user-config.jam
|
||||
env['BOOST_ROOT'] = env['BOOST_BUILD_PATH'] # find boost source
|
||||
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
|
||||
|
||||
env['PYTHON_ROOT'] = self.ctx.python_recipe.link_root(arch.arch)
|
||||
env['PYTHON_INCLUDE'] = self.ctx.python_recipe.include_root(arch.arch)
|
||||
env['PYTHON_MAJOR_MINOR'] = self.ctx.python_recipe.version[:3]
|
||||
env['PYTHON_LINK_VERSION'] = self.ctx.python_recipe.major_minor_version_string
|
||||
if 'python3' in self.ctx.python_recipe.name:
|
||||
env['PYTHON_LINK_VERSION'] += 'm'
|
||||
|
||||
env['ARCH'] = self.select_build_arch(arch)
|
||||
env['ANDROIDAPI'] = str(self.ctx.android_api)
|
||||
env['CROSSHOST'] = env['ARCH'] + '-linux-androideabi'
|
||||
env['CROSSHOME'] = join(env['BOOST_ROOT'], 'standalone-' + env['ARCH'] + '-toolchain')
|
||||
env['TOOLCHAIN_PREFIX'] = join(env['CROSSHOME'], 'bin', env['CROSSHOST'])
|
||||
return env
|
||||
|
||||
|
||||
recipe = BoostRecipe()
|
||||
recipe = BoostRecipe()
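A quick worked example of the versioned_url property added above, using the url and version values from this recipe:

# Reproduces what BoostRecipe.versioned_url evaluates to for 1.68.0.
url = ('http://downloads.sourceforge.net/project/boost/'
       'boost/{version}/boost_{version_underscore}.tar.bz2')
version = '1.68.0'
print(url.format(version=version,
                 version_underscore=version.replace('.', '_')))
# http://downloads.sourceforge.net/project/boost/boost/1.68.0/boost_1_68_0.tar.bz2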
|
||||
|
|
68
p4a/pythonforandroid/recipes/boost/fix-android-issues.patch
Normal file
|
@ -0,0 +1,68 @@
|
|||
diff -u -r boost_1_68_0.orig/boost/config/user.hpp boost_1_68_0/boost/config/user.hpp
|
||||
--- boost_1_68_0.orig/boost/config/user.hpp 2018-08-01 22:50:46.000000000 +0200
|
||||
+++ boost_1_68_0/boost/config/user.hpp 2018-08-27 15:43:38.000000000 +0200
|
||||
@@ -13,6 +13,12 @@
|
||||
// configuration policy:
|
||||
//
|
||||
|
||||
+// Android defines
|
||||
+// There is problem with std::atomic on android (and some other platforms).
|
||||
+// See this link for more info:
|
||||
+// https://code.google.com/p/android/issues/detail?id=42735#makechanges
|
||||
+#define BOOST_ASIO_DISABLE_STD_ATOMIC 1
|
||||
+
|
||||
// define this to locate a compiler config file:
|
||||
// #define BOOST_COMPILER_CONFIG <myheader>
|
||||
|
||||
diff -u -r boost_1_68_0.orig/boost/asio/detail/config.hpp boost_1_68_0/boost/asio/detail/config.hpp
|
||||
--- boost_1_68_0.orig/boost/asio/detail/config.hpp 2018-08-01 22:50:46.000000000 +0200
|
||||
+++ boost_1_68_0/boost/asio/detail/config.hpp 2018-09-19 12:39:56.000000000 +0200
|
||||
@@ -804,7 +804,11 @@
|
||||
# if defined(__clang__)
|
||||
# if (__cplusplus >= 201402)
|
||||
# if __has_include(<experimental/string_view>)
|
||||
-# define BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW 1
|
||||
+# if __clang_major__ >= 7
|
||||
+# undef BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW
|
||||
+# else
|
||||
+# define BOOST_ASIO_HAS_STD_EXPERIMENTAL_STRING_VIEW 1
|
||||
+# endif // __clang_major__ >= 7
|
||||
# endif // __has_include(<experimental/string_view>)
|
||||
# endif // (__cplusplus >= 201402)
|
||||
# endif // defined(__clang__)
|
||||
diff -u -r boost_1_68_0.orig/boost/system/error_code.hpp boost_1_68_0/boost/system/error_code.hpp
|
||||
--- boost_1_68_0.orig/boost/system/error_code.hpp 2018-08-01 22:50:53.000000000 +0200
|
||||
+++ boost_1_68_0/boost/system/error_code.hpp 2018-08-27 15:44:29.000000000 +0200
|
||||
@@ -17,6 +17,7 @@
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/noncopyable.hpp>
|
||||
#include <boost/utility/enable_if.hpp>
|
||||
+#include <stdio.h>
|
||||
#include <ostream>
|
||||
#include <string>
|
||||
#include <stdexcept>
|
||||
diff -u -r boost_1_68_0.orig/libs/filesystem/src/operations.cpp boost_1_68_0/libs/filesystem/src/operations.cpp
|
||||
--- boost_1_68_0.orig/libs/filesystem/src/operations.cpp 2018-08-01 22:50:47.000000000 +0200
|
||||
+++ boost_1_68_0/libs/filesystem/src/operations.cpp 2018-08-27 15:47:15.000000000 +0200
|
||||
@@ -232,6 +232,21 @@
|
||||
|
||||
# if defined(BOOST_POSIX_API)
|
||||
|
||||
+# if defined(__ANDROID__)
|
||||
+# define truncate libboost_truncate_wrapper
|
||||
+// truncate() is present in Android libc only starting from ABI 21, so here's a simple wrapper
|
||||
+static int libboost_truncate_wrapper(const char *path, off_t length)
|
||||
+{
|
||||
+ int fd = open(path, O_WRONLY);
|
||||
+ if (fd == -1) {
|
||||
+ return -1;
|
||||
+ }
|
||||
+ int status = ftruncate(fd, length);
|
||||
+ close(fd);
|
||||
+ return status;
|
||||
+}
|
||||
+# endif
|
||||
+
|
||||
typedef int err_t;
|
||||
|
||||
// POSIX uses a 0 return to indicate success
|
|
@ -1,28 +1,61 @@
|
|||
import os ;
|
||||
|
||||
local ANDROIDNDK = [ os.environ ANDROIDNDK ] ;
|
||||
local ANDROIDAPI = [ os.environ ANDROIDAPI ] ;
|
||||
local TOOLCHAIN_VERSION = [ os.environ TOOLCHAIN_VERSION ] ;
|
||||
local TOOLCHAIN_PREFIX = [ os.environ TOOLCHAIN_PREFIX ] ;
|
||||
local ARCH = [ os.environ ARCH ] ;
|
||||
local CROSSHOME = [ os.environ CROSSHOME ] ;
|
||||
local PYTHON_HOST = [ os.environ PYTHON_HOST ] ;
|
||||
local PYTHON_ROOT = [ os.environ PYTHON_ROOT ] ;
|
||||
local PYTHON_INCLUDE = [ os.environ PYTHON_INCLUDE ] ;
|
||||
local PYTHON_LINK_VERSION = [ os.environ PYTHON_LINK_VERSION ] ;
|
||||
local PYTHON_MAJOR_MINOR = [ os.environ PYTHON_MAJOR_MINOR ] ;
|
||||
|
||||
using gcc : $(ARCH) : $(TOOLCHAIN_PREFIX)-g++ :
|
||||
using clang : $(ARCH) : $(CROSSHOME)/bin/arm-linux-androideabi-clang++ :
|
||||
<archiver>$(CROSSHOME)/bin/arm-linux-androideabi-ar
|
||||
<root>$(CROSSHOME)/sysroot
|
||||
<architecture>$(ARCH)
|
||||
<archiver>$(TOOLCHAIN_PREFIX)-ar
|
||||
<compileflags>-DBOOST_SP_USE_PTHREADS
|
||||
<compileflags>-DBOOST_AC_USE_PTHREADS
|
||||
<cxxflags>-DBOOST_SP_USE_PTHREADS
|
||||
<cxxflags>-DBOOST_AC_USE_PTHREADS
|
||||
<cxxflags>-frtti
|
||||
<cxxflags>-fexceptions
|
||||
<compileflags>-I$(ANDROIDNDK)/platforms/android-$(ANDROIDAPI)/arch-$(ARCH)/usr/include
|
||||
<compileflags>-I$(ANDROIDNDK)/sources/cxx-stl/gnu-libstdc++/$(TOOLCHAIN_VERSION)/include
|
||||
<compileflags>-I$(ANDROIDNDK)/sources/cxx-stl/gnu-libstdc++/$(TOOLCHAIN_VERSION)/libs/$(ARCH)/include
|
||||
<compileflags>-I$(PYTHON_ROOT)/include/python2.7
|
||||
<linkflags>--sysroot=$(ANDROIDNDK)/platforms/android-$(ANDROIDAPI)/arch-$(ARCH)
|
||||
<linkflags>-L$(ANDROIDNDK)/sources/cxx-stl/gnu-libstdc++/$(TOOLCHAIN_VERSION)/libs/$(ARCH)
|
||||
<linkflags>-L$(PYTHON_ROOT)/lib
|
||||
<linkflags>-lgnustl_shared
|
||||
<linkflags>-lpython2.7
|
||||
<compileflags>-fexceptions
|
||||
<compileflags>-frtti
|
||||
<compileflags>-fpic
|
||||
<compileflags>-ffunction-sections
|
||||
<compileflags>-funwind-tables
|
||||
<compileflags>-march=armv7-a
|
||||
<compileflags>-msoft-float
|
||||
<compileflags>-mfpu=neon
|
||||
<compileflags>-mthumb
|
||||
<linkflags>-march=armv7-a
|
||||
<linkflags>-Wl,--fix-cortex-a8
|
||||
<compileflags>-Os
|
||||
<compileflags>-fomit-frame-pointer
|
||||
<compileflags>-fno-strict-aliasing
|
||||
<compileflags>-DANDROID
|
||||
<compileflags>-D__ANDROID__
|
||||
<compileflags>-DANDROID_TOOLCHAIN=clang
|
||||
<compileflags>-DANDROID_ABI=armv7-a
|
||||
<compileflags>-DANDROID_STL=c++_shared
|
||||
<compileflags>-DBOOST_ALL_NO_LIB
|
||||
#<compileflags>-DNDEBUG
|
||||
<compileflags>-O2
|
||||
<compileflags>-g
|
||||
<compileflags>-fvisibility=hidden
|
||||
<compileflags>-fvisibility-inlines-hidden
|
||||
<compileflags>-fdata-sections
|
||||
<cxxflags>-D__arm__
|
||||
<cxxflags>-D_REENTRANT
|
||||
<cxxflags>-D_GLIBCXX__PTHREADS
|
||||
<compileflags>-Wno-long-long
|
||||
<compileflags>-Wno-missing-field-initializers
|
||||
<compileflags>-Wno-unused-variable
|
||||
<linkflags>-Wl,-z,relro
|
||||
<linkflags>-Wl,-z,now
|
||||
<linkflags>-lc++_shared
|
||||
<linkflags>-L$(PYTHON_ROOT)
|
||||
<linkflags>-lpython$(PYTHON_LINK_VERSION)
|
||||
<linkflags>-Wl,-O1
|
||||
<linkflags>-Wl,-Bsymbolic-functions
|
||||
;
|
||||
|
||||
using python : $(PYTHON_MAJOR_MINOR)
|
||||
: $(PYTHON_host)
|
||||
: $(PYTHON_ROOT) $(PYTHON_INCLUDE)
|
||||
: $(PYTHON_ROOT)/libpython$(PYTHON_LINK_VERSION).so
|
||||
: #<define>BOOST_ALL_DYN_LINK
|
||||
;
|
|
@ -1,5 +1,6 @@
|
|||
from pythonforandroid.toolchain import Recipe
|
||||
|
||||
|
||||
class BrokenRecipe(Recipe):
|
||||
def __init__(self):
|
||||
print('This is a broken recipe, not a real one!')
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
|
||||
from pythonforandroid.toolchain import CompiledComponentsPythonRecipe
|
||||
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
|
||||
from pythonforandroid.patching import is_darwin
|
||||
|
||||
|
||||
|
@ -8,7 +7,7 @@ class CdecimalRecipe(CompiledComponentsPythonRecipe):
|
|||
version = '2.3'
|
||||
url = 'http://www.bytereef.org/software/mpdecimal/releases/cdecimal-{version}.tar.gz'
|
||||
|
||||
depends = ['python2']
|
||||
depends = []
|
||||
|
||||
patches = ['locale.patch',
|
||||
'cross-compile.patch']
|
||||
|
|
|
@ -1,30 +1,53 @@
|
|||
import os
|
||||
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
|
||||
|
||||
|
||||
class CffiRecipe(CompiledComponentsPythonRecipe):
|
||||
name = 'cffi'
|
||||
version = '1.4.2'
|
||||
url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz'
|
||||
"""
|
||||
Extra system dependencies: autoconf, automake and libtool.
|
||||
"""
|
||||
name = 'cffi'
|
||||
version = '1.11.5'
|
||||
url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz'
|
||||
|
||||
depends = [('python2', 'python3crystax'), 'setuptools', 'pycparser', 'libffi']
|
||||
depends = ['setuptools', 'pycparser', 'libffi']
|
||||
|
||||
patches = ['disable-pkg-config.patch']
|
||||
patches = ['disable-pkg-config.patch']
|
||||
|
||||
# call_hostpython_via_targetpython = False
|
||||
install_in_hostpython = True
|
||||
# call_hostpython_via_targetpython = False
|
||||
install_in_hostpython = True
|
||||
|
||||
def get_recipe_env(self, arch=None):
|
||||
env = super(CffiRecipe, self).get_recipe_env(arch)
|
||||
libffi = self.get_recipe('libffi', self.ctx)
|
||||
includes = libffi.get_include_dirs(arch)
|
||||
env['CFLAGS'] = ' -I'.join([env.get('CFLAGS', '')] + includes)
|
||||
env['LDFLAGS'] = (env.get('CFLAGS', '') + ' -L' +
|
||||
self.ctx.get_libs_dir(arch.arch))
|
||||
env['PYTHONPATH'] = ':'.join([
|
||||
self.ctx.get_site_packages_dir(),
|
||||
env['BUILDLIB_PATH'],
|
||||
])
|
||||
return env
|
||||
def get_hostrecipe_env(self, arch=None):
|
||||
# fixes missing ffi.h on some host systems (e.g. gentoo)
|
||||
env = super(CffiRecipe, self).get_hostrecipe_env(arch)
|
||||
libffi = self.get_recipe('libffi', self.ctx)
|
||||
includes = libffi.get_include_dirs(arch)
|
||||
env['FFI_INC'] = ",".join(includes)
|
||||
return env
|
||||
|
||||
def get_recipe_env(self, arch=None):
|
||||
env = super(CffiRecipe, self).get_recipe_env(arch)
|
||||
libffi = self.get_recipe('libffi', self.ctx)
|
||||
includes = libffi.get_include_dirs(arch)
|
||||
env['CFLAGS'] = ' -I'.join([env.get('CFLAGS', '')] + includes)
|
||||
env['CFLAGS'] += ' -I{}'.format(self.ctx.python_recipe.include_root(arch.arch))
|
||||
env['LDFLAGS'] = (env.get('CFLAGS', '') + ' -L' +
|
||||
self.ctx.get_libs_dir(arch.arch))
|
||||
env['LDFLAGS'] += ' -L{}'.format(os.path.join(self.ctx.bootstrap.build_dir, 'libs', arch.arch))
|
||||
# required for libc and libdl
|
||||
ndk_dir = self.ctx.ndk_platform
|
||||
ndk_lib_dir = os.path.join(ndk_dir, 'usr', 'lib')
|
||||
env['LDFLAGS'] += ' -L{}'.format(ndk_lib_dir)
|
||||
env['LDFLAGS'] += " --sysroot={}".format(self.ctx.ndk_platform)
|
||||
env['PYTHONPATH'] = ':'.join([
|
||||
self.ctx.get_site_packages_dir(),
|
||||
env['BUILDLIB_PATH'],
|
||||
])
|
||||
env['LDFLAGS'] += ' -L{}'.format(self.ctx.python_recipe.link_root(arch.arch))
|
||||
env['LDFLAGS'] += ' -lpython{}'.format(self.ctx.python_recipe.major_minor_version_string)
|
||||
if 'python3' in self.ctx.python_recipe.name:
|
||||
env['LDFLAGS'] += 'm'
|
||||
return env
|
||||
|
||||
|
||||
recipe = CffiRecipe()
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
diff -Naur cffi-1.4.2/setup.py b/setup.py
|
||||
--- cffi-1.4.2/setup.py 2015-12-21 12:09:47.000000000 -0600
|
||||
+++ b/setup.py 2015-12-23 10:20:40.590622524 -0600
|
||||
@@ -5,8 +5,7 @@
|
||||
diff --git a/setup.py b/setup.py
|
||||
index c1db368..57311c3 100644
|
||||
--- a/setup.py
|
||||
+++ b/setup.py
|
||||
@@ -5,8 +5,7 @@ import errno
|
||||
|
||||
sources = ['c/_cffi_backend.c']
|
||||
libraries = ['ffi']
|
||||
-include_dirs = ['/usr/include/ffi',
|
||||
- '/usr/include/libffi'] # may be changed by pkg-config
|
||||
+include_dirs = []
|
||||
+include_dirs = os.environ['FFI_INC'].split(",") if 'FFI_INC' in os.environ else []
|
||||
define_macros = []
|
||||
library_dirs = []
|
||||
extra_compile_args = []
|
||||
@@ -67,14 +66,7 @@
|
||||
@@ -67,14 +66,7 @@ def ask_supports_thread():
|
||||
sys.stderr.write("The above error message can be safely ignored\n")
|
||||
|
||||
def use_pkg_config():
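A small standalone sketch of the FFI_INC hand-off between get_hostrecipe_env() and the patched setup.py above; the paths are made-up placeholders.

import os

# What the recipe would export for the host build (placeholder paths):
os.environ['FFI_INC'] = '/tmp/libffi/include,/tmp/libffi/arm/include'

# What the patched setup.py reconstructs on the other side:
include_dirs = (os.environ['FFI_INC'].split(',')
                if 'FFI_INC' in os.environ else [])
print(include_dirs)  # ['/tmp/libffi/include', '/tmp/libffi/arm/include']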
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
from pythonforandroid.toolchain import PythonRecipe
|
||||
from pythonforandroid.recipe import PythonRecipe
|
||||
|
||||
|
||||
class CherryPyRecipe(PythonRecipe):
|
||||
version = '5.1.0'
|
||||
url = 'https://bitbucket.org/cherrypy/cherrypy/get/{version}.tar.gz'
|
||||
depends = ['hostpython2', 'setuptools']
|
||||
depends = ['setuptools']
|
||||
site_packages_name = 'cherrypy'
|
||||
call_hostpython_via_targetpython = False
|
||||
|
||||
|
||||
recipe = CherryPyRecipe()
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from pythonforandroid.toolchain import PythonRecipe
|
||||
from pythonforandroid.recipe import PythonRecipe
|
||||
|
||||
|
||||
class CoverageRecipe(PythonRecipe):
|
||||
|
@ -7,7 +7,7 @@ class CoverageRecipe(PythonRecipe):
|
|||
|
||||
url = 'https://pypi.python.org/packages/2d/10/6136c8e10644c16906edf4d9f7c782c0f2e7ed47ff2f41f067384e432088/coverage-{version}.tar.gz'
|
||||
|
||||
depends = ['hostpython2', 'setuptools']
|
||||
depends = [('hostpython2', 'hostpython3'), 'setuptools']
|
||||
|
||||
patches = ['fallback-utf8.patch']
|
||||
|
||||
|
|
|
@ -1,27 +1,23 @@
|
|||
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
|
||||
from os.path import dirname, join
|
||||
from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe
|
||||
|
||||
|
||||
class CryptographyRecipe(CompiledComponentsPythonRecipe):
|
||||
name = 'cryptography'
|
||||
version = '1.4'
|
||||
version = '2.6.1'
|
||||
url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz'
|
||||
depends = [('python2', 'python3crystax'), 'openssl', 'idna', 'pyasn1', 'six', 'setuptools', 'ipaddress', 'cffi']
|
||||
depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools',
|
||||
'enum34', 'ipaddress', 'cffi']
|
||||
call_hostpython_via_targetpython = False
|
||||
|
||||
def get_recipe_env(self, arch):
|
||||
env = super(CryptographyRecipe, self).get_recipe_env(arch)
|
||||
r = self.get_recipe('openssl', self.ctx)
|
||||
openssl_dir = r.get_build_dir(arch.arch)
|
||||
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
|
||||
env['CFLAGS'] += ' -I' + env['PYTHON_ROOT'] + '/include/python2.7' + \
|
||||
' -I' + join(openssl_dir, 'include')
|
||||
# Set linker to use the correct gcc
|
||||
env['LDSHARED'] = env['CC'] + ' -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions'
|
||||
env['LDFLAGS'] += ' -L' + env['PYTHON_ROOT'] + '/lib' + \
|
||||
' -L' + openssl_dir + \
|
||||
' -lpython2.7' + \
|
||||
' -lssl' + r.version + \
|
||||
' -lcrypto' + r.version
|
||||
|
||||
openssl_recipe = Recipe.get_recipe('openssl', self.ctx)
|
||||
env['CFLAGS'] += openssl_recipe.include_flags(arch)
|
||||
env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch)
|
||||
env['LIBS'] = openssl_recipe.link_libs_flags()
|
||||
|
||||
return env
|
||||
|
||||
|
||||
recipe = CryptographyRecipe()
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from pythonforandroid.toolchain import CythonRecipe
|
||||
from pythonforandroid.recipe import CythonRecipe
|
||||
|
||||
|
||||
class CymunkRecipe(CythonRecipe):
|
||||
|
@ -6,7 +6,7 @@ class CymunkRecipe(CythonRecipe):
|
|||
url = 'https://github.com/tito/cymunk/archive/{version}.zip'
|
||||
name = 'cymunk'
|
||||
|
||||
depends = [('python2', 'python3crystax')]
|
||||
depends = [('python2', 'python3crystax', 'python3')]
|
||||
|
||||
|
||||
recipe = CymunkRecipe()
|
||||
|
|
|
@ -2,13 +2,13 @@ from pythonforandroid.recipe import PythonRecipe
|
|||
|
||||
|
||||
class DateutilRecipe(PythonRecipe):
|
||||
name = 'dateutil'
|
||||
version = '2.6.0'
|
||||
url = 'https://pypi.python.org/packages/51/fc/39a3fbde6864942e8bb24c93663734b74e281b984d1b8c4f95d64b0c21f6/python-dateutil-2.6.0.tar.gz'
|
||||
name = 'dateutil'
|
||||
version = '2.6.0'
|
||||
url = 'https://pypi.python.org/packages/51/fc/39a3fbde6864942e8bb24c93663734b74e281b984d1b8c4f95d64b0c21f6/python-dateutil-2.6.0.tar.gz'
|
||||
|
||||
depends = ['python2', "setuptools"]
|
||||
call_hostpython_via_targetpython = False
|
||||
install_in_hostpython = True
|
||||
depends = ["setuptools"]
|
||||
call_hostpython_via_targetpython = False
|
||||
install_in_hostpython = True
|
||||
|
||||
|
||||
recipe = DateutilRecipe()
|
||||
|
|
|
@ -1,10 +1,13 @@
|
|||
from pythonforandroid.toolchain import PythonRecipe
|
||||
from pythonforandroid.recipe import PythonRecipe
|
||||
|
||||
|
||||
class DecoratorPyRecipe(PythonRecipe):
|
||||
version = '4.0.9'
|
||||
version = '4.2.1'
|
||||
url = 'https://pypi.python.org/packages/source/d/decorator/decorator-{version}.tar.gz'
|
||||
depends = ['hostpython2', 'setuptools']
|
||||
url = 'https://github.com/micheles/decorator/archive/{version}.tar.gz'
|
||||
depends = ['setuptools']
|
||||
site_packages_name = 'decorator'
|
||||
call_hostpython_via_targetpython = False
|
||||
|
||||
|
||||
recipe = DecoratorPyRecipe()
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
from pythonforandroid.recipe import CppCompiledComponentsPythonRecipe
|
||||
|
||||
|
||||
class EnamlRecipe(CppCompiledComponentsPythonRecipe):
|
||||
site_packages_name = 'enaml'
|
||||
version = '0.9.8'
|
||||
url = 'https://github.com/nucleic/enaml/archive/master.zip'
|
||||
patches = ['0001-Update-setup.py.patch'] # Remove PyQt dependency
|
||||
depends = ['python2','setuptools','atom','kiwisolver']
|
||||
url = 'https://github.com/nucleic/enaml/archive/{version}.zip'
|
||||
patches = ['0001-Update-setup.py.patch'] # Remove PyQt dependency
|
||||
depends = ['setuptools', 'atom', 'kiwisolver']
|
||||
|
||||
|
||||
recipe = EnamlRecipe()
|
||||
|
|
|
@ -1,10 +1,22 @@
|
|||
from pythonforandroid.toolchain import PythonRecipe
|
||||
from pythonforandroid.recipe import PythonRecipe
|
||||
|
||||
|
||||
class Enum34Recipe(PythonRecipe):
|
||||
version = '1.1.3'
|
||||
version = '1.1.6'
|
||||
url = 'https://pypi.python.org/packages/source/e/enum34/enum34-{version}.tar.gz'
|
||||
depends = ['python2', 'setuptools']
|
||||
depends = ['setuptools']
|
||||
site_packages_name = 'enum'
|
||||
call_hostpython_via_targetpython = False
|
||||
|
||||
def should_build(self, arch):
|
||||
if 'python3' in self.ctx.python_recipe.name:
|
||||
# Since python 3.6 the enum34 library is no longer compatible with
|
||||
# the standard library and it will cause errors, so we disable it
|
||||
# in favour of the internal module, but we still add python3 to
|
||||
# attribute `depends` because otherwise we will not be able to
|
||||
# build the cryptography recipe.
|
||||
return False
|
||||
return super(Enum34Recipe, self).should_build(arch)
|
||||
|
||||
|
||||
recipe = Enum34Recipe()
|
||||
|
|
|
@ -5,7 +5,7 @@ class EthashRecipe(PythonRecipe):
|
|||
|
||||
url = 'https://github.com/ethereum/ethash/archive/master.zip'
|
||||
|
||||
depends = ['python2', 'setuptools']
|
||||
depends = ['setuptools']
|
||||
|
||||
|
||||
recipe = EthashRecipe()
|
||||
|
|
|
@ -6,7 +6,7 @@ class EvdevRecipe(CompiledComponentsPythonRecipe):
|
|||
version = 'v0.4.7'
|
||||
url = 'https://github.com/gvalkov/python-evdev/archive/{version}.zip'
|
||||
|
||||
depends = [('python2', 'python3crystax')]
|
||||
depends = []
|
||||
|
||||
build_cmd = 'build'
|
||||
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
from pythonforandroid.toolchain import PythonRecipe
|
||||
from pythonforandroid.recipe import PythonRecipe
|
||||
|
||||
|
||||
class FeedparserPyRecipe(PythonRecipe):
|
||||
version = '5.2.1'
|
||||
url = 'https://github.com/kurtmckee/feedparser/archive/{version}.tar.gz'
|
||||
depends = ['hostpython2', 'setuptools']
|
||||
depends = ['setuptools']
|
||||
site_packages_name = 'feedparser'
|
||||
call_hostpython_via_targetpython = False
|
||||
|
||||
|
||||
recipe = FeedparserPyRecipe()
|
||||
|
|
|
@ -1,19 +1,14 @@
|
|||
from pythonforandroid.toolchain import Recipe, shprint, current_directory, ArchARM
|
||||
from pythonforandroid.toolchain import Recipe, current_directory, shprint
|
||||
from os.path import exists, join, realpath
|
||||
from os import uname
|
||||
import glob
|
||||
import sh
|
||||
import os
|
||||
import shutil
|
||||
|
||||
|
||||
class FFMpegRecipe(Recipe):
|
||||
version = '3.1.8' # 3.2+ works with bugs
|
||||
version = '3.4.5'
|
||||
url = 'http://ffmpeg.org/releases/ffmpeg-{version}.tar.bz2'
|
||||
md5sum = 'f25a0cdd7f731cfbd8c0f7842b0d15b9'
|
||||
depends = ['sdl2'] # Need this to build correct recipe order
|
||||
opts_depends = ['openssl', 'ffpyplayer_codecs']
|
||||
patches = ['patches/fix-libshine-configure.patch']
|
||||
patches = ['patches/configure.patch']
|
||||
|
||||
def should_build(self, arch):
|
||||
build_dir = self.get_build_dir(arch.arch)
|
||||
|
@ -22,7 +17,7 @@ class FFMpegRecipe(Recipe):
|
|||
def prebuild_arch(self, arch):
|
||||
self.apply_patches(arch)
|
||||
|
||||
def get_recipe_env(self,arch):
|
||||
def get_recipe_env(self, arch):
|
||||
env = super(FFMpegRecipe, self).get_recipe_env(arch)
|
||||
env['NDK'] = self.ctx.ndk_dir
|
||||
return env
|
||||
|
@ -37,7 +32,7 @@ class FFMpegRecipe(Recipe):
|
|||
|
||||
if 'openssl' in self.ctx.recipe_build_order:
|
||||
flags += [
|
||||
'--enable-openssl',
|
||||
'--enable-openssl',
|
||||
'--enable-nonfree',
|
||||
'--enable-protocol=https,tls_openssl',
|
||||
]
|
||||
|
@ -45,7 +40,7 @@ class FFMpegRecipe(Recipe):
|
|||
cflags += ['-I' + build_dir + '/include/']
|
||||
ldflags += ['-L' + build_dir]
|
||||
|
||||
if 'ffpyplayer_codecs' in self.ctx.recipe_build_order:
|
||||
if 'ffpyplayer_codecs' in self.ctx.recipe_build_order:
|
||||
# libx264
|
||||
flags += ['--enable-libx264']
|
||||
build_dir = Recipe.get_recipe('libx264', self.ctx).get_build_dir(arch.arch)
|
||||
|
@ -69,16 +64,10 @@ class FFMpegRecipe(Recipe):
|
|||
else:
|
||||
# Enable codecs only for .mp4:
|
||||
flags += [
|
||||
'--enable-parser=h264,aac',
|
||||
'--enable-decoder=h263,h264,aac',
|
||||
]
|
||||
|
||||
# disable some unused algo
|
||||
# note: "golomb" is the one used in our video test, so don't use --disable-golomb
|
||||
# note: and for aac decoding: "rdft", "mdct", and "fft" are needed
|
||||
flags += [
|
||||
'--disable-dxva2 --disable-vdpau --disable-vaapi',
|
||||
'--disable-dct',
|
||||
'--enable-parser=aac,ac3,h261,h264,mpegaudio,mpeg4video,mpegvideo,vc1',
|
||||
'--enable-decoder=aac,h264,mpeg4,mpegvideo',
|
||||
'--enable-muxer=h264,mov,mp4,mpeg2video',
|
||||
'--enable-demuxer=aac,h264,m4v,mov,mpegvideo,vc1',
|
||||
]
|
||||
|
||||
# needed to prevent _ffmpeg.so: version node not found for symbol av_init_packet@LIBAVFORMAT_52
|
||||
|
@ -89,41 +78,39 @@ class FFMpegRecipe(Recipe):
|
|||
|
||||
# disable binaries / doc
|
||||
flags += [
|
||||
'--disable-ffmpeg',
|
||||
'--disable-ffplay',
|
||||
'--disable-ffprobe',
|
||||
'--disable-ffserver',
|
||||
'--disable-ffmpeg',
|
||||
'--disable-ffplay',
|
||||
'--disable-ffprobe',
|
||||
'--disable-ffserver',
|
||||
'--disable-doc',
|
||||
]
|
||||
|
||||
# other flags:
|
||||
flags += [
|
||||
'--enable-filter=aresample,resample,crop,adelay,volume',
|
||||
'--enable-filter=aresample,resample,crop,adelay,volume,scale',
|
||||
'--enable-protocol=file,http',
|
||||
'--enable-small',
|
||||
'--enable-hwaccels',
|
||||
'--enable-gpl',
|
||||
'--enable-pic',
|
||||
'--disable-static',
|
||||
'--disable-static',
|
||||
'--enable-shared',
|
||||
]
|
||||
|
||||
# android:
|
||||
flags += [
|
||||
'--target-os=android',
|
||||
'--cross-prefix=arm-linux-androideabi-',
|
||||
'--target-os=android',
|
||||
'--cross-prefix=arm-linux-androideabi-',
|
||||
'--arch=arm',
|
||||
'--sysroot=' + self.ctx.ndk_platform,
|
||||
'--enable-neon',
|
||||
'--prefix={}'.format(realpath('.')),
|
||||
]
|
||||
cflags = [
|
||||
'-march=armv7-a',
|
||||
'-mfpu=vfpv3-d16',
|
||||
'-mfloat-abi=softfp',
|
||||
'-fPIC',
|
||||
'-DANDROID',
|
||||
] + cflags
|
||||
cflags += [
|
||||
'-mfpu=vfpv3-d16',
|
||||
'-mfloat-abi=softfp',
|
||||
'-fPIC',
|
||||
]
|
||||
|
||||
env['CFLAGS'] += ' ' + ' '.join(cflags)
|
||||
env['LDFLAGS'] += ' ' + ' '.join(ldflags)
|
||||
|
@ -135,4 +122,5 @@ class FFMpegRecipe(Recipe):
|
|||
# copy libs:
|
||||
sh.cp('-a', sh.glob('./lib/lib*.so'), self.ctx.get_libs_dir(arch.arch))
|
||||
|
||||
|
||||
recipe = FFMpegRecipe()
|
||||
|
|
40
p4a/pythonforandroid/recipes/ffmpeg/patches/configure.patch
Normal file
|
@ -0,0 +1,40 @@
|
|||
--- ./configure.orig 2017-12-11 00:35:18.000000000 +0300
|
||||
+++ ./configure 2017-12-19 09:47:54.104914600 +0300
|
||||
@@ -4841,9 +4841,6 @@
|
||||
add_cflags -std=c11 ||
|
||||
check_cflags -std=c99
|
||||
|
||||
-check_cppflags -D_FILE_OFFSET_BITS=64
|
||||
-check_cppflags -D_LARGEFILE_SOURCE
|
||||
-
|
||||
add_host_cppflags -D_ISOC99_SOURCE
|
||||
check_host_cflags -std=c99
|
||||
check_host_cflags -Wall
|
||||
@@ -5979,7 +5976,7 @@
|
||||
enabled librsvg && require_pkg_config librsvg librsvg-2.0 librsvg-2.0/librsvg/rsvg.h rsvg_handle_render_cairo
|
||||
enabled librtmp && require_pkg_config librtmp librtmp librtmp/rtmp.h RTMP_Socket
|
||||
enabled librubberband && require_pkg_config librubberband "rubberband >= 1.8.1" rubberband/rubberband-c.h rubberband_new
|
||||
-enabled libshine && require_pkg_config libshine shine shine/layer3.h shine_encode_buffer
|
||||
+enabled libshine && require "shine" shine/layer3.h shine_encode_buffer -lshine
|
||||
enabled libsmbclient && { use_pkg_config libsmbclient smbclient libsmbclient.h smbc_init ||
|
||||
require smbclient libsmbclient.h smbc_init -lsmbclient; }
|
||||
enabled libsnappy && require libsnappy snappy-c.h snappy_compress -lsnappy
|
||||
|
||||
diff -Naur ffmpeg/configure ffmpeg-1/configure
|
||||
--- ffmpeg/configure 2019-01-11 09:30:02.824961600 +0100
|
||||
+++ ffmpeg-1/configure 2019-01-11 09:29:54.976149600 +0100
|
||||
@@ -6068,11 +6068,11 @@
|
||||
{ ! enabled cross_compile && add_cflags -isystem/opt/vc/include/IL && check_header OMX_Core.h ; } ||
|
||||
die "ERROR: OpenMAX IL headers not found"; }
|
||||
enabled omx && require_header OMX_Core.h
|
||||
-enabled openssl && { use_pkg_config openssl openssl openssl/ssl.h OPENSSL_init_ssl ||
|
||||
+enabled openssl && { use_pkg_config openssl openssl openssl/ssl.h OPENSSL_init_ssl ||
|
||||
use_pkg_config openssl openssl openssl/ssl.h SSL_library_init ||
|
||||
- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl -lcrypto ||
|
||||
- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl32 -leay32 ||
|
||||
- check_lib openssl openssl/ssl.h OPENSSL_init_ssl -lssl -lcrypto -lws2_32 -lgdi32 ||
|
||||
+ check_lib openssl openssl/ssl.h SSL_library_init -lssl -lcrypto ||
|
||||
+ check_lib openssl openssl/ssl.h SSL_library_init -lssl32 -leay32 ||
|
||||
+ check_lib openssl openssl/ssl.h SSL_library_init -lssl -lcrypto -lws2_32 -lgdi32 ||
|
||||
die "ERROR: openssl not found"; }
|
||||
enabled rkmpp && { { require_pkg_config rockchip_mpp rockchip_mpp rockchip/rk_mpi.h mpp_create ||
|
|
@ -1,11 +0,0 @@
|
|||
--- ./configure.orig 2016-09-19 04:41:33.000000000 +0300
|
||||
+++ ./configure 2016-12-06 19:12:05.046025000 +0300
|
||||
@@ -5260,7 +5260,7 @@
|
||||
enabled libquvi && require_pkg_config libquvi quvi/quvi.h quvi_init
|
||||
enabled librtmp && require_pkg_config librtmp librtmp/rtmp.h RTMP_Socket
|
||||
enabled libschroedinger && require_pkg_config schroedinger-1.0 schroedinger/schro.h schro_init
|
||||
-enabled libshine && require_pkg_config shine shine/layer3.h shine_encode_buffer
|
||||
+enabled libshine && require "shine" shine/layer3.h shine_encode_buffer -lshine
|
||||
enabled libsmbclient && { use_pkg_config smbclient libsmbclient.h smbc_init ||
|
||||
require smbclient libsmbclient.h smbc_init -lsmbclient; }
|
||||
enabled libsnappy && require snappy snappy-c.h snappy_compress -lsnappy
|
|
@ -1,27 +1,28 @@
|
|||
from pythonforandroid.toolchain import Recipe, CythonRecipe, shprint, current_directory, ArchARM
|
||||
from os.path import exists, join, realpath
|
||||
from os import uname
|
||||
import glob
|
||||
import sh
|
||||
import os
|
||||
from pythonforandroid.recipe import CythonRecipe
|
||||
from pythonforandroid.toolchain import Recipe
|
||||
from os.path import join
|
||||
|
||||
|
||||
class FFPyPlayerRecipe(CythonRecipe):
|
||||
version = 'master'
|
||||
version = '6f7568b498715c2da88f061ebad082a042514923'
|
||||
url = 'https://github.com/matham/ffpyplayer/archive/{version}.zip'
|
||||
depends = ['python2', 'sdl2', 'ffmpeg']
|
||||
depends = [('python2', 'python3'), 'sdl2', 'ffmpeg']
|
||||
opt_depends = ['openssl', 'ffpyplayer_codecs']
|
||||
|
||||
def get_recipe_env(self, arch, with_flags_in_cc=True):
|
||||
env = super(FFPyPlayerRecipe, self).get_recipe_env(arch)
|
||||
|
||||
env["SDL_INCLUDE_DIR"] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include')
|
||||
env["SDL_LIB_DIR"] = join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)
|
||||
|
||||
build_dir = Recipe.get_recipe('ffmpeg', self.ctx).get_build_dir(arch.arch)
|
||||
env["FFMPEG_INCLUDE_DIR"] = join(build_dir, "include")
|
||||
env["FFMPEG_LIB_DIR"] = join(build_dir, "lib")
|
||||
|
||||
env["SDL_INCLUDE_DIR"] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL', 'include')
|
||||
env["SDL_LIB_DIR"] = join(self.ctx.bootstrap.build_dir, 'libs', arch.arch)
|
||||
|
||||
env["USE_SDL2_MIXER"] = '1'
|
||||
env["SDL2_MIXER_INCLUDE_DIR"] = join(self.ctx.bootstrap.build_dir, 'jni', 'SDL2_mixer')
|
||||
|
||||
return env
|
||||
|
||||
|
||||
recipe = FFPyPlayerRecipe()
|
||||
|
|
|
@ -7,4 +7,5 @@ class FFPyPlayerCodecsRecipe(Recipe):
|
|||
def build_arch(self, arch):
|
||||
pass
|
||||
|
||||
|
||||
recipe = FFPyPlayerCodecsRecipe()
|
||||
|
|
|
@ -1,15 +1,15 @@
|
|||
|
||||
from pythonforandroid.toolchain import PythonRecipe, shprint
|
||||
import sh
|
||||
from pythonforandroid.recipe import PythonRecipe
|
||||
|
||||
|
||||
class FlaskRecipe(PythonRecipe):
|
||||
version = '0.10.1' # The webserver of 'master' seems to fail
|
||||
# after a little while on Android, so use
|
||||
# 0.10.1 at least for now
|
||||
# The webserver of 'master' seems to fail
|
||||
# after a little while on Android, so use
|
||||
# 0.10.1 at least for now
|
||||
version = '0.10.1'
|
||||
url = 'https://github.com/pallets/flask/archive/{version}.zip'
|
||||
|
||||
depends = [('python2', 'python3crystax'), 'setuptools']
|
||||
depends = [('python2', 'python3', 'python3crystax'), 'setuptools']
|
||||
|
||||
python_depends = ['jinja2', 'werkzeug', 'markupsafe', 'itsdangerous', 'click']
|
||||
|
||||
|
|
|
@ -1,11 +1,8 @@
|
|||
|
||||
from pythonforandroid.toolchain import BootstrapNDKRecipe, shprint, current_directory, info_main
|
||||
from os.path import exists, join
|
||||
from pythonforandroid.recipe import BootstrapNDKRecipe
|
||||
from pythonforandroid.toolchain import current_directory, shprint
|
||||
import sh
|
||||
|
||||
|
||||
|
||||
|
||||
class FontconfigRecipe(BootstrapNDKRecipe):
|
||||
version = "really_old"
|
||||
url = 'https://github.com/vault/fontconfig/archive/androidbuild.zip'
|
||||
|
|
|
@ -1,19 +1,20 @@
|
|||
|
||||
from pythonforandroid.toolchain import Recipe, shprint, current_directory, ArchARM
|
||||
from pythonforandroid.toolchain import Recipe
|
||||
from pythonforandroid.util import current_directory
|
||||
from pythonforandroid.logger import shprint
|
||||
from os.path import exists, join, realpath
|
||||
from os import uname
|
||||
import glob
|
||||
import sh
|
||||
|
||||
|
||||
class FreetypeRecipe(Recipe):
|
||||
|
||||
version = '2.5.5'
|
||||
url = 'http://download.savannah.gnu.org/releases/freetype/freetype-{version}.tar.gz'
|
||||
url = 'http://download.savannah.gnu.org/releases/freetype/freetype-{version}.tar.gz' # noqa
|
||||
|
||||
depends = ['harfbuzz']
|
||||
|
||||
def should_build(self, arch):
|
||||
if exists(join(self.get_build_dir(arch.arch), 'objs', '.libs', 'libfreetype.so')):
|
||||
if exists(join(self.get_build_dir(arch.arch),
|
||||
'objs', '.libs', 'libfreetype.a')):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@ -23,17 +24,21 @@ class FreetypeRecipe(Recipe):
|
|||
harfbuzz_recipe = Recipe.get_recipe('harfbuzz', self.ctx)
|
||||
env['LDFLAGS'] = ' '.join(
|
||||
[env['LDFLAGS'],
|
||||
'-L{}'.format(join(harfbuzz_recipe.get_build_dir(arch.arch), 'src', '.libs'))])
|
||||
'-L{}'.format(join(harfbuzz_recipe.get_build_dir(arch.arch),
|
||||
'src', '.libs'))])
|
||||
|
||||
with current_directory(self.get_build_dir(arch.arch)):
|
||||
configure = sh.Command('./configure')
|
||||
shprint(configure, '--host=arm-linux-androideabi',
|
||||
shprint(configure,
|
||||
'--host=arm-linux-androideabi',
|
||||
'--prefix={}'.format(realpath('.')),
|
||||
'--without-zlib', '--with-png=no', '--enable-shared',
|
||||
'--without-zlib',
|
||||
'--with-png=no',
|
||||
'--disable-shared',
|
||||
_env=env)
|
||||
shprint(sh.make, '-j5', _env=env)
|
||||
|
||||
shprint(sh.cp, 'objs/.libs/libfreetype.so', self.ctx.libs_dir)
|
||||
shprint(sh.cp, 'objs/.libs/libfreetype.a', self.ctx.libs_dir)
|
||||
|
||||
|
||||
recipe = FreetypeRecipe()
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from pythonforandroid.toolchain import BootstrapNDKRecipe, shprint, current_directory, info
|
||||
from os.path import exists, join
|
||||
from pythonforandroid.recipe import BootstrapNDKRecipe
|
||||
from pythonforandroid.toolchain import current_directory, shprint
|
||||
import sh
|
||||
|
||||
|
||||
|
@ -7,18 +7,16 @@ class GenericNDKBuildRecipe(BootstrapNDKRecipe):
|
|||
version = None
|
||||
url = None
|
||||
|
||||
depends = [('python2', 'python3crystax')]
|
||||
depends = [('python2', 'python3', 'python3crystax')]
|
||||
conflicts = ['sdl2', 'pygame', 'sdl']
|
||||
|
||||
def should_build(self, arch):
|
||||
return True
|
||||
|
||||
def get_recipe_env(self, arch=None):
|
||||
env = super(GenericNDKBuildRecipe, self).get_recipe_env(arch)
|
||||
py2 = self.get_recipe('python2', arch.ctx)
|
||||
env['PYTHON2_NAME'] = py2.get_dir_name()
|
||||
if 'python2' in self.ctx.recipe_build_order:
|
||||
env['EXTRA_LDLIBS'] = ' -lpython2.7'
|
||||
def get_recipe_env(self, arch=None, with_flags_in_cc=True, with_python=True):
|
||||
env = super(GenericNDKBuildRecipe, self).get_recipe_env(
|
||||
arch=arch, with_flags_in_cc=with_flags_in_cc, with_python=with_python)
|
||||
env['APP_ALLOW_MISSING_DEPS'] = 'true'
|
||||
return env
|
||||
|
||||
def build_arch(self, arch):
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
from pythonforandroid.toolchain import PythonRecipe
|
||||
from pythonforandroid.recipe import PythonRecipe
|
||||
|
||||
|
||||
class GeventWebsocketRecipe(PythonRecipe):
|
||||
version = '0.9.5'
|
||||
url = 'https://pypi.python.org/packages/source/g/gevent-websocket/gevent-websocket-{version}.tar.gz'
|
||||
depends = [('python2', 'python3crystax'), 'setuptools']
|
||||
depends = ['setuptools']
|
||||
site_packages_name = 'geventwebsocket'
|
||||
call_hostpython_via_targetpython = False
|
||||
|
||||
|
||||
recipe = GeventWebsocketRecipe()
|
||||
|
|
|
@ -1,10 +1,32 @@
|
|||
from pythonforandroid.toolchain import CompiledComponentsPythonRecipe
|
||||
import re
|
||||
from pythonforandroid.logger import info
|
||||
from pythonforandroid.recipe import CythonRecipe
|
||||
|
||||
|
||||
class GeventRecipe(CompiledComponentsPythonRecipe):
|
||||
version = '1.1.1'
|
||||
class GeventRecipe(CythonRecipe):
|
||||
version = '1.4.0'
|
||||
url = 'https://pypi.python.org/packages/source/g/gevent/gevent-{version}.tar.gz'
|
||||
depends = [('python2', 'python3crystax'), 'greenlet']
|
||||
patches = ["gevent.patch"]
|
||||
depends = ['librt', 'greenlet']
|
||||
patches = ["cross_compiling.patch"]
|
||||
|
||||
def get_recipe_env(self, arch=None, with_flags_in_cc=True):
|
||||
"""
|
||||
- Moves all -I<inc> -D<macro> from CFLAGS to CPPFLAGS environment.
|
||||
- Moves all -l<lib> from LDFLAGS to LIBS environment.
|
||||
- Fixes linker name (use cross compiler) and flags (appends LIBS)
|
||||
"""
|
||||
env = super(GeventRecipe, self).get_recipe_env(arch, with_flags_in_cc)
|
||||
# CFLAGS may only be used to specify C compiler flags, for macro definitions use CPPFLAGS
|
||||
regex = re.compile(r'(?:\s|^)-[DI][\S]+')
|
||||
env['CPPFLAGS'] = ''.join(re.findall(regex, env['CFLAGS'])).strip()
|
||||
env['CFLAGS'] = re.sub(regex, '', env['CFLAGS'])
|
||||
info('Moved "{}" from CFLAGS to CPPFLAGS.'.format(env['CPPFLAGS']))
|
||||
# LDFLAGS may only be used to specify linker flags, for libraries use LIBS
|
||||
regex = re.compile(r'(?:\s|^)-l[\w\.]+')
|
||||
env['LIBS'] = ''.join(re.findall(regex, env['LDFLAGS'])).strip()
|
||||
env['LDFLAGS'] = re.sub(regex, '', env['LDFLAGS'])
|
||||
info('Moved "{}" from LDFLAGS to LIBS.'.format(env['LIBS']))
|
||||
return env
|
||||
|
||||
|
||||
recipe = GeventRecipe()
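A standalone sketch of the CFLAGS/LDFLAGS relocation performed by GeventRecipe.get_recipe_env() above; the flag values are invented for illustration, while the regexes are the ones used in the recipe.

import re

env = {'CFLAGS': '-O2 -DANDROID -Iinclude/python3.7m -fPIC',
       'LDFLAGS': '-Llibs -lpython3.7m -shared'}

# Macros and include dirs belong in CPPFLAGS, not CFLAGS.
macro_or_include = re.compile(r'(?:\s|^)-[DI][\S]+')
env['CPPFLAGS'] = ''.join(macro_or_include.findall(env['CFLAGS'])).strip()
env['CFLAGS'] = macro_or_include.sub('', env['CFLAGS'])

# Libraries belong in LIBS, not LDFLAGS.
lib_flags = re.compile(r'(?:\s|^)-l[\w\.]+')
env['LIBS'] = ''.join(lib_flags.findall(env['LDFLAGS'])).strip()
env['LDFLAGS'] = lib_flags.sub('', env['LDFLAGS'])

print(env['CPPFLAGS'])  # -DANDROID -Iinclude/python3.7m
print(env['CFLAGS'])    # -O2 -fPIC
print(env['LIBS'])      # -lpython3.7m
print(env['LDFLAGS'])   # -Llibs -shared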
|
||||
|
|
26
p4a/pythonforandroid/recipes/gevent/cross_compiling.patch
Normal file
|
@ -0,0 +1,26 @@
|
|||
diff --git a/_setupares.py b/_setupares.py
|
||||
index dd184de6..bb16bebe 100644
|
||||
--- a/_setupares.py
|
||||
+++ b/_setupares.py
|
||||
@@ -43,7 +43,7 @@ else:
|
||||
ares_configure_command = ' '.join([
|
||||
"(cd ", quoted_dep_abspath('c-ares'),
|
||||
" && if [ -r ares_build.h ]; then cp ares_build.h ares_build.h.orig; fi ",
|
||||
- " && sh ./configure --disable-dependency-tracking " + _m32 + "CONFIG_COMMANDS= ",
|
||||
+ " && sh ./configure --host={} --disable-dependency-tracking ".format(os.environ['TOOLCHAIN_PREFIX']) + _m32 + "CONFIG_COMMANDS= ",
|
||||
" && cp ares_config.h ares_build.h \"$OLDPWD\" ",
|
||||
" && cat ares_build.h ",
|
||||
" && if [ -r ares_build.h.orig ]; then mv ares_build.h.orig ares_build.h; fi)",
|
||||
diff --git a/_setuplibev.py b/_setuplibev.py
|
||||
index 2a5841bf..b6433c94 100644
|
||||
--- a/_setuplibev.py
|
||||
+++ b/_setuplibev.py
|
||||
@@ -31,7 +31,7 @@ LIBEV_EMBED = should_embed('libev')
|
||||
# and the PyPy branch will clean it up.
|
||||
libev_configure_command = ' '.join([
|
||||
"(cd ", quoted_dep_abspath('libev'),
|
||||
- " && sh ./configure ",
|
||||
+ " && sh ./configure --host={} ".format(os.environ['TOOLCHAIN_PREFIX']),
|
||||
" && cp config.h \"$OLDPWD\"",
|
||||
")",
|
||||
'> configure-output.txt'
|
|
@ -1,21 +0,0 @@
|
|||
diff -Naur gevent-1.1.1/setup.py gevent-1.1.1_diff/setup.py
|
||||
--- gevent-1.1.1/setup.py 2016-04-04 17:27:33.000000000 +0200
|
||||
+++ gevent-1.1.1_diff/setup.py 2016-05-10 10:10:39.145881610 +0200
|
||||
@@ -96,7 +96,7 @@
|
||||
# and the PyPy branch will clean it up.
|
||||
libev_configure_command = ' '.join([
|
||||
"(cd ", _quoted_abspath('libev/'),
|
||||
- " && /bin/sh ./configure ",
|
||||
+ " && /bin/sh ./configure --host={}".format(os.environ['TOOLCHAIN_PREFIX']),
|
||||
" && cp config.h \"$OLDPWD\"",
|
||||
")",
|
||||
'> configure-output.txt'
|
||||
@@ -112,7 +112,7 @@
|
||||
# Use -r, not -e, for support of old solaris. See https://github.com/gevent/gevent/issues/777
|
||||
ares_configure_command = ' '.join(["(cd ", _quoted_abspath('c-ares/'),
|
||||
" && if [ -r ares_build.h ]; then cp ares_build.h ares_build.h.orig; fi ",
|
||||
- " && /bin/sh ./configure " + _m32 + "CONFIG_COMMANDS= CONFIG_FILES= ",
|
||||
+ " && /bin/sh ./configure --host={} ".format(os.environ['TOOLCHAIN_PREFIX']) + "CFLAGS= LDFLAGS= CONFIG_COMMANDS= CONFIG_FILES= ",
|
||||
" && cp ares_config.h ares_build.h \"$OLDPWD\" ",
|
||||
" && mv ares_build.h.orig ares_build.h)",
|
||||
"> configure-output.txt"])
|
|
@ -1,9 +1,11 @@
|
|||
from pythonforandroid.toolchain import PythonRecipe
|
||||
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
|
||||
|
||||
|
||||
class GreenletRecipe(PythonRecipe):
|
||||
version = '0.4.9'
|
||||
class GreenletRecipe(CompiledComponentsPythonRecipe):
|
||||
version = '0.4.15'
|
||||
url = 'https://pypi.python.org/packages/source/g/greenlet/greenlet-{version}.tar.gz'
|
||||
depends = [('python2', 'python3crystax')]
|
||||
depends = ['setuptools']
|
||||
call_hostpython_via_targetpython = False
|
||||
|
||||
|
||||
recipe = GreenletRecipe()
|
||||
|
|
11
p4a/pythonforandroid/recipes/groestlcoin_hash/__init__.py
Normal file
|
@ -0,0 +1,11 @@
|
|||
from pythonforandroid.recipe import CythonRecipe


class GroestlcoinHashRecipe(CythonRecipe):
    version = '1.0.1'
    url = 'https://github.com/Groestlcoin/groestlcoin-hash-python/archive/{version}.tar.gz'
    depends = []
    cythonize = False


recipe = GroestlcoinHashRecipe()
|
|
@ -1,17 +1,17 @@
|
|||
|
||||
from pythonforandroid.toolchain import Recipe, shprint, current_directory, ArchARM
|
||||
from os.path import exists, join, realpath
|
||||
from os import uname
|
||||
import glob
|
||||
from pythonforandroid.toolchain import Recipe
|
||||
from pythonforandroid.util import current_directory
|
||||
from pythonforandroid.logger import shprint
|
||||
from os.path import exists, join
|
||||
import sh
|
||||
|
||||
|
||||
class HarfbuzzRecipe(Recipe):
|
||||
version = '0.9.40'
|
||||
url = 'http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-{version}.tar.bz2'
|
||||
url = 'http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-{version}.tar.bz2' # noqa
|
||||
|
||||
def should_build(self, arch):
|
||||
if exists(join(self.get_build_dir(arch.arch), 'src', '.libs', 'libharfbuzz.so')):
|
||||
if exists(join(self.get_build_dir(arch.arch),
|
||||
'src', '.libs', 'libharfbuzz.a')):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@ -24,10 +24,16 @@ class HarfbuzzRecipe(Recipe):
|
|||
with current_directory(self.get_build_dir(arch.arch)):
|
||||
configure = sh.Command('./configure')
|
||||
shprint(configure, '--without-icu', '--host=arm-linux-androideabi',
|
||||
'--prefix={}'.format(join(self.ctx.build_dir, 'python-install')),
|
||||
'--without-freetype', '--without-glib', _env=env)
|
||||
'--prefix={}'.format(
|
||||
join(self.ctx.build_dir, 'python-install')),
|
||||
'--without-freetype',
|
||||
'--without-glib',
|
||||
'--disable-shared',
|
||||
_env=env)
|
||||
shprint(sh.make, '-j5', _env=env)
|
||||
|
||||
shprint(sh.cp, '-L', join('src', '.libs', 'libharfbuzz.so'), self.ctx.libs_dir)
|
||||
shprint(sh.cp, '-L', join('src', '.libs', 'libharfbuzz.a'),
|
||||
self.ctx.libs_dir)
|
||||
|
||||
|
||||
recipe = HarfbuzzRecipe()
|
||||
|
|
|
@@ -1,59 +1,18 @@
-from pythonforandroid.toolchain import Recipe, shprint, current_directory, info, warning
-from os.path import join, exists
-from os import chdir
-import sh
+from pythonforandroid.python import HostPythonRecipe


-class Hostpython2Recipe(Recipe):
-    version = '2.7.2'
-    url = 'http://python.org/ftp/python/{version}/Python-{version}.tar.bz2'
+class Hostpython2Recipe(HostPythonRecipe):
+    '''
+    The hostpython2's recipe.
+
+    .. versionchanged:: 0.6.0
+        Updated to version 2.7.15 and the build process has been changed in
+        favour of the recently added class
+        :class:`~pythonforandroid.python.HostPythonRecipe`
+    '''
+    version = '2.7.15'
+    name = 'hostpython2'

-    conflicts = ['hostpython3']
-
-    def get_build_container_dir(self, arch=None):
-        choices = self.check_recipe_choices()
-        dir_name = '-'.join([self.name] + choices)
-        return join(self.ctx.build_dir, 'other_builds', dir_name, 'desktop')
-
-    def get_build_dir(self, arch=None):
-        return join(self.get_build_container_dir(), self.name)
-
-    def prebuild_arch(self, arch):
-        # Override hostpython Setup?
-        shprint(sh.cp, join(self.get_recipe_dir(), 'Setup'),
-                join(self.get_build_dir(), 'Modules', 'Setup'))
-
-    def build_arch(self, arch):
-        with current_directory(self.get_build_dir()):
-
-            if exists('hostpython'):
-                info('hostpython already exists, skipping build')
-                self.ctx.hostpython = join(self.get_build_dir(),
-                                           'hostpython')
-                self.ctx.hostpgen = join(self.get_build_dir(),
-                                         'hostpgen')
-                return
-
-            configure = sh.Command('./configure')
-
-            shprint(configure)
-            shprint(sh.make, '-j5')
-
-            shprint(sh.mv, join('Parser', 'pgen'), 'hostpgen')
-
-            if exists('python.exe'):
-                shprint(sh.mv, 'python.exe', 'hostpython')
-            elif exists('python'):
-                shprint(sh.mv, 'python', 'hostpython')
-            else:
-                warning('Unable to find the python executable after '
-                        'hostpython build! Exiting.')
-                exit(1)
-
-            self.ctx.hostpython = join(self.get_build_dir(), 'hostpython')
-            self.ctx.hostpgen = join(self.get_build_dir(), 'hostpgen')
+    conflicts = ['hostpython3', 'hostpython3crystax', 'hostpython2legacy']


 recipe = Hostpython2Recipe()
@@ -91,7 +91,7 @@ SITEPATH=
 TESTPATH=

 # Path components for machine- or system-dependent modules and shared libraries
-MACHDEPPATH=:plat-$(PLATDIR)
+MACHDEPPATH=:plat-$(MACHDEP)
 EXTRAMACHDEPPATH=

 # Path component for the Tkinter-related modules
@@ -109,7 +109,7 @@ PYTHONPATH=$(COREPYTHONPATH)
 # various reasons; therefore they are listed here instead of in the
 # normal order.

-# This only contains the minimal set of modules required to run the
+# This only contains the minimal set of modules required to run the
 # setup.py script in the root of the Python source tree.

 posix posixmodule.c # posix (UNIX) system calls
@@ -118,7 +118,6 @@ pwd pwdmodule.c # this is needed to find out the user's home dir
 # if $HOME is not set
 _sre _sre.c # Fredrik Lundh's new regular expressions
 _codecs _codecsmodule.c # access to the builtin codecs and codec registry
-_weakref _weakref.c # weak referencess

 # The zipimport module is always imported at startup. Having it as a
 # builtin module avoids some bootstrapping problems and reduces overhead.
@@ -127,9 +126,9 @@ zipimport zipimport.c
 # The rest of the modules listed in this file are all commented out by
 # default. Usually they can be detected and built as dynamically
 # loaded modules by the new setup.py script added in Python 2.1. If
-# you're on a platform that doesn't support dynamic loading, want to
-# compile modules statically into the Python binary, or need to
-# specify some odd set of compiler switches, you can uncomment the
+# you're on a platform that doesn't support dynamic loading, want to
+# compile modules statically into the Python binary, or need to
+# specify some odd set of compiler switches, you can uncomment the
 # appropriate lines below.

 # ======================================================================
@@ -165,19 +164,20 @@ GLHACK=-Dclear=__GLclear
 #readline readline.c -lreadline -ltermcap


 # Modules that should always be present (non UNIX dependent):

 array arraymodule.c # array objects
-cmath cmathmodule.c _math.c # -lm # complex math library functions
-math mathmodule.c _math.c # -lm # math library functions, e.g. sin()
+cmath cmathmodule.c # -lm # complex math library functions
+math mathmodule.c # -lm # math library functions, e.g. sin()
 _struct _struct.c # binary structure packing/unpacking
 time timemodule.c # -lm # time operations and variables
 operator operator.c # operator.add() and similar goodies
+_weakref _weakref.c # basic weak reference support
 #_testcapi _testcapimodule.c    # Python C API test module
 _random _randommodule.c # Random number generator
 _collections _collectionsmodule.c # Container types
 _heapq _heapqmodule.c # Heapq type
-itertools itertoolsmodule.c # Functions creating iterators for efficient looping
+itertools itertoolsmodule.c # Functions creating iterators for efficient looping
 strop stropmodule.c # String manipulations
 _functools _functoolsmodule.c # Tools for working with functions and callable objects
 _elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator
@@ -185,7 +185,7 @@ _elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI
 datetime datetimemodule.c # date/time type
 _bisect _bisectmodule.c # Bisection algorithms

-unicodedata unicodedata.c # static Unicode character database
+#unicodedata unicodedata.c # static Unicode character database

 # access to ISO C locale support
 #_locale _localemodule.c # -lintl
@@ -196,7 +196,7 @@ unicodedata unicodedata.c # static Unicode character database
 # supported...)

 fcntl fcntlmodule.c # fcntl(2) and ioctl(2)
-#spwd spwdmodule.c # spwd(3)
+#spwd spwdmodule.c # spwd(3)
 #grp grpmodule.c # grp(3)
 select selectmodule.c # select(2); not on ancient System V

@@ -299,7 +299,7 @@ _sha512 sha512module.c
 #sunaudiodev sunaudiodev.c


-# A Linux specific module -- off by default; this may also work on
+# A Linux specific module -- off by default; this may also work on
 # some *BSDs.

 #linuxaudiodev linuxaudiodev.c

@@ -365,7 +365,7 @@ _sha512 sha512module.c

 #_curses _cursesmodule.c -lcurses -ltermcap
 # Wrapper for the panel library that's part of ncurses and SYSV curses.
-#_curses_panel _curses_panel.c -lpanel -lncurses
+#_curses_panel _curses_panel.c -lpanel -lncurses


 # Generic (SunOS / SVR4) dynamic loading module.
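Aside (not part of the diff): in CPython's Modules/Setup format, each uncommented entry names an extension module followed by its source files and optional compiler or linker arguments, and listed modules are typically compiled statically into the interpreter, while lines starting with '#' are left out of the build. The hunks above adjust which modules the host interpreter gets (for example, the _math.c helper is dropped from the cmath and math entries and unicodedata is commented out). A tiny illustrative parser for that line format, written only to make the layout concrete:

# Illustrative only: split a Modules/Setup entry into a module name and its build arguments.
def parse_setup_line(line):
    line = line.split('#', 1)[0].strip()    # drop trailing comments
    if not line or '=' in line.split()[0]:  # skip blanks and VAR= definitions
        return None
    name, *args = line.split()
    return name, args

print(parse_setup_line('math mathmodule.c # -lm # math library functions'))
# -> ('math', ['mathmodule.c'])
print(parse_setup_line('#unicodedata unicodedata.c # static Unicode character database'))
# -> None (commented out, so not built)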
p4a/pythonforandroid/recipes/hostpython2legacy/__init__.py (new file, 67 lines)
@@ -0,0 +1,67 @@
+import os
+import sh
+from os.path import join, exists
+
+from pythonforandroid.recipe import Recipe
+from pythonforandroid.logger import info, warning, shprint
+from pythonforandroid.util import current_directory
+
+
+class Hostpython2LegacyRecipe(Recipe):
+    '''
+    .. versionadded:: 0.6.0
+        This was the original hostpython2's recipe by tito reintroduced as
+        hostpython2legacy.
+    '''
+    version = '2.7.2'
+    url = 'https://python.org/ftp/python/{version}/Python-{version}.tar.bz2'
+    name = 'hostpython2legacy'
+    patches = ['fix-segfault-pygchead.patch']
+
+    conflicts = ['hostpython2', 'hostpython3', 'hostpython3crystax']
+
+    def get_build_container_dir(self, arch=None):
+        choices = self.check_recipe_choices()
+        dir_name = '-'.join([self.name] + choices)
+        return join(self.ctx.build_dir, 'other_builds', dir_name, 'desktop')
+
+    def get_build_dir(self, arch=None):
+        return join(self.get_build_container_dir(), self.name)
+
+    def prebuild_arch(self, arch):
+        # Override hostpython Setup?
+        shprint(sh.cp, join(self.get_recipe_dir(), 'Setup'),
+                join(self.get_build_dir(), 'Modules', 'Setup'))
+
+    def build_arch(self, arch):
+        with current_directory(self.get_build_dir()):
+
+            if exists('hostpython'):
+                info('hostpython already exists, skipping build')
+                self.ctx.hostpython = join(self.get_build_dir(), 'hostpython')
+                self.ctx.hostpgen = join(self.get_build_dir(), 'hostpgen')
+                return
+
+            if 'LIBS' in os.environ:
+                os.environ.pop('LIBS')
+            configure = sh.Command('./configure')
+
+            shprint(configure)
+            shprint(sh.make, '-j5')
+
+            shprint(sh.mv, join('Parser', 'pgen'), 'hostpgen')
+
+            if exists('python.exe'):
+                shprint(sh.mv, 'python.exe', 'hostpython')
+            elif exists('python'):
+                shprint(sh.mv, 'python', 'hostpython')
+            else:
+                warning('Unable to find the python executable after '
+                        'hostpython build! Exiting.')
+                exit(1)
+
+            self.ctx.hostpython = join(self.get_build_dir(), 'hostpython')
+            self.ctx.hostpgen = join(self.get_build_dir(), 'hostpgen')
+
+
+recipe = Hostpython2LegacyRecipe()
@@ -0,0 +1,12 @@
+diff -Naur Python-2.7.2.orig/Include/objimpl.h Python-2.7.2/Include/objimpl.h
+--- Python-2.7.2.orig/Include/objimpl.h	2011-06-11 17:46:23.000000000 +0200
++++ Python-2.7.2/Include/objimpl.h	2018-09-04 17:33:09.254654565 +0200
+@@ -255,7 +255,7 @@
+         union _gc_head *gc_prev;
+         Py_ssize_t gc_refs;
+     } gc;
+-    long double dummy;  /* force worst-case alignment */
++    double dummy;  /* force worst-case alignment */
+ } PyGC_Head;
+
+ extern PyGC_Head *_PyGC_generation0;
@@ -1,61 +1,17 @@
-from pythonforandroid.toolchain import Recipe, shprint, current_directory, info, warning
-from os.path import join, exists
-from os import chdir
-import sh
+from pythonforandroid.python import HostPythonRecipe


-class Hostpython3Recipe(Recipe):
-    version = '3.5'
-    # url = 'http://python.org/ftp/python/{version}/Python-{version}.tgz'
-    url = 'https://github.com/crystax/android-vendor-python-3-5/archive/master.zip'
+class Hostpython3Recipe(HostPythonRecipe):
+    '''
+    The hostpython3's recipe.
+
+    .. versionchanged:: 0.6.0
+        Refactored into the new class
+        :class:`~pythonforandroid.python.HostPythonRecipe`
+    '''
+    version = '3.7.1'
+    name = 'hostpython3'

-    conflicts = ['hostpython2']
-
-    # def prebuild_armeabi(self):
-    #     # Override hostpython Setup?
-    #     shprint(sh.cp, join(self.get_recipe_dir(), 'Setup'),
-    #             join(self.get_build_dir('armeabi'), 'Modules', 'Setup'))
-
-    def build_arch(self, arch):
-        # AND: Should use an i386 recipe system
-        warning('Running hostpython build. Arch is armeabi! '
-                'This is naughty, need to fix the Arch system!')
-
-        # AND: Fix armeabi again
-        with current_directory(self.get_build_dir(arch.arch)):
-
-            if exists('hostpython'):
-                info('hostpython already exists, skipping build')
-                self.ctx.hostpython = join(self.get_build_dir('armeabi'),
-                                           'hostpython')
-                self.ctx.hostpgen = join(self.get_build_dir('armeabi'),
-                                         'hostpgen')
-                return
-
-            configure = sh.Command('./configure')
-
-            shprint(configure)
-            shprint(sh.make, '-j5', 'BUILDPYTHON=hostpython', 'hostpython',
-                    'PGEN=Parser/hostpgen', 'Parser/hostpgen')
-
-            shprint(sh.mv, join('Parser', 'hostpgen'), 'hostpgen')
-
-            # if exists('python.exe'):
-            #     shprint(sh.mv, 'python.exe', 'hostpython')
-            # elif exists('python'):
-            #     shprint(sh.mv, 'python', 'hostpython')
-            if exists('hostpython'):
-                pass  # The above commands should automatically create
-                      # the hostpython binary, unlike with python2
-            else:
-                warning('Unable to find the python executable after '
-                        'hostpython build! Exiting.')
-                exit(1)
-
-            self.ctx.hostpython = join(self.get_build_dir(arch.arch), 'hostpython')
-            self.ctx.hostpgen = join(self.get_build_dir(arch.arch), 'hostpgen')
+    conflicts = ['hostpython2', 'hostpython3crystax']


 recipe = Hostpython3Recipe()
@@ -1,26 +1,44 @@
-from pythonforandroid.toolchain import Recipe, shprint, current_directory, info, warning
-from os.path import join, exists
-from os import chdir
+from pythonforandroid.toolchain import Recipe, shprint
+from os.path import join
 import sh


-class Hostpython3Recipe(Recipe):
-    version = '3.5'
-    # url = 'http://python.org/ftp/python/{version}/Python-{version}.tgz'
-    # url = 'https://github.com/crystax/android-vendor-python-3-5/archive/master.zip'
+class Hostpython3CrystaXRecipe(Recipe):
+    version = 'auto'  # the version is taken from the python3crystax recipe
+    name = 'hostpython3crystax'

     conflicts = ['hostpython2']

+    def get_build_container_dir(self, arch=None):
+        choices = self.check_recipe_choices()
+        dir_name = '-'.join([self.name] + choices)
+        return join(self.ctx.build_dir, 'other_builds', dir_name, 'desktop')
+
     # def prebuild_armeabi(self):
     #     # Override hostpython Setup?
     #     shprint(sh.cp, join(self.get_recipe_dir(), 'Setup'),
     #             join(self.get_build_dir('armeabi'), 'Modules', 'Setup'))

+    def get_build_dir(self, arch=None):
+        return join(self.get_build_container_dir(), self.name)
+
     def build_arch(self, arch):
+        """
+        Creates expected build and symlinks system Python version.
+        """
         self.ctx.hostpython = '/usr/bin/false'
         self.ctx.hostpgen = '/usr/bin/false'
+        # creates the sub buildir (used by other recipes)
+        # https://github.com/kivy/python-for-android/issues/1154
+        sub_build_dir = join(self.get_build_dir(), 'build')
+        shprint(sh.mkdir, '-p', sub_build_dir)
+        python3crystax = self.get_recipe('python3crystax', self.ctx)
+        system_python = sh.which("python" + python3crystax.version)
+        if system_python is None:
+            raise OSError(
+                ('Trying to use python3crystax=={} but this Python version '
+                 'is not installed locally.').format(python3crystax.version))
+        link_dest = join(self.get_build_dir(), 'hostpython')
+        shprint(sh.ln, '-sf', system_python, link_dest)


-recipe = Hostpython3Recipe()
+recipe = Hostpython3CrystaXRecipe()
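Reviewer note (not part of the diff): the new hostpython3crystax recipe never compiles a host interpreter; it symlinks a matching system Python into its build directory and aborts early if none is installed. A rough pre-flight check along the same lines, where '3.6' is an assumed version used only for illustration (the real value comes from the python3crystax recipe):

import sh

version = '3.6'  # assumed for the example; taken from the python3crystax recipe in practice
system_python = sh.which('python' + version)
if system_python is None:
    raise OSError('python{} is not installed locally; the build would abort here'.format(version))
print('hostpython would be symlinked to', system_python)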
@@ -2,7 +2,7 @@ import sh
 import os
 from os.path import join, isdir
 from pythonforandroid.recipe import NDKRecipe
-from pythonforandroid.toolchain import shprint, info
+from pythonforandroid.toolchain import shprint
 from pythonforandroid.util import current_directory, ensure_dir


@@ -11,7 +11,7 @@ class ICURecipe(NDKRecipe):
     version = '57.1'
     url = 'http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz'

-    depends = [('python2', 'python3crystax'), 'hostpython2']  # installs in python
+    depends = [('hostpython2', 'hostpython3')]  # installs in python
     generated_libraries = [
         'libicui18n.so', 'libicuuc.so', 'libicudata.so', 'libicule.so']
@@ -2,13 +2,13 @@ from pythonforandroid.recipe import PythonRecipe


 class IdnaRecipe(PythonRecipe):
-    name = 'idna'
-    version = '2.0'
-    url = 'https://pypi.python.org/packages/source/i/idna/idna-{version}.tar.gz'
+    name = 'idna'
+    version = '2.8'
+    url = 'https://github.com/kjd/idna/archive/v{version}.tar.gz'

-    depends = [('python2', 'python3crystax'), 'setuptools']
+    depends = ['setuptools']

-    call_hostpython_via_targetpython = False
+    call_hostpython_via_targetpython = False


 recipe = IdnaRecipe()
Some files were not shown because too many files have changed in this diff.