#!/usr/bin/env python
"""
Tool for packaging Python apps for Android
==========================================

This module defines the entry point for command line and programmatic use.
"""

from __future__ import print_function
from os import environ
from pythonforandroid import __version__
from pythonforandroid.recommendations import (
    RECOMMENDED_NDK_API, RECOMMENDED_TARGET_API)
from pythonforandroid.util import BuildInterruptingException, handle_build_exception

def check_python_dependencies():
    # Check if the Python requirements are installed. This check runs
    # before the other imports, since those modules would otherwise be
    # imported (and fail) before they could be checked here.

    # Using the ok flag instead of failing immediately so that all
    # errors are printed at once
    from distutils.version import LooseVersion
    from importlib import import_module
    import sys

    ok = True

    modules = [('colorama', '0.3.3'), 'appdirs', ('sh', '1.10'), 'jinja2',
               'six']

    for module in modules:
        if isinstance(module, tuple):
            module, version = module
        else:
            version = None

        try:
            import_module(module)
        except ImportError:
            if version is None:
                print('ERROR: The {} Python module could not be found, please '
                      'install it.'.format(module))
                ok = False
            else:
                print('ERROR: The {} Python module could not be found, '
                      'please install version {} or higher'.format(
                          module, version))
                ok = False
        else:
            if version is None:
                continue
            try:
                cur_ver = sys.modules[module].__version__
            except AttributeError:  # this is sometimes not available
                continue
            if LooseVersion(cur_ver) < LooseVersion(version):
                print('ERROR: {} version is {}, but python-for-android needs '
                      'at least {}.'.format(module, cur_ver, version))
                ok = False

    if not ok:
        print('python-for-android is exiting due to the errors logged above')
        exit(1)

check_python_dependencies()


import sys
from sys import platform
from os.path import (join, dirname, realpath, exists, expanduser, basename)
import os
import glob
import shutil
import re
import shlex
from functools import wraps

import argparse
import sh
import imp
from appdirs import user_data_dir
import logging
from distutils.version import LooseVersion

from pythonforandroid.recipe import Recipe
from pythonforandroid.logger import (logger, info, warning, setup_color,
                                     Out_Style, Out_Fore,
                                     info_notify, info_main, shprint)
from pythonforandroid.util import current_directory
from pythonforandroid.bootstrap import Bootstrap
from pythonforandroid.distribution import Distribution, pretty_log_dists
from pythonforandroid.graph import get_recipe_order_and_bootstrap
from pythonforandroid.build import Context, build_recipes

user_dir = dirname(realpath(os.path.curdir))
toolchain_dir = dirname(__file__)
sys.path.insert(0, join(toolchain_dir, "tools", "external"))

def add_boolean_option(parser, names, no_names=None,
                       default=True, dest=None, description=None):
    group = parser.add_argument_group(description=description)
    if not isinstance(names, (list, tuple)):
        names = [names]
    if dest is None:
        dest = names[0].strip("-").replace("-", "_")

    def add_dashes(x):
        return x if x.startswith("-") else "--"+x

    opts = [add_dashes(x) for x in names]
    group.add_argument(
        *opts, help=("(this is the default)" if default else None),
        dest=dest, action='store_true')
    if no_names is None:
        def add_no(x):
            x = x.lstrip("-")
            return ("no_"+x) if "_" in x else ("no-"+x)
        no_names = [add_no(x) for x in names]
    opts = [add_dashes(x) for x in no_names]
    group.add_argument(
        *opts, help=(None if default else "(this is the default)"),
        dest=dest, action='store_false')
    parser.set_defaults(**{dest: default})

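
# Illustrative sketch (not part of the toolchain itself): add_boolean_option
# wires up a matching --foo/--no-foo pair sharing a single destination, e.g.
#
#     example = argparse.ArgumentParser()
#     add_boolean_option(example, ['force-build'], default=False,
#                        description='Whether to force a rebuild')
#     example.parse_args(['--force-build']).force_build      # True
#     example.parse_args(['--no-force-build']).force_build   # False
#
# The 'example' parser above is hypothetical and only shown for clarity.
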

def require_prebuilt_dist(func):
    """Decorator for ToolchainCL methods. If present, the method will
    automatically make sure a dist has been built before continuing
    or, if no dists are present or can be obtained, will raise an
    error.
    """

    @wraps(func)
    def wrapper_func(self, args):
        ctx = self.ctx
        ctx.set_archs(self._archs)
        ctx.prepare_build_environment(user_sdk_dir=self.sdk_dir,
                                      user_ndk_dir=self.ndk_dir,
                                      user_android_api=self.android_api,
                                      user_ndk_api=self.ndk_api)
        dist = self._dist
        if dist.needs_build:
            if dist.folder_exists():  # possible if the dist is being replaced
                dist.delete()
            info_notify('No dist exists that meets your requirements, '
                        'so one will be built.')
            build_dist_from_args(ctx, dist, args)
        func(self, args)
    return wrapper_func

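
# Usage note (illustrative, mirroring how the methods below are declared):
#
#     @require_prebuilt_dist
#     def apk(self, args):
#         ...
#
# ensures a suitable dist exists (building it if necessary) before the
# command body runs.
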

def dist_from_args(ctx, args):
    """Parses out any distribution-related arguments, and uses them to
    obtain a Distribution class instance for the build.
    """
    return Distribution.get_distribution(
        ctx,
        name=args.dist_name,
        recipes=split_argument_list(args.requirements),
        ndk_api=args.ndk_api,
        force_build=args.force_build,
        require_perfect_match=args.require_perfect_match,
        allow_replace_dist=args.allow_replace_dist)


def build_dist_from_args(ctx, dist, args):
    """Parses out any bootstrap-related arguments, and uses them to build
    a dist."""
    bs = Bootstrap.get_bootstrap(args.bootstrap, ctx)
    build_order, python_modules, bs \
        = get_recipe_order_and_bootstrap(ctx, dist.recipes, bs)
    ctx.recipe_build_order = build_order
    ctx.python_modules = python_modules

    info('The selected bootstrap is {}'.format(bs.name))
    info_main('# Creating dist with {} bootstrap'.format(bs.name))
    bs.distribution = dist
    info_notify('Dist will have name {} and recipes ({})'.format(
        dist.name, ', '.join(dist.recipes)))
    info('Dist will also contain modules ({}) installed from pip'.format(
        ', '.join(ctx.python_modules)))

    ctx.dist_name = bs.distribution.name
    ctx.prepare_bootstrap(bs)
    if dist.needs_build:
        ctx.prepare_dist(ctx.dist_name)

    build_recipes(build_order, python_modules, ctx)

    ctx.bootstrap.run_distribute()

    info_main('# Your distribution was created successfully, exiting.')
    info('Dist can be found at (for now) {}'
         .format(join(ctx.dist_dir, ctx.dist_name)))


def split_argument_list(l):
    if not len(l):
        return []
    return re.split(r'[ ,]+', l)

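
# For illustration: split_argument_list('kivy, pyjnius python3') returns
# ['kivy', 'pyjnius', 'python3'], and split_argument_list('') returns [].
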

class NoAbbrevParser(argparse.ArgumentParser):
    """We want to disable argument abbreviation so as not to interfere
    with passing through arguments to build.py, but in python2 argparse
    doesn't have this option.

    This subclass alternative follows the suggestion at
    https://bugs.python.org/issue14910.
    """
    def _get_option_tuples(self, option_string):
        return []

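
# For example, without this override argparse would happily treat '--stor'
# as an abbreviation of '--storage-dir', swallowing arguments that were
# really meant to be passed through to build.py.
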

class ToolchainCL(object):

    def __init__(self):

        argv = sys.argv
        # Buildozer used to pass these arguments in a now-invalid order
        # If that happens, apply this fix
        # This fix will be removed once a fixed buildozer is released
        if (len(argv) > 2
                and argv[1].startswith('--color')
                and argv[2].startswith('--storage-dir')):
            argv.append(argv.pop(1))  # the --color arg
            argv.append(argv.pop(1))  # the --storage-dir arg
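        # For illustration, ['p4a', '--color=always', '--storage-dir=/x', 'apk']
        # is rotated into ['p4a', 'apk', '--color=always', '--storage-dir=/x'],
        # which the parser can then handle normally.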

        parser = NoAbbrevParser(
            description='A packaging tool for turning Python scripts and apps '
                        'into Android APKs')

        generic_parser = argparse.ArgumentParser(
            add_help=False,
            description='Generic arguments applied to all commands')
        argparse.ArgumentParser(
            add_help=False, description='Arguments for dist building')

        generic_parser.add_argument(
            '--debug', dest='debug', action='store_true', default=False,
            help='Display debug output and all build info')
        generic_parser.add_argument(
            '--color', dest='color', choices=['always', 'never', 'auto'],
            help='Enable or disable color output (default enabled on tty)')
        generic_parser.add_argument(
            '--sdk-dir', '--sdk_dir', dest='sdk_dir', default='',
            help='The filepath where the Android SDK is installed')
        generic_parser.add_argument(
            '--ndk-dir', '--ndk_dir', dest='ndk_dir', default='',
            help='The filepath where the Android NDK is installed')
        generic_parser.add_argument(
            '--android-api',
            '--android_api',
            dest='android_api',
            default=0,
            type=int,
            help=('The Android API level to build against, defaults to {} if '
                  'not specified.').format(RECOMMENDED_TARGET_API))
        generic_parser.add_argument(
            '--ndk-version', '--ndk_version', dest='ndk_version', default=None,
            help=('DEPRECATED: the NDK version is now found automatically or '
                  'not at all.'))
        generic_parser.add_argument(
            '--ndk-api', type=int, default=None,
            help=('The Android API level to compile against. This should be your '
                  '*minimal supported* API, not normally the same as your --android-api. '
                  'Defaults to min(ANDROID_API, {}) if not specified.').format(RECOMMENDED_NDK_API))
        generic_parser.add_argument(
            '--symlink-java-src', '--symlink_java_src',
            action='store_true',
            dest='symlink_java_src',
            default=False,
            help=('If True, symlinks the java src folder during build and dist '
                  'creation. This is useful for development only; it could also '
                  'cause weird problems.'))

        default_storage_dir = user_data_dir('python-for-android')
        if ' ' in default_storage_dir:
            default_storage_dir = '~/.python-for-android'
        generic_parser.add_argument(
            '--storage-dir', dest='storage_dir', default=default_storage_dir,
            help=('Primary storage directory for downloads and builds '
                  '(default: {})'.format(default_storage_dir)))

        generic_parser.add_argument(
            '--arch', help='The archs to build for, separated by commas.',
            default='arm64-v8a')

        # Options for specifying the Distribution
        generic_parser.add_argument(
            '--dist-name', '--dist_name',
            help='The name of the distribution to use or create', default='')

        generic_parser.add_argument(
            '--requirements',
            help=('Dependencies of your app, should be recipe names or '
                  'Python modules'),
            default='')

        generic_parser.add_argument(
            '--bootstrap',
            help='The bootstrap to build with. Leave unset to choose '
                 'automatically.',
            default=None)

        generic_parser.add_argument(
            '--hook',
            help='Filename to a module that contains python-for-android hooks',
            default=None)

        add_boolean_option(
            generic_parser, ["force-build"],
            default=False,
            description='Whether to force compilation of a new distribution')

        add_boolean_option(
            generic_parser, ["require-perfect-match"],
            default=False,
            description=('Whether the dist recipes must perfectly match '
                         'those requested'))

        add_boolean_option(
            generic_parser, ["allow-replace-dist"],
            default=True,
            description='Whether existing dist names can be automatically replaced'
            )

        generic_parser.add_argument(
            '--local-recipes', '--local_recipes',
            dest='local_recipes', default='./p4a-recipes',
            help='Directory to look for local recipes')

        generic_parser.add_argument(
            '--java-build-tool',
            dest='java_build_tool', default='auto',
            choices=['auto', 'ant', 'gradle'],
            help=('The java build tool to use when packaging the APK, defaults '
                  'to automatically selecting an appropriate tool.'))

        add_boolean_option(
            generic_parser, ['copy-libs'],
            default=False,
            description='Copy libraries instead of using biglink (Android 4.3+)'
            )

        self._read_configuration()

        subparsers = parser.add_subparsers(dest='subparser_name',
                                           help='The command to run')

        def add_parser(subparsers, *args, **kwargs):
            """
            argparse in python2 doesn't support the aliases option,
            so we just don't provide the aliases there.
            """
            if 'aliases' in kwargs and sys.version_info.major < 3:
                kwargs.pop('aliases')
            return subparsers.add_parser(*args, **kwargs)

        parser_recommendations = add_parser(
            subparsers,
            'recommendations',
            parents=[generic_parser],
            help='List recommended p4a dependencies')
        parser_recipes = add_parser(
            subparsers,
            'recipes',
            parents=[generic_parser],
            help='List the available recipes')
        parser_recipes.add_argument(
            "--compact",
            action="store_true", default=False,
            help="Produce a compact list suitable for scripting")
        add_parser(
            subparsers, 'bootstraps',
            help='List the available bootstraps',
            parents=[generic_parser])
        add_parser(
            subparsers, 'clean_all',
            aliases=['clean-all'],
            help='Delete all builds, dists and caches',
            parents=[generic_parser])
        add_parser(
            subparsers, 'clean_dists',
            aliases=['clean-dists'],
            help='Delete all dists',
            parents=[generic_parser])
        add_parser(
            subparsers, 'clean_bootstrap_builds',
            aliases=['clean-bootstrap-builds'],
            help='Delete all bootstrap builds',
            parents=[generic_parser])
        add_parser(
            subparsers, 'clean_builds',
            aliases=['clean-builds'],
            help='Delete all builds',
            parents=[generic_parser])

        parser_clean = add_parser(
            subparsers, 'clean',
            help='Delete build components.',
            parents=[generic_parser])
        parser_clean.add_argument(
            'component', nargs='+',
            help=('The build component(s) to delete. You can pass any '
                  'number of arguments from "all", "builds", "dists", '
                  '"distributions", "bootstrap_builds", "downloads".'))

        parser_clean_recipe_build = add_parser(
            subparsers,
            'clean_recipe_build', aliases=['clean-recipe-build'],
            help=('Delete the build components of the given recipe. '
                  'By default this will also delete built dists'),
            parents=[generic_parser])
        parser_clean_recipe_build.add_argument(
            'recipe', help='The recipe name')
        parser_clean_recipe_build.add_argument(
            '--no-clean-dists', default=False,
            dest='no_clean_dists',
            action='store_true',
            help='If passed, do not delete existing dists')

        parser_clean_download_cache = add_parser(
            subparsers,
            'clean_download_cache', aliases=['clean-download-cache'],
            help='Delete cached downloads for requirement builds',
            parents=[generic_parser])
        parser_clean_download_cache.add_argument(
            'recipes',
            nargs='*',
            help='The recipes to clean (space-separated). If no recipe name is'
                 ' provided, the entire cache is cleared.')

        parser_export_dist = add_parser(
            subparsers,
            'export_dist', aliases=['export-dist'],
            help='Copy the named dist to the given path',
            parents=[generic_parser])
        parser_export_dist.add_argument('output_dir',
                                        help='The output dir to copy to')
        parser_export_dist.add_argument(
            '--symlink',
            action='store_true',
            help='Symlink the dist instead of copying')
        parser_apk = add_parser(
            subparsers,
            'apk', help='Build an APK',
            parents=[generic_parser])
        parser_apk.add_argument(
            '--release', dest='build_mode', action='store_const',
            const='release', default='debug',
            help='Build the APK in Release mode')
        parser_apk.add_argument(
            '--keystore', dest='keystore', action='store', default=None,
            help=('Keystore for JAR signing key, will use jarsigner '
                  'default if not specified (release build only)'))
        parser_apk.add_argument(
            '--signkey', dest='signkey', action='store', default=None,
            help='Key alias to sign APK with (release build only)')
        parser_apk.add_argument(
            '--keystorepw', dest='keystorepw', action='store', default=None,
            help='Password for keystore')
        parser_apk.add_argument(
            '--signkeypw', dest='signkeypw', action='store', default=None,
            help='Password for key alias')

        parser_create = add_parser(
            subparsers,
            'create', help='Compile a set of requirements into a dist',
            parents=[generic_parser])
        parser_archs = add_parser(
            subparsers,
            'archs', help='List the available target architectures',
            parents=[generic_parser])
        parser_distributions = add_parser(
            subparsers,
            'distributions', aliases=['dists'],
            help='List the currently available (compiled) dists',
            parents=[generic_parser])
        parser_delete_dist = add_parser(
            subparsers,
            'delete_dist', aliases=['delete-dist'], help='Delete a compiled dist',
            parents=[generic_parser])

        parser_sdk_tools = add_parser(
            subparsers,
            'sdk_tools', aliases=['sdk-tools'],
            help='Run the given binary from the SDK tools dir',
            parents=[generic_parser])
        parser_sdk_tools.add_argument(
            'tool', help='The binary tool name to run')

        parser_adb = add_parser(
            subparsers,
            'adb', help='Run adb from the given SDK',
            parents=[generic_parser])
        parser_logcat = add_parser(
            subparsers,
            'logcat', help='Run logcat from the given SDK',
            parents=[generic_parser])
        parser_build_status = add_parser(
            subparsers,
            'build_status', aliases=['build-status'],
            help='Print some debug information about current built components',
            parents=[generic_parser])

        parser.add_argument('-v', '--version', action='version',
                            version=__version__)

        args, unknown = parser.parse_known_args(sys.argv[1:])
        args.unknown_args = unknown

        self.args = args

        if args.subparser_name is None:
            parser.print_help()
            exit(1)

        setup_color(args.color)

        if args.debug:
            logger.setLevel(logging.DEBUG)

        # strip version from requirements, and put them in environ
        if hasattr(args, 'requirements'):
            requirements = []
            for requirement in split_argument_list(args.requirements):
                if "==" in requirement:
                    requirement, version = requirement.split(u"==", 1)
                    os.environ["VERSION_{}".format(requirement)] = version
                    info('Recipe {}: version "{}" requested'.format(
                        requirement, version))
                requirements.append(requirement)
            args.requirements = u",".join(requirements)
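        # For illustration, '--requirements=python3,kivy==1.11.1' becomes the
        # requirements string 'python3,kivy', with VERSION_kivy=1.11.1 placed
        # in the environment so that the specific version can be requested.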

        self.warn_on_deprecated_args(args)

        self.ctx = Context()
        self.storage_dir = args.storage_dir
        self.ctx.setup_dirs(self.storage_dir)
        self.sdk_dir = args.sdk_dir
        self.ndk_dir = args.ndk_dir
        self.android_api = args.android_api
        self.ndk_api = args.ndk_api
        self.ctx.symlink_java_src = args.symlink_java_src
        self.ctx.java_build_tool = args.java_build_tool

        self._archs = split_argument_list(args.arch)

        self.ctx.local_recipes = args.local_recipes
        self.ctx.copy_libs = args.copy_libs

        # Each subparser corresponds to a method
        getattr(self, args.subparser_name.replace('-', '_'))(args)
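        # For example, 'p4a clean-dists' dispatches to self.clean_dists(args),
        # and 'p4a apk' dispatches to self.apk(args).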

    def warn_on_deprecated_args(self, args):
        """
        Print warning messages for any deprecated arguments that were passed.
        """

        # NDK version is now determined automatically
        if args.ndk_version is not None:
            warning('--ndk-version is deprecated and no longer necessary, '
                    'the value you passed is ignored')
        if 'ANDROIDNDKVER' in environ:
            warning('$ANDROIDNDKVER is deprecated and no longer necessary, '
                    'the value you set is ignored')

    def hook(self, name):
        if not self.args.hook:
            return
        if not hasattr(self, "hook_module"):
            # first time, try to load the hook module
            self.hook_module = imp.load_source("pythonforandroid.hook",
                                               self.args.hook)
        if hasattr(self.hook_module, name):
            info("Hook: execute {}".format(name))
            getattr(self.hook_module, name)(self)
        else:
            info("Hook: ignore {}".format(name))
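
    # A hook module (passed via --hook) is plain Python. As an illustrative
    # sketch, it might contain:
    #
    #     def before_apk_build(toolchain):
    #         print('about to build', toolchain.args.dist_name)
    #
    # Hook names that the module does not define are simply ignored.
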
    @property
    def default_storage_dir(self):
        udd = user_data_dir('python-for-android')
        if ' ' in udd:
            udd = '~/.python-for-android'
        return udd

    @staticmethod
    def _read_configuration():
        # search for a .p4a configuration file in the current directory
        if not exists(".p4a"):
            return
        info("Reading .p4a configuration")
        with open(".p4a") as fd:
            lines = fd.readlines()
        lines = [shlex.split(line)
                 for line in lines if not line.startswith("#")]
        for line in lines:
            for arg in line:
                sys.argv.append(arg)
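
    # A .p4a file just holds extra command line options, one or more per
    # line; an illustrative example:
    #
    #     --dist-name example_dist
    #     --requirements python3,kivy --arch arm64-v8a
    #
    # Every token is appended to sys.argv before argument parsing runs.
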
    def recipes(self, args):
        ctx = self.ctx
        if args.compact:
            print(" ".join(set(Recipe.list_recipes(ctx))))
        else:
            for name in sorted(Recipe.list_recipes(ctx)):
                try:
                    recipe = Recipe.get_recipe(name, ctx)
                except IOError:
                    warning('Recipe "{}" could not be loaded'.format(name))
                    continue
                except SyntaxError:
                    import traceback
                    traceback.print_exc()
                    warning(('Recipe "{}" could not be loaded due to a '
                             'syntax error').format(name))
                    continue
                version = str(recipe.version)
                print('{Fore.BLUE}{Style.BRIGHT}{recipe.name:<12} '
                      '{Style.RESET_ALL}{Fore.LIGHTBLUE_EX}'
                      '{version:<8}{Style.RESET_ALL}'.format(
                          recipe=recipe, Fore=Out_Fore, Style=Out_Style,
                          version=version))
                print('    {Fore.GREEN}depends: {recipe.depends}'
                      '{Fore.RESET}'.format(recipe=recipe, Fore=Out_Fore))
                if recipe.conflicts:
                    print('    {Fore.RED}conflicts: {recipe.conflicts}'
                          '{Fore.RESET}'
                          .format(recipe=recipe, Fore=Out_Fore))
                if recipe.opt_depends:
                    print('    {Fore.YELLOW}optional depends: '
                          '{recipe.opt_depends}{Fore.RESET}'
                          .format(recipe=recipe, Fore=Out_Fore))

    def bootstraps(self, _args):
        """List all the bootstraps available to build with."""
        for bs in Bootstrap.list_bootstraps():
            bs = Bootstrap.get_bootstrap(bs, self.ctx)
            print('{Fore.BLUE}{Style.BRIGHT}{bs.name}{Style.RESET_ALL}'
                  .format(bs=bs, Fore=Out_Fore, Style=Out_Style))
            print('    {Fore.GREEN}depends: {bs.recipe_depends}{Fore.RESET}'
                  .format(bs=bs, Fore=Out_Fore))

    def clean(self, args):
        components = args.component

        component_clean_methods = {
            'all': self.clean_all,
            'dists': self.clean_dists,
            'distributions': self.clean_dists,
            'builds': self.clean_builds,
            'bootstrap_builds': self.clean_bootstrap_builds,
            'downloads': self.clean_download_cache}

        for component in components:
            if component not in component_clean_methods:
                raise BuildInterruptingException((
                    'Asked to clean "{}" but this argument is not '
                    'recognised'.format(component)))
            component_clean_methods[component](args)

    def clean_all(self, args):
        """Delete all build components: the package cache, package builds,
        bootstrap builds and distributions."""
        self.clean_dists(args)
        self.clean_builds(args)
        self.clean_download_cache(args)

    def clean_dists(self, _args):
        """Delete all compiled distributions in the internal distribution
        directory."""
        ctx = self.ctx
        if exists(ctx.dist_dir):
            shutil.rmtree(ctx.dist_dir)

    def clean_bootstrap_builds(self, _args):
        """Delete all the bootstrap builds."""
        if exists(join(self.ctx.build_dir, 'bootstrap_builds')):
            shutil.rmtree(join(self.ctx.build_dir, 'bootstrap_builds'))
        # for bs in Bootstrap.list_bootstraps():
        #     bs = Bootstrap.get_bootstrap(bs, self.ctx)
        #     if bs.build_dir and exists(bs.build_dir):
        #         info('Cleaning build for {} bootstrap.'.format(bs.name))
        #         shutil.rmtree(bs.build_dir)

    def clean_builds(self, _args):
        """Delete all build caches for each recipe, python-install, java code
        and compiled libs collection.

        This does *not* delete the package download cache or the final
        distributions. You can also use clean_recipe_build to delete the build
        of a specific recipe.
        """
        ctx = self.ctx
        if exists(ctx.build_dir):
            shutil.rmtree(ctx.build_dir)
        if exists(ctx.python_installs_dir):
            shutil.rmtree(ctx.python_installs_dir)
        libs_dir = join(self.ctx.build_dir, 'libs_collections')
        if exists(libs_dir):
            shutil.rmtree(libs_dir)

    def clean_recipe_build(self, args):
        """Deletes the build files of the given recipe.

        This is intended for debug purposes. You may experience
        strange behaviour or problems with some recipes if their
        build has made unexpected state changes. If this happens, run
        clean_builds, or attempt to clean other recipes until things
        work again.
        """
        recipe = Recipe.get_recipe(args.recipe, self.ctx)
        info('Cleaning build for {} recipe.'.format(recipe.name))
        recipe.clean_build()
        if not args.no_clean_dists:
            self.clean_dists(args)

    def clean_download_cache(self, args):
        """Deletes the download cache for the recipes passed as arguments.
        If no argument is passed, it'll delete *all* downloaded caches. ::

            p4a clean_download_cache kivy,pyjnius

        This does *not* delete the build caches or final distributions.
        """
        ctx = self.ctx
        if hasattr(args, 'recipes') and args.recipes:
            for package in args.recipes:
                remove_path = join(ctx.packages_path, package)
                if exists(remove_path):
                    shutil.rmtree(remove_path)
                    info('Download cache removed for: "{}"'.format(package))
                else:
                    warning('No download cache found for "{}", skipping'.format(
                        package))
        else:
            if exists(ctx.packages_path):
                shutil.rmtree(ctx.packages_path)
                info('Download cache removed.')
            else:
                print('No cache found at "{}"'.format(ctx.packages_path))

    @require_prebuilt_dist
    def export_dist(self, args):
        """Copies a created dist to an output dir.

        This makes it easy to navigate to the dist to investigate it
        or call build.py, though you do not in general need to do this
        and can use the apk command instead.
        """
        ctx = self.ctx
        dist = dist_from_args(ctx, args)
        if dist.needs_build:
            raise BuildInterruptingException(
                'You asked to export a dist, but there is no dist '
                'with suitable recipes available. For now, you must '
                'create one first with the create argument.')
        if args.symlink:
            shprint(sh.ln, '-s', dist.dist_dir, args.output_dir)
        else:
            shprint(sh.cp, '-r', dist.dist_dir, args.output_dir)

    @property
    def _dist(self):
        ctx = self.ctx
        dist = dist_from_args(ctx, self.args)
        return dist

    @require_prebuilt_dist
    def apk(self, args):
        """Create an APK using the given distribution."""

        ctx = self.ctx
        dist = self._dist

        # Manually fixing these arguments at the string stage is
        # unsatisfactory and should probably be changed somehow, but
        # we can't leave it until later as the build.py scripts assume
        # they are in the current directory.
        fix_args = ('--dir', '--private', '--add-jar', '--add-source',
                    '--whitelist', '--blacklist', '--presplash', '--icon')
        unknown_args = args.unknown_args
        for i, arg in enumerate(unknown_args):
            argx = arg.split('=')
            if argx[0] in fix_args:
                if len(argx) > 1:
                    unknown_args[i] = '='.join(
                        (argx[0], realpath(expanduser(argx[1]))))
                elif i + 1 < len(unknown_args):
                    unknown_args[i+1] = realpath(expanduser(unknown_args[i+1]))

        env = os.environ.copy()
        if args.build_mode == 'release':
            if args.keystore:
                env['P4A_RELEASE_KEYSTORE'] = realpath(expanduser(args.keystore))
            if args.signkey:
                env['P4A_RELEASE_KEYALIAS'] = args.signkey
            if args.keystorepw:
                env['P4A_RELEASE_KEYSTORE_PASSWD'] = args.keystorepw
            if args.signkeypw:
                env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.signkeypw
            elif args.keystorepw and 'P4A_RELEASE_KEYALIAS_PASSWD' not in env:
                env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.keystorepw
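        # For a release build the environment above typically ends up holding
        # (illustrative values): P4A_RELEASE_KEYSTORE=/home/me/release.keystore,
        # P4A_RELEASE_KEYALIAS=mykey, plus the two password variables, which
        # the packaging step can then read when signing the APK.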

        build = imp.load_source('build', join(dist.dist_dir, 'build.py'))
        with current_directory(dist.dist_dir):
            self.hook("before_apk_build")
            os.environ["ANDROID_API"] = str(self.ctx.android_api)
            build_args = build.parse_args(args.unknown_args)
            self.hook("after_apk_build")
            self.hook("before_apk_assemble")

            build_type = ctx.java_build_tool
            if build_type == 'auto':
                info('Selecting java build tool:')

                build_tools_versions = os.listdir(join(ctx.sdk_dir,
                                                       'build-tools'))
                build_tools_versions = sorted(build_tools_versions,
                                              key=LooseVersion)
                build_tools_version = build_tools_versions[-1]
                info(('Detected highest available build tools '
                      'version to be {}').format(build_tools_version))

                if build_tools_version >= '25.0' and exists('gradlew'):
                    build_type = 'gradle'
                    info('    Building with gradle, as gradle executable is '
                         'present')
                else:
                    build_type = 'ant'
                    if build_tools_version < '25.0':
                        info(('    Building with ant, as the highest '
                              'build-tools-version is only {}').format(
                            build_tools_version))
                    else:
                        info('    Building with ant, as no gradle executable '
                             'detected')

            if build_type == 'gradle':
                # gradle-based build
                env["ANDROID_NDK_HOME"] = self.ctx.ndk_dir
                env["ANDROID_HOME"] = self.ctx.sdk_dir

                gradlew = sh.Command('./gradlew')
                if exists('/usr/bin/dos2unix'):
                    # .../dists/bdisttest_python3/gradlew
                    # .../build/bootstrap_builds/sdl2-python3crystax/gradlew
                    # if running docker on windows, gradlew contains CRLF
                    output = shprint(
                        sh.Command('dos2unix'), gradlew._path.decode('utf8'),
                        _tail=20, _critical=True, _env=env
                    )
                if args.build_mode == "debug":
                    gradle_task = "assembleDebug"
                elif args.build_mode == "release":
                    gradle_task = "assembleRelease"
                else:
                    raise BuildInterruptingException(
                        "Unknown build mode {} for apk()".format(args.build_mode))
                output = shprint(gradlew, "--console=plain", gradle_task, _tail=20,
                                 _critical=True, _env=env)

                # gradle outputs the APK somewhere else, and the filename
                # doesn't contain the version number
                apk_dir = join(dist.dist_dir,
                               "build", "outputs", "apk",
                               args.build_mode)
                apk_glob = "*-{}.apk"
                apk_add_version = True

            else:
                # ant-based build
                try:
                    ant = sh.Command('ant')
                except sh.CommandNotFound:
                    raise BuildInterruptingException(
                        'Could not find ant binary, please install it '
                        'and make sure it is in your $PATH.')
                output = shprint(ant, args.build_mode, _tail=20,
                                 _critical=True, _env=env)
                apk_dir = join(dist.dist_dir, "bin")
                apk_glob = "*-*-{}.apk"
                apk_add_version = False

            self.hook("after_apk_assemble")

        info_main('# Copying APK to current directory')

        apk_re = re.compile(r'.*Package: (.*\.apk)$')
        apk_file = None
        for line in reversed(output.splitlines()):
            m = apk_re.match(line)
            if m:
                apk_file = m.groups()[0]
                break

        if not apk_file:
            info_main('# APK filename not found in build output. Guessing...')
            if args.build_mode == "release":
                suffixes = ("release", "release-unsigned")
            else:
                suffixes = ("debug", )
            for suffix in suffixes:
                apks = glob.glob(join(apk_dir, apk_glob.format(suffix)))
                if apks:
                    if len(apks) > 1:
                        info('More than one built APK found... guessing you '
                             'just built {}'.format(apks[-1]))
                    apk_file = apks[-1]
                    break
            else:
                raise BuildInterruptingException('Couldn\'t find the built APK')
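        # The regex above looks for a build-output line of the form
        # '... Package: /path/to/something-debug.apk' (illustrative); if no
        # such line exists, the glob-based guess above is used instead.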

        info_main('# Found APK file: {}'.format(apk_file))
        if apk_add_version:
            info('# Add version number to APK')
            apk_name, apk_suffix = basename(apk_file).split("-", 1)
            apk_file_dest = "{}-{}-{}".format(
                apk_name, build_args.version, apk_suffix)
            info('# APK renamed to {}'.format(apk_file_dest))
            shprint(sh.cp, apk_file, apk_file_dest)
        else:
            shprint(sh.cp, apk_file, './')

    @require_prebuilt_dist
    def create(self, args):
        """Create a distribution directory if it doesn't already exist, run
        any recipes if necessary, and build the apk.
        """
        pass  # The decorator does everything

    def archs(self, _args):
        """List the target architectures available to be built for."""
        print('{Style.BRIGHT}Available target architectures are:'
              '{Style.RESET_ALL}'.format(Style=Out_Style))
        for arch in self.ctx.archs:
            print('    {}'.format(arch.arch))

    def dists(self, args):
        """The same as :meth:`distributions`."""
        self.distributions(args)

    def distributions(self, _args):
        """Lists all distributions currently available (i.e. that have already
        been built)."""
        ctx = self.ctx
        dists = Distribution.get_distributions(ctx)

        if dists:
            print('{Style.BRIGHT}Distributions currently installed are:'
                  '{Style.RESET_ALL}'.format(Style=Out_Style, Fore=Out_Fore))
            pretty_log_dists(dists, print)
        else:
            print('{Style.BRIGHT}There are no dists currently built.'
                  '{Style.RESET_ALL}'.format(Style=Out_Style))

    def delete_dist(self, _args):
        dist = self._dist
        if not dist.folder_exists():
            info('No dist exists that matches your specifications, '
                 'exiting without deleting.')
            return
        dist.delete()

    def sdk_tools(self, args):
        """Runs the android binary from the detected SDK directory, passing
        all arguments straight to it. This binary is used to install
        e.g. platform-tools for different API level targets. This is
        intended as a convenience function if android is not in your
        $PATH.
        """
        ctx = self.ctx
        ctx.prepare_build_environment(user_sdk_dir=self.sdk_dir,
                                      user_ndk_dir=self.ndk_dir,
                                      user_android_api=self.android_api,
                                      user_ndk_api=self.ndk_api)
        android = sh.Command(join(ctx.sdk_dir, 'tools', args.tool))
        output = android(
            *args.unknown_args, _iter=True, _out_bufsize=1, _err_to_out=True)
        for line in output:
            sys.stdout.write(line)
            sys.stdout.flush()

    def adb(self, args):
        """Runs the adb binary from the detected SDK directory, passing all
        arguments straight to it. This is intended as a convenience
        function if adb is not in your $PATH.
        """
        self._adb(args.unknown_args)

    def logcat(self, args):
        """Runs ``adb logcat`` using the adb binary from the detected SDK
        directory. All extra args are passed as arguments to logcat."""
        self._adb(['logcat'] + args.unknown_args)

    def _adb(self, commands):
        """Call the adb executable from the SDK, passing the given commands as
        arguments."""
        ctx = self.ctx
        ctx.prepare_build_environment(user_sdk_dir=self.sdk_dir,
                                      user_ndk_dir=self.ndk_dir,
                                      user_android_api=self.android_api,
                                      user_ndk_api=self.ndk_api)
        if platform in ('win32', 'cygwin'):
            adb = sh.Command(join(ctx.sdk_dir, 'platform-tools', 'adb.exe'))
        else:
            adb = sh.Command(join(ctx.sdk_dir, 'platform-tools', 'adb'))
        info_notify('Starting adb...')
        output = adb(*commands, _iter=True, _out_bufsize=1, _err_to_out=True)
        for line in output:
            sys.stdout.write(line)
            sys.stdout.flush()
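
    # For example, self._adb(['devices']) streams the output of
    # 'adb devices' line by line to stdout, exactly as adb() and logcat()
    # above do for their own argument lists.
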
    def build_status(self, _args):
        """Print the status of the current builds: the bootstraps and recipes
        that appear to have been built already."""
        print('{Style.BRIGHT}Bootstraps whose core components are probably '
              'already built:{Style.RESET_ALL}'.format(Style=Out_Style))

        bootstrap_dir = join(self.ctx.build_dir, 'bootstrap_builds')
        if exists(bootstrap_dir):
            for filen in os.listdir(bootstrap_dir):
                print('    {Fore.GREEN}{Style.BRIGHT}{filen}{Style.RESET_ALL}'
                      .format(filen=filen, Fore=Out_Fore, Style=Out_Style))

        print('{Style.BRIGHT}Recipes that are probably already built:'
              '{Style.RESET_ALL}'.format(Style=Out_Style))
        other_builds_dir = join(self.ctx.build_dir, 'other_builds')
        if exists(other_builds_dir):
            for filen in sorted(os.listdir(other_builds_dir)):
                name = filen.split('-')[0]
                dependencies = filen.split('-')[1:]
                recipe_str = ('    {Style.BRIGHT}{Fore.GREEN}{name}'
                              '{Style.RESET_ALL}'.format(
                                  Style=Out_Style, name=name, Fore=Out_Fore))
                if dependencies:
                    recipe_str += (
                        ' ({Fore.BLUE}with ' + ', '.join(dependencies) +
                        '{Fore.RESET})').format(Fore=Out_Fore)
                recipe_str += '{Style.RESET_ALL}'.format(Style=Out_Style)
                print(recipe_str)
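
# A typical end-to-end invocation (illustrative; pass-through arguments after
# the flags defined above are handed on to the dist's build.py):
#
#     p4a apk --requirements=python3,kivy --arch=arm64-v8a \
#         --dist-name=example_dist --private=/path/to/my/app
#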

def main():
    try:
        ToolchainCL()
    except BuildInterruptingException as exc:
        handle_build_exception(exc)


if __name__ == "__main__":
    main()