2018-10-28 22:07:10 +01:00
|
|
|
#!/usr/bin/env python3
|
2015-02-01 18:35:28 +01:00
|
|
|
"""
|
|
|
|
Tool for compiling iOS toolchain
|
|
|
|
================================
|
|
|
|
|
|
|
|
This tool intends to replace all of the previous shell-script tools in tools/.
|
|
|
|
"""
|
|
|
|
|
|
|
|
import sys
|
|
|
|
from sys import stdout
|
|
|
|
from os.path import join, dirname, realpath, exists, isdir, basename
|
2015-08-15 21:22:41 +02:00
|
|
|
from os import listdir, unlink, makedirs, environ, chdir, getcwd, walk, remove
|
2015-02-01 18:35:28 +01:00
|
|
|
import zipfile
|
|
|
|
import tarfile
|
|
|
|
import importlib
|
2015-02-10 12:24:20 +01:00
|
|
|
import io
|
|
|
|
import json
|
2015-02-01 18:35:28 +01:00
|
|
|
import shutil
|
2015-02-23 22:51:16 +01:00
|
|
|
import fnmatch
|
2018-11-09 23:53:36 +01:00
|
|
|
import tempfile
|
2015-02-10 12:24:20 +01:00
|
|
|
from datetime import datetime
|
2015-02-01 18:35:28 +01:00
|
|
|
try:
|
2016-11-06 09:30:58 +01:00
|
|
|
from urllib.request import FancyURLopener, urlcleanup
|
2015-02-01 18:35:28 +01:00
|
|
|
except ImportError:
|
2016-11-06 09:30:58 +01:00
|
|
|
from urllib import FancyURLopener, urlcleanup
|
2018-10-28 22:07:10 +01:00
|
|
|
# pbxproj is a hard requirement (used to update the generated Xcode
# project); fail loudly and with a non-zero exit code if it is missing.
try:
    from pbxproj import XcodeProject
    from pbxproj.pbxextensions.ProjectFiles import FileOptions
except ImportError:
    print("ERROR: pbxproj requirements is missing")
    print("To install: pip install -r requirements.txt")
    # Exit non-zero: this is a fatal missing-dependency error, and callers
    # (CI jobs, shell scripts) must be able to detect the failure.
    # The previous exit(0) reported success on this error path.
    sys.exit(1)
|
2015-02-14 13:05:54 +01:00
|
|
|
curdir = dirname(__file__)
|
|
|
|
sys.path.insert(0, join(curdir, "tools", "external"))
|
|
|
|
|
|
|
|
import sh
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
IS_PY3 = sys.version_info[0] >= 3
|
|
|
|
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
def shprint(command, *args, **kwargs):
    """Run an ``sh`` command, streaming its merged stdout/stderr to stdout.

    The output is forced through an ascii round-trip so undecodable
    characters are replaced instead of raising.
    """
    kwargs.update({
        "_iter": True,
        "_out_bufsize": 1,
        "_err_to_out": True,
    })
    for chunk in command(*args, **kwargs):
        printable = chunk.encode("ascii", "replace").decode()
        stdout.write(printable)
|
2015-02-01 18:35:28 +01:00
|
|
|
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
def cache_execution(f):
    """Decorator caching successful runs of a recipe step in ctx.state.

    The state key is ``<recipe-name>.<step-name>[.<arg>...]``; once the key
    is present, subsequent calls are skipped unless ``force=True`` is
    passed.  A companion ``<key>.at`` entry records the UTC completion
    time.  The wrapped function's return value is discarded.
    """
    from functools import wraps

    @wraps(f)  # preserve __name__/__doc__ of the wrapped step
    def _cache_execution(self, *args, **kwargs):
        state = self.ctx.state
        key = "{}.{}".format(self.name, f.__name__)
        force = kwargs.pop("force", False)
        if args:
            # Positional args (e.g. the arch) are part of the cache key.
            for arg in args:
                key += ".{}".format(arg)
        key_time = "{}.at".format(key)
        if key in state and not force:
            print("# (ignored) {} {}".format(f.__name__.capitalize(), self.name))
            return
        print("{} {}".format(f.__name__.capitalize(), self.name))
        f(self, *args, **kwargs)
        state[key] = True
        state[key_time] = str(datetime.utcnow())
    return _cache_execution
|
|
|
|
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
class ChromeDownloader(FancyURLopener):
    # Pretend to be a desktop Chrome browser: some download servers refuse
    # requests carrying the default Python urllib User-Agent string.
    version = (
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
        '(KHTML, like Gecko) Chrome/28.0.1500.71 Safari/537.36')

# Module-level download helper used by Recipe.download_file().
urlretrieve = ChromeDownloader().retrieve
|
|
|
|
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
class JsonStore(object):
    """Replacement of shelve using json, needed for support python 2 and 3.

    A persistent dict-like store: every mutation is synced to
    ``self.filename`` as UTF-8 encoded JSON.
    """

    def __init__(self, filename):
        super(JsonStore, self).__init__()
        self.filename = filename
        self.data = {}
        if exists(filename):
            try:
                with io.open(filename, encoding='utf-8') as fd:
                    self.data = json.load(fd)
            except ValueError:
                # Corrupt/empty state file: start fresh instead of dying.
                print("Unable to read the state.db, content will be replaced.")

    def __getitem__(self, key):
        return self.data[key]

    def __setitem__(self, key, value):
        self.data[key] = value
        self.sync()

    def __delitem__(self, key):
        del self.data[key]
        self.sync()

    def __contains__(self, item):
        return item in self.data

    def get(self, item, default=None):
        return self.data.get(item, default)

    def keys(self):
        return self.data.keys()

    def remove_all(self, prefix):
        """Delete every key starting with `prefix`, then sync once."""
        # Iterate over a snapshot: we mutate self.data while looping.
        for key in tuple(self.data.keys()):
            if not key.startswith(prefix):
                continue
            del self.data[key]
        self.sync()

    def sync(self):
        """Write the current data to disk as UTF-8 JSON."""
        # http://stackoverflow.com/questions/12309269/write-json-data-to-file-in-python/14870531#14870531
        if IS_PY3:
            # Explicit utf-8: ensure_ascii=False may emit non-ASCII text,
            # and the platform's default locale encoding is not guaranteed
            # to accept it.
            with open(self.filename, 'w', encoding='utf-8') as fd:
                json.dump(self.data, fd, ensure_ascii=False)
        else:
            with io.open(self.filename, 'w', encoding='utf-8') as fd:
                fd.write(unicode(json.dumps(self.data, ensure_ascii=False)))
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
class Arch(object):
    # Base class for one (sdk, architecture) build target.  Subclasses
    # provide the class attributes: sdk, arch, triple, version_min, sysroot.

    def __init__(self, ctx):
        super(Arch, self).__init__()
        self.ctx = ctx
        # Lazily-created wrapper shell script for the C compiler; see
        # get_env().  None until the first get_env() call.
        self._ccsh = None

    def __str__(self):
        return self.arch

    @property
    def include_dirs(self):
        # Absolute include directories registered by recipes; each entry
        # may contain an "{arch}" placeholder, expanded with this arch.
        return [
            "{}/{}".format(
                self.ctx.include_dir,
                d.format(arch=self))
            for d in self.ctx.include_dirs]

    def get_env(self):
        """Build the environment dict (CC, CXX, AR, LD, CFLAGS, LDFLAGS,
        OTHER_CFLAGS, OTHER_LDFLAGS, optional CCACHE_*) used to
        cross-compile for this architecture.
        """
        # -I flags for every recipe-registered include dir...
        include_dirs = [
            "-I{}/{}".format(
                self.ctx.include_dir,
                d.format(arch=self))
            for d in self.ctx.include_dirs]
        # ...plus the per-arch dist include directory.
        include_dirs += ["-I{}".format(
            join(self.ctx.dist_dir, "include", self.arch))]

        env = {}
        # Locate the compilers for the target SDK via xcrun.
        cc = sh.xcrun("-find", "-sdk", self.sdk, "clang").strip()
        cxx = sh.xcrun("-find", "-sdk", self.sdk, "clang++").strip()

        # we put the flags in CC / CXX as sometimes the ./configure test
        # with the preprocessor (aka CC -E) without CFLAGS, which fails for
        # cross compiled projects
        flags = " ".join([
            "--sysroot", self.sysroot,
            "-arch", self.arch,
            "-pipe", "-no-cpp-precomp",
        ])
        cc += " " + flags
        cxx += " " + flags

        # Optional ccache support; opt out with USE_CCACHE=0.
        use_ccache = environ.get("USE_CCACHE", "1")
        ccache = None
        if use_ccache == "1":
            ccache = sh.which('ccache')
        if ccache:
            ccache = ccache.strip()
            env["USE_CCACHE"] = "1"
            env["CCACHE"] = ccache
            # Forward any user-provided CCACHE_* configuration.
            env.update({k: v for k, v in environ.items() if k.startswith('CCACHE_')})
            env.setdefault('CCACHE_MAXSIZE', '10G')
            env.setdefault('CCACHE_HARDLINK', 'true')
            env.setdefault('CCACHE_SLOPPINESS', ('file_macro,time_macros,'
                'include_file_mtime,include_file_ctime,file_stat_matches'))

        # Some build systems need CC/CXX to be a single executable path,
        # so the full command lines are wrapped into small generated
        # shell scripts (kept alive as NamedTemporaryFile attributes).
        if not self._ccsh:
            self._ccsh = tempfile.NamedTemporaryFile()
            self._cxxsh = tempfile.NamedTemporaryFile()
            sh.chmod("+x", self._ccsh.name)
            sh.chmod("+x", self._cxxsh.name)
            self._ccsh.write(b'#!/bin/sh\n')
            self._cxxsh.write(b'#!/bin/sh\n')
            if ccache:
                print("CC and CXX will use ccache")
                self._ccsh.write(
                    (ccache + ' ' + cc + ' "$@"\n').encode("utf8"))
                self._cxxsh.write(
                    (ccache + ' ' + cxx + ' "$@"\n').encode("utf8"))
            else:
                print("CC and CXX will not use ccache")
                self._ccsh.write(
                    (cc + ' "$@"\n').encode("utf8"))
                self._cxxsh.write(
                    (cxx + ' "$@"\n').encode("utf8"))
            self._ccsh.flush()
            self._cxxsh.flush()

        env["CC"] = self._ccsh.name
        env["CXX"] = self._cxxsh.name
        env["AR"] = sh.xcrun("-find", "-sdk", self.sdk, "ar").strip()
        env["LD"] = sh.xcrun("-find", "-sdk", self.sdk, "ld").strip()
        env["OTHER_CFLAGS"] = " ".join(include_dirs)
        env["OTHER_LDFLAGS"] = " ".join([
            "-L{}/{}".format(self.ctx.dist_dir, "lib"),
        ])
        env["CFLAGS"] = " ".join([
            "-O3",
            self.version_min
        ] + include_dirs)
        env["LDFLAGS"] = " ".join([
            "-arch", self.arch,
            "--sysroot", self.sysroot,
            "-L{}/{}".format(self.ctx.dist_dir, "lib"),
            "-lsqlite3",
            self.version_min
        ])
        return env
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
class ArchSimulator(Arch):
    # 32-bit iOS simulator target (x86, runs on the mac host).
    sdk = "iphonesimulator"
    arch = "i386"
    triple = "i386-apple-darwin11"
    version_min = "-miphoneos-version-min=6.0.0"
    # NOTE: evaluated at import time -- requires Xcode command line tools.
    sysroot = sh.xcrun("--sdk", "iphonesimulator", "--show-sdk-path").strip()
|
|
|
|
|
|
|
|
|
|
|
|
class Arch64Simulator(Arch):
    # 64-bit iOS simulator target (x86_64, runs on the mac host).
    sdk = "iphonesimulator"
    arch = "x86_64"
    triple = "x86_64-apple-darwin13"
    version_min = "-miphoneos-version-min=7.0"
    # NOTE: evaluated at import time -- requires Xcode command line tools.
    sysroot = sh.xcrun("--sdk", "iphonesimulator", "--show-sdk-path").strip()
|
|
|
|
|
|
|
|
|
|
|
|
class ArchIOS(Arch):
    # 32-bit iOS device target.
    sdk = "iphoneos"
    arch = "armv7"
    triple = "arm-apple-darwin11"
    version_min = "-miphoneos-version-min=6.0.0"
    # NOTE: evaluated at import time -- requires Xcode command line tools.
    sysroot = sh.xcrun("--sdk", "iphoneos", "--show-sdk-path").strip()
|
|
|
|
|
|
|
|
|
|
|
|
class Arch64IOS(Arch):
    # 64-bit iOS device target.
    sdk = "iphoneos"
    arch = "arm64"
    triple = "aarch64-apple-darwin13"
    version_min = "-miphoneos-version-min=7.0"
    # NOTE: evaluated at import time -- requires Xcode command line tools.
    sysroot = sh.xcrun("--sdk", "iphoneos", "--show-sdk-path").strip()
|
2017-05-30 16:05:54 +02:00
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
|
|
|
|
class Graph(object):
    """Dependency graph supporting topological ordering.

    Taken from python-for-android/depsort.
    """

    def __init__(self):
        # Maps each package to the set of packages it depends on.
        self.graph = {}

    def add(self, dependent, dependency):
        """Add a dependency relationship to the graph."""
        for node in (dependent, dependency):
            self.graph.setdefault(node, set())
        if dependency != dependent:
            self.graph[dependent].add(dependency)

    def add_optional(self, dependent, dependency):
        """Add an optional (ordering only) dependency relationship.

        Only call this after all mandatory requirements are added: the
        edge is recorded only when both nodes already exist.
        """
        if dependent in self.graph and dependency in self.graph:
            self.add(dependent, dependency)

    def find_order(self):
        """Yield the packages in dependency order (topological sort).

        :raises ValueError: if the graph contains a dependency cycle.
        """
        pending = {node: set(deps) for node, deps in self.graph.items()}
        while pending:
            # Nodes whose dependencies have all been emitted already;
            # sorted for a predictable order when there is more than one.
            ready = sorted(node for node, deps in pending.items() if not deps)
            if not ready:
                raise ValueError('Dependency cycle detected! %s' % pending)
            for node in ready:
                yield node
                del pending[node]
                for deps in pending.values():
                    deps.discard(node)
|
|
|
|
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
class Context(object):
    """Global build context: SDK versions, directory layout, archs,
    detected tools and the persistent JsonStore state.
    """
    env = environ.copy()
    root_dir = None
    cache_dir = None
    build_dir = None
    dist_dir = None
    install_dir = None
    ccache = None
    cython = None
    sdkver = None
    sdksimver = None
    so_suffix = None  # set by one of the hostpython

    def __init__(self):
        super(Context, self).__init__()
        self.include_dirs = []

        # Collect all requirement failures and exit once at the end.
        ok = True

        sdks = sh.xcodebuild("-showsdks").splitlines()

        # get the latest iphoneos
        iphoneos = [x for x in sdks if "iphoneos" in x]
        if not iphoneos:
            print("No iphone SDK installed")
            ok = False
        else:
            iphoneos = iphoneos[0].split()[-1].replace("iphoneos", "")
            self.sdkver = iphoneos

        # get the latest iphonesimulator version
        iphonesim = [x for x in sdks if "iphonesimulator" in x]
        if not iphonesim:
            ok = False
            print("Error: No iphonesimulator SDK installed")
        else:
            iphonesim = iphonesim[0].split()[-1].replace("iphonesimulator", "")
            self.sdksimver = iphonesim

        # get the path for Developer
        self.devroot = "{}/Platforms/iPhoneOS.platform/Developer".format(
            sh.xcode_select("-print-path").strip())

        # path to the iOS SDK
        self.iossdkroot = "{}/SDKs/iPhoneOS{}.sdk".format(
            self.devroot, self.sdkver)

        # root of the toolchain
        self.root_dir = realpath(dirname(__file__))
        self.build_dir = "{}/build".format(self.root_dir)
        self.cache_dir = "{}/.cache".format(self.root_dir)
        self.dist_dir = "{}/dist".format(self.root_dir)
        self.install_dir = "{}/dist/root".format(self.root_dir)
        self.include_dir = "{}/dist/include".format(self.root_dir)
        # Architectures built by default (32-bit simulator disabled).
        self.archs = (
            # ArchSimulator(self),
            Arch64Simulator(self),
            ArchIOS(self),
            Arch64IOS(self))

        # path to some tools
        self.ccache = sh.which("ccache")
        if not self.ccache:
            #print("ccache is missing, the build will not be optimized in the future.")
            pass
        # Prefer a python2-specific cython binary when present.
        for cython_fn in ("cython-2.7", "cython"):
            cython = sh.which(cython_fn)
            if cython:
                self.cython = cython
                break
        if not self.cython:
            ok = False
            print("Missing requirement: cython is not installed")

        # check the basic tools
        for tool in ("pkg-config", "autoconf", "automake", "libtool"):
            if not sh.which(tool):
                print("Missing requirement: {} is not installed".format(
                    tool))

        if not ok:
            sys.exit(1)

        # Optional parallel (de)compressors used by Recipe.extract_file().
        self.use_pigz = sh.which('pigz')
        self.use_pbzip2 = sh.which('pbzip2')

        try:
            num_cores = int(sh.sysctl('-n', 'hw.ncpu'))
        except Exception:
            num_cores = None
        self.num_cores = num_cores if num_cores else 4  # default to 4 if we can't detect

        ensure_dir(self.root_dir)
        ensure_dir(self.build_dir)
        ensure_dir(self.cache_dir)
        ensure_dir(self.dist_dir)
        ensure_dir(self.install_dir)
        ensure_dir(self.include_dir)
        ensure_dir(join(self.include_dir, "common"))

        # remove the most obvious flags that can break the compilation
        self.env.pop("MACOSX_DEPLOYMENT_TARGET", None)
        self.env.pop("PYTHONDONTWRITEBYTECODE", None)
        self.env.pop("ARCHFLAGS", None)
        self.env.pop("CFLAGS", None)
        self.env.pop("LDFLAGS", None)

        # set the state
        self.state = JsonStore(join(self.dist_dir, "state.db"))

    @property
    def concurrent_make(self):
        # Parallelism flag for make.
        return "-j{}".format(self.num_cores)

    @property
    def concurrent_xcodebuild(self):
        # Parallelism setting for xcodebuild.
        return "IDEBuildOperationMaxNumberOfConcurrentCompileTasks={}".format(self.num_cores)
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2018-11-02 11:44:25 +01:00
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
class Recipe(object):
|
2018-11-02 11:44:25 +01:00
|
|
|
# Default values for per-recipe configuration attributes.  __new__ copies
# any of these that a Recipe subclass did not override onto the class.
props = {
    # True for recipes that only alias/group other recipes.
    "is_alias": False,
    # Version substituted into `url` via "{version}".
    "version": None,
    # Download URL, or a directory name relative to the recipe dir.
    "url": None,
    # Arch names this recipe supports (empty = all context archs).
    "archs": [],
    # Mandatory and ordering-only recipe dependencies.
    "depends": [],
    "optional_depends": [],
    # Single static library produced, or list of several.
    "library": None,
    "libraries": [],
    # Header installation configuration.
    "include_dir": None,
    "include_per_arch": False,
    "include_name": None,
    # Frameworks / sources / Xcode-project additions.
    "frameworks": [],
    "sources": [],
    "pbx_frameworks": [],
    "pbx_libraries": []
}
|
|
|
|
|
|
|
|
def __new__(cls):
    """Backfill any recipe property missing on the subclass with its
    default value from ``cls.props`` before instantiation.
    """
    for prop_name, default in cls.props.items():
        if hasattr(cls, prop_name):
            continue
        setattr(cls, prop_name, default)
    return super(Recipe, cls).__new__(cls)
|
2015-02-01 18:35:28 +01:00
|
|
|
|
|
|
|
# API available for recipes
|
|
|
|
def download_file(self, url, filename, cwd=None):
    """
    Download an `url` to `outfn`

    Returns the (possibly cwd-joined) filename, or None when `url` is
    empty.  Any pre-existing file at the destination is removed first.
    """
    if not url:
        return
    def report_hook(index, blksize, size):
        # Progress callback for urlretrieve; size <= 0 means the server
        # sent no Content-Length, so only a byte count can be shown.
        if size <= 0:
            progression = '{0} bytes'.format(index * blksize)
        else:
            progression = '{0:.2f}%'.format(
                index * blksize * 100. / float(size))
        stdout.write('- Download {}\r'.format(progression))
        stdout.flush()

    if cwd:
        filename = join(cwd, filename)
    if exists(filename):
        unlink(filename)

    # Clean up temporary files just in case before downloading.
    urlcleanup()

    print('Downloading {0}'.format(url))
    urlretrieve(url, filename, report_hook)
    return filename
|
|
|
|
|
|
|
|
def extract_file(self, filename, cwd):
    """
    Extract the `filename` into the directory `cwd`.

    Supports .tgz/.tar.gz, .tbz2/.tar.bz2 and .zip; raises for any
    other extension.  No-op when `filename` is empty.
    """
    if not filename:
        return
    print("Extract {} into {}".format(filename, cwd))
    if filename.endswith(".tgz") or filename.endswith(".tar.gz"):
        # Use parallel gzip when available (detected in Context).
        if self.ctx.use_pigz:
            comp = '--use-compress-program={}'.format(self.ctx.use_pigz)
        else:
            comp = '-z'
        shprint(sh.tar, "-C", cwd, "-xv", comp, "-f", filename)

    elif filename.endswith(".tbz2") or filename.endswith(".tar.bz2"):
        # Use parallel bzip2 when available.
        if self.ctx.use_pbzip2:
            comp = '--use-compress-program={}'.format(self.ctx.use_pbzip2)
        else:
            comp = '-j'
        shprint(sh.tar, "-C", cwd, "-xv", comp, "-f", filename)

    elif filename.endswith(".zip"):
        shprint(sh.unzip, "-d", cwd, filename)

    else:
        print("Error: cannot extract, unrecognized extension for {}".format(
            filename))
        raise Exception()
|
|
|
|
|
|
|
|
def get_archive_rootdir(self, filename):
    """Return the top-level directory name inside the archive `filename`.

    Returns None for a corrupt tar archive (the file is deleted so it is
    re-downloaded on the next run).  Raises for unrecognized extensions.
    """
    if filename.endswith(".tgz") or filename.endswith(".tar.gz") or \
            filename.endswith(".tbz2") or filename.endswith(".tar.bz2"):
        try:
            archive = tarfile.open(filename)
        except tarfile.ReadError:
            print('Error extracting the archive {0}'.format(filename))
            print('This is usually caused by a corrupt download. The file'
                  ' will be removed and re-downloaded on the next run.')
            remove(filename)
            return
        # Close the archive after reading the first member; the previous
        # code leaked the open file handle.
        try:
            root = archive.next().path.split("/")
            return root[0]
        finally:
            archive.close()
    elif filename.endswith(".zip"):
        with zipfile.ZipFile(filename) as zf:
            return dirname(zf.namelist()[0])
    else:
        print("Error: cannot detect root directory")
        print("Unrecognized extension for {}".format(filename))
        raise Exception()
|
|
|
|
|
2015-07-17 20:30:22 +02:00
|
|
|
def apply_patch(self, filename, target_dir=''):
    """
    Apply a patch from the current recipe directory into the current
    build directory.

    `target_dir` defaults to the recipe's build directory.
    """
    target_dir = target_dir or self.build_dir
    print("Apply patch {}".format(filename))
    filename = join(self.recipe_dir, filename)
    # -t: never prompt, -p1: strip the leading path component.
    sh.patch("-t", "-d", target_dir, "-p1", "-i", filename)
|
2015-02-01 18:35:28 +01:00
|
|
|
|
|
|
|
def copy_file(self, filename, dest):
    """Copy `filename` (relative to the recipe dir) to `dest`
    (relative to the build dir).
    """
    print("Copy {} to {}".format(filename, dest))
    source_path = join(self.recipe_dir, filename)
    dest_path = join(self.build_dir, dest)
    shutil.copy(source_path, dest_path)
|
|
|
|
|
2015-02-02 05:22:13 +01:00
|
|
|
def append_file(self, filename, dest):
    """Append the contents of `filename` (recipe dir) onto `dest`
    (build dir).
    """
    print("Append {} to {}".format(filename, dest))
    source_path = join(self.recipe_dir, filename)
    dest_path = join(self.build_dir, dest)
    with open(source_path, "rb") as source:
        payload = source.read()
    with open(dest_path, "ab") as sink:
        sink.write(payload)
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
def has_marker(self, marker):
    """
    Return True if the current build directory has the marker set
    """
    marker_path = join(self.build_dir, ".{}".format(marker))
    return exists(marker_path)
|
|
|
|
|
|
|
|
def set_marker(self, marker):
    """
    Set a marker info the current build directory
    """
    marker_path = join(self.build_dir, ".{}".format(marker))
    with open(marker_path, "w") as fd:
        fd.write("ok")
|
|
|
|
|
2015-02-02 05:22:13 +01:00
|
|
|
def delete_marker(self, marker):
    """
    Delete a specific marker; silently ignore a missing marker file.
    """
    try:
        unlink(join(self.build_dir, ".{}".format(marker)))
    except OSError:
        # The marker may simply not exist; only filesystem errors are
        # expected here.  The previous bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit.
        pass
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-06 01:53:21 +01:00
|
|
|
def get_include_dir(self):
    """
    Return the common include dir for this recipe
    (``<ctx.include_dir>/common/<recipe name>``).
    """
    return join(self.ctx.include_dir, "common", self.name)
|
|
|
|
|
2018-11-02 11:44:25 +01:00
|
|
|
def so_filename(self, name):
    """Return the filename of a library with the appropriate so suffix
    (.so for Python 2.7, .cpython-37m-darwin for Python 3.7)

    `ctx.so_suffix` is set by the hostpython recipe.
    """
    return "{}{}".format(name, self.ctx.so_suffix)
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
@property
def name(self):
    """Recipe name, derived from the module the class is defined in
    (everything after the first dot of the module path).
    """
    module_name = self.__class__.__module__
    head, sep, tail = module_name.partition(".")
    return tail if sep else head
|
|
|
|
|
|
|
|
@property
def archive_fn(self):
    """Local cache path for this recipe's downloaded archive."""
    archive_basename = basename(self.url.format(version=self.version))
    return "{}/{}-{}".format(
        self.ctx.cache_dir, self.name, archive_basename)
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
@property
def filtered_archs(self):
    """Context archs restricted to the ones this recipe supports
    (an empty `self.archs` means all of them).
    """
    return [
        arch for arch in self.ctx.archs
        if not self.archs or arch.arch in self.archs
    ]
|
2015-02-03 00:19:53 +01:00
|
|
|
|
2015-02-23 11:34:36 +01:00
|
|
|
@property
def dist_libraries(self):
    """Paths of every static library this recipe installs in dist/lib."""
    libraries = []
    name = self.name
    # Single-library recipes produce dist/lib/lib<name>.a.
    if not name.startswith("lib"):
        name = "lib{}".format(name)
    if self.library:
        static_fn = join(self.ctx.dist_dir, "lib", "{}.a".format(name))
        libraries.append(static_fn)
    # Multi-library recipes keep each library's own basename.
    for library in self.libraries:
        static_fn = join(self.ctx.dist_dir, "lib", basename(library))
        libraries.append(static_fn)
    return libraries
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
def get_build_dir(self, arch):
    """Return the source/build directory for `arch` (an arch name string)."""
    return join(self.ctx.build_dir, self.name, arch, self.archive_root)
|
2015-02-01 18:35:28 +01:00
|
|
|
# Public Recipe API to be subclassed if needed
|
|
|
|
|
2015-02-02 05:22:13 +01:00
|
|
|
def init_with_ctx(self, ctx):
    """Attach the recipe to the build context and register its include
    directory (per-arch or common) with the context.
    """
    self.ctx = ctx
    include_dir = None
    if self.include_dir:
        include_name = self.include_name or self.name
        if self.include_per_arch:
            # Literal "{arch.arch}" placeholder, expanded later by
            # Arch.include_dirs / Arch.get_env.
            include_dir = join("{arch.arch}", include_name)
        else:
            include_dir = join("common", include_name)
    if include_dir:
        print("Include dir added: {}".format(include_dir))
        self.ctx.include_dirs.append(include_dir)
|
2015-02-02 05:22:13 +01:00
|
|
|
|
2015-02-25 13:37:26 +01:00
|
|
|
def get_recipe_env(self, arch=None):
    """Return the env specialized for the recipe
    (defaults to the first supported arch when `arch` is None).
    """
    target_arch = arch if arch is not None else self.filtered_archs[0]
    return target_arch.get_env()
|
|
|
|
|
2018-11-02 11:44:25 +01:00
|
|
|
def set_hostpython(self, instance, version):
    """Register `instance` as the hostpython recipe providing `version`.

    Only one hostpython may be compiled per distribution; exits with an
    error if a different one is already recorded in the state.
    """
    state = self.ctx.state
    hostpython = state.get("hostpython")
    if hostpython is None:
        state["hostpython"] = instance.name
        state.sync()
    elif hostpython != instance.name:
        print("ERROR: Wanted to use {}".format(instance.name))
        print("ERROR: but hostpython is already provided by {}.".format(
            hostpython))
        print("ERROR: You can have only one hostpython version compiled")
        sys.exit(1)
    self.ctx.python_major = int(version)
    self.ctx.hostpython_ver = version
    self.ctx.hostpython_recipe = instance
|
|
|
|
|
|
|
|
def set_python(self, instance, version):
    """Register `instance` as the target python recipe providing `version`.

    Only one python may be compiled per distribution; exits with an
    error if a different one is already recorded in the state.
    """
    state = self.ctx.state
    python = state.get("python")
    if python is None:
        state["python"] = instance.name
        state.sync()
    elif python != instance.name:
        print("ERROR: Wanted to use {}".format(instance.name))
        print("ERROR: but python is already provided by {}.".format(
            python))
        print("ERROR: You can have only one python version compiled")
        sys.exit(1)
    self.ctx.python_ver = version
    self.ctx.python_recipe = instance
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
@property
def archive_root(self):
    # Name of the archive's top-level directory, computed once from the
    # downloaded archive and cached in the persistent state.
    key = "{}.archive_root".format(self.name)
    value = self.ctx.state.get(key)
    if not value:
        value = self.get_archive_rootdir(self.archive_fn)
        # get_archive_rootdir returns None for a corrupt archive; do not
        # cache that.
        if value is not None:
            self.ctx.state[key] = value
    return value
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
def execute(self):
    # Run the recipe pipeline: download -> extract -> build.  When a
    # custom source dir is forced via the <NAME>_DIR environment
    # variable, drop all cached state first so every step runs again.
    if self.custom_dir:
        self.ctx.state.remove_all(self.name)
    self.download()
    self.extract()
    self.build_all()
|
|
|
|
|
2015-02-12 00:53:08 +01:00
|
|
|
@property
def custom_dir(self):
    """Check if there is a variable name to specify a custom version /
    directory to use instead of the current url.

    Reads the ``<RECIPE-NAME>_DIR`` environment variable; returns the
    path, or None when unset.  Raises ValueError for a non-existent path.
    """
    envname = "{}_DIR".format(self.name.upper())
    d = environ.get(envname)
    if not d:
        return
    if not exists(d):
        raise ValueError("Invalid path passed into {}".format(envname))
    return d
|
|
|
|
|
2018-11-02 11:44:25 +01:00
|
|
|
def init_after_import(cls, ctx):
    """This can be used to dynamically set some variables
    depending of the state
    """
    # NOTE(review): first parameter is named `cls` but there is no
    # @classmethod decorator -- presumably callers invoke this on the
    # class object directly; confirm before adding the decorator.
    pass
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
@cache_execution
def download(self):
    # Fetch the recipe source and record the archive's root directory
    # under "<name>.archive_root" in the persistent state.
    key = "{}.archive_root".format(self.name)
    if self.custom_dir:
        # Custom local source forced via the <NAME>_DIR env variable.
        self.ctx.state[key] = basename(self.custom_dir)
    else:
        src_dir = join(self.recipe_dir, self.url)
        if exists(src_dir):
            # `url` points at a directory shipped with the recipe itself.
            self.ctx.state[key] = basename(src_dir)
            return
        fn = self.archive_fn
        if not exists(fn):
            self.download_file(self.url.format(version=self.version), fn)
        status = self.get_archive_rootdir(self.archive_fn)
        if status is not None:
            self.ctx.state[key] = status
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
@cache_execution
def extract(self):
    # recipe tmp directory
    # Extract (or copy) the source tree once per supported architecture.
    for arch in self.filtered_archs:
        print("Extract {} for {}".format(self.name, arch.arch))
        self.extract_arch(arch.arch)
|
2015-02-02 05:22:13 +01:00
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
def extract_arch(self, arch):
    # Materialize the source tree for one architecture (arch name string)
    # under build/<name>/<arch>/<archive_root>.
    build_dir = join(self.ctx.build_dir, self.name, arch)
    dest_dir = join(build_dir, self.archive_root)
    if self.custom_dir:
        # A custom source dir is always re-copied from scratch.
        if exists(dest_dir):
            shutil.rmtree(dest_dir)
        shutil.copytree(self.custom_dir, dest_dir)
    else:
        # Already extracted: nothing to do.
        if exists(dest_dir):
            return
        src_dir = join(self.recipe_dir, self.url)
        if exists(src_dir):
            # Source shipped inside the recipe directory.
            shutil.copytree(src_dir, dest_dir)
            return
        ensure_dir(build_dir)
        self.extract_file(self.archive_fn, build_dir)
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
@cache_execution
def build(self, arch):
    # Build the recipe for one Arch instance, guarded by on-disk markers:
    # "building" flags an in-progress build, "build_done" a finished one.
    self.build_dir = self.get_build_dir(arch.arch)
    if self.has_marker("building"):
        # A previous run died mid-build; restart from a clean tree.
        print("Warning: {} build for {} has been incomplete".format(
            self.name, arch.arch))
        print("Warning: deleting the build and restarting.")
        shutil.rmtree(self.build_dir)
        self.extract_arch(arch.arch)

    if self.has_marker("build_done"):
        print("Build python for {} already done.".format(arch.arch))
        return

    self.set_marker("building")

    # NOTE: chdir changes the process-wide cwd for the build hooks.
    chdir(self.build_dir)
    print("Prebuild {} for {}".format(self.name, arch.arch))
    self.prebuild_arch(arch)
    print("Build {} for {}".format(self.name, arch.arch))
    self.build_arch(arch)
    print("Postbuild {} for {}".format(self.name, arch.arch))
    self.postbuild_arch(arch)
    self.delete_marker("building")
    self.set_marker("build_done")
|
|
|
|
|
|
|
|
@cache_execution
def build_all(self):
    # Build every supported arch, fuse the per-arch static libraries
    # with lipo, then install headers / frameworks / sources.
    filtered_archs = self.filtered_archs
    print("Build {} for {} (filtered)".format(
        self.name,
        ", ".join([x.arch for x in filtered_archs])))
    for arch in self.filtered_archs:
        self.build(arch)

    name = self.name
    if self.library:
        print("Create lipo library for {}".format(name))
        if not name.startswith("lib"):
            name = "lib{}".format(name)
        static_fn = join(self.ctx.dist_dir, "lib", "{}.a".format(name))
        ensure_dir(dirname(static_fn))
        print("Lipo {} to {}".format(self.name, static_fn))
        self.make_lipo(static_fn)
    if self.libraries:
        print("Create multiple lipo for {}".format(name))
        for library in self.libraries:
            static_fn = join(self.ctx.dist_dir, "lib", basename(library))
            ensure_dir(dirname(static_fn))
            print(" - Lipo-ize {}".format(library))
            self.make_lipo(static_fn, library)
    print("Install include files for {}".format(self.name))
    self.install_include()
    print("Install frameworks for {}".format(self.name))
    self.install_frameworks()
    print("Install sources for {}".format(self.name))
    self.install_sources()
    print("Install {}".format(self.name))
    self.install()
|
2015-02-01 18:35:28 +01:00
|
|
|
|
|
|
|
def prebuild_arch(self, arch):
    """Invoke the arch-specific hook ``prebuild_<arch>`` when the recipe
    defines one; otherwise do nothing."""
    hook = getattr(self, "prebuild_{}".format(arch.arch), None)
    if hook is not None:
        hook()
|
|
|
|
|
|
|
|
def build_arch(self, arch):
    """Invoke the arch-specific hook ``build_<arch>`` when the recipe
    defines one; otherwise do nothing."""
    hook = getattr(self, "build_{}".format(arch.arch), None)
    if hook is not None:
        hook()
|
|
|
|
|
|
|
|
def postbuild_arch(self, arch):
    """Invoke the arch-specific hook ``postbuild_<arch>`` when the recipe
    defines one; otherwise do nothing."""
    hook = getattr(self, "postbuild_{}".format(arch.arch), None)
    if hook is not None:
        hook()
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
@cache_execution
def make_lipo(self, filename, library=None):
    """Fuse the per-arch builds of *library* into one fat static library
    *filename* via ``lipo -create``. Defaults to ``self.library``; a falsy
    library means there is nothing to lipo."""
    if library is None:
        library = self.library
    if not library:
        return
    lipo_args = []
    for arch in self.filtered_archs:
        # The library path template may embed the arch.
        library_fn = library.format(arch=arch)
        lipo_args.extend([
            "-arch", arch.arch,
            join(self.get_build_dir(arch.arch), library_fn)])
    shprint(sh.lipo, "-create", "-output", filename, *lipo_args)
|
|
|
|
|
2015-02-27 18:54:21 +01:00
|
|
|
@cache_execution
def install_frameworks(self):
    """Copy the recipe's frameworks (taken from the first filtered arch's
    build dir) into ``<dist>/frameworks``, replacing any previous copy."""
    if not self.frameworks:
        return
    build_dir = self.get_build_dir(self.filtered_archs[0].arch)
    for framework in self.frameworks:
        print(" - Install {}".format(framework))
        src = join(build_dir, framework)
        dest = join(self.ctx.dist_dir, "frameworks", framework)
        ensure_dir(dirname(dest))
        # copytree refuses to overwrite, so clear the destination first.
        if exists(dest):
            shutil.rmtree(dest)
        shutil.copytree(src, dest)
|
|
|
|
|
|
|
|
@cache_execution
def install_sources(self):
    """Copy the recipe's source folders (from the first filtered arch's
    build dir) into ``<dist>/sources/<name>``."""
    if not self.sources:
        return
    build_dir = self.get_build_dir(self.filtered_archs[0].arch)
    for source in self.sources:
        print(" - Install {}".format(source))
        src = join(build_dir, source)
        dest = join(self.ctx.dist_dir, "sources", self.name)
        # NOTE(review): every source is copied to the same destination, so a
        # recipe with several sources keeps only the last one — confirm this
        # is intended before relying on multiple entries.
        ensure_dir(dirname(dest))
        if exists(dest):
            shutil.rmtree(dest)
        shutil.copytree(src, dest)
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
@cache_execution
def install_include(self):
    """Copy the recipe's include dir(s) into the dist include tree.

    Headers land in ``<include_dir>/common/<name>`` by default, or in one
    directory per arch when ``include_per_arch`` is set. Each entry of
    ``self.include_dir`` may be a plain path or a ``(src, dest_name)``
    pair; paths are formatted with ``arch`` and ``ctx``.
    """
    if not self.include_dir:
        return
    archs = self.ctx.archs if self.include_per_arch else self.filtered_archs[:1]

    include_dirs = self.include_dir
    if not isinstance(include_dirs, (list, tuple)):
        include_dirs = list([include_dirs])

    for arch in archs:
        arch_dir = arch.arch if self.include_per_arch else "common"
        include_name = self.include_name or self.name
        dest_dir = join(self.ctx.include_dir, arch_dir, include_name)
        if exists(dest_dir):
            shutil.rmtree(dest_dir)
        build_dir = self.get_build_dir(arch.arch)

        for include_dir in include_dirs:
            dest_name = None
            # An entry may be a (src, dest_name) pair instead of a path.
            if isinstance(include_dir, (list, tuple)):
                include_dir, dest_name = include_dir
            include_dir = include_dir.format(arch=arch, ctx=self.ctx)
            src_dir = join(build_dir, include_dir)
            if dest_name is None:
                dest_name = basename(src_dir)
            if isdir(src_dir):
                # A whole directory becomes the destination tree.
                shutil.copytree(src_dir, dest_dir)
            else:
                # A single file is copied under its (possibly renamed) name.
                dest = join(dest_dir, dest_name)
                print("Copy {} to {}".format(src_dir, dest))
                ensure_dir(dirname(dest))
                shutil.copy(src_dir, dest)
|
2015-02-06 01:53:21 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
@cache_execution
def install(self):
    """Per-recipe installation hook; the base recipe installs nothing."""
    pass
|
2015-02-03 00:19:53 +01:00
|
|
|
|
|
|
|
@classmethod
def list_recipes(cls):
    """Yield the name of every directory under ``./recipes``, sorted."""
    recipes_dir = join(dirname(__file__), "recipes")
    for name in sorted(listdir(recipes_dir)):
        if isdir(join(recipes_dir, name)):
            yield name
|
|
|
|
|
|
|
|
@classmethod
def get_recipe(cls, name, ctx):
    """Return the recipe instance for *name*, loading its module on first use.

    *name* may carry a pinned version as ``name==version``; the version is
    applied to the recipe after loading.
    """
    if not hasattr(cls, "recipes"):
        cls.recipes = {}

    # Split off an optional "==version" pin.
    version = None
    if '==' in name:
        name, version = name.split('==')

    if name in cls.recipes:
        recipe = cls.recipes[name]
    else:
        mod = importlib.import_module("recipes.{}".format(name))
        recipe = mod.recipe
        recipe.recipe_dir = join(ctx.root_dir, "recipes", name)
        recipe.init_after_import(ctx)

    if version:
        recipe.version = version

    return recipe
|
|
|
|
|
|
|
|
|
2015-02-25 13:37:26 +01:00
|
|
|
class PythonRecipe(Recipe):
    """Recipe that installs a Python package into the dist site-packages."""

    @cache_execution
    def install(self):
        """Install the built package, then let the recipe trim it down."""
        self.install_python_package()
        self.reduce_python_package()

    @staticmethod
    def remove_junk(d):
        """Delete files under *d* whose suffix marks them as build junk
        (sources, bytecode, linker leftovers, shell scripts)."""
        junk_suffixes = (".pyc", ".py", ".so.lib", ".so.o", ".sh")
        for root, _dirs, files in walk(d):
            for fn in files:
                if fn.endswith(junk_suffixes):
                    unlink(join(root, fn))

    def install_python_package(self, name=None, env=None, is_dir=True):
        """Automate the installation of a Python package into the target
        site-packages.

        Works on the first of ``filtered_archs``; *name* defaults to the
        recipe name and *env* to the recipe's build environment.
        """
        arch = self.filtered_archs[0]
        name = self.name if name is None else name
        env = self.get_recipe_env(arch) if env is None else env
        print("Install {} into the site-packages".format(name))
        build_dir = self.get_build_dir(arch.arch)
        chdir(build_dir)
        hostpython = sh.Command(self.ctx.hostpython)
        iosbuild = join(build_dir, "iosbuild")
        shprint(hostpython, "setup.py", "install", "-O2",
                "--prefix", iosbuild,
                _env=env)
        dest_dir = join(self.ctx.site_packages_dir, name)
        #self.remove_junk(iosbuild)
        if is_dir:
            # copytree refuses to overwrite: clear the destination first.
            if exists(dest_dir):
                shutil.rmtree(dest_dir)
            copy = shutil.copytree
        else:
            copy = shutil.copy
        copy(
            join(iosbuild, "lib",
                 self.ctx.python_ver_dir, "site-packages", name),
            dest_dir)

    def reduce_python_package(self):
        """Hook: remove unneeded files from the installed site-packages."""
        pass
|
|
|
|
|
|
|
|
|
|
|
|
class CythonRecipe(PythonRecipe):
    """Python recipe whose extensions are cythonized and linked with the
    project's liblink/biglink tooling."""

    # When True, run a throw-away build_ext first to generate sources.
    pre_build_ext = False
    # When False, skip the .pyx -> .c pass entirely.
    cythonize = True

    def cythonize_file(self, filename):
        """Run tools/cythonize.py on one .pyx file (path is made relative to
        the build dir when possible)."""
        if filename.startswith(self.build_dir):
            filename = filename[len(self.build_dir) + 1:]
        print("Cythonize {}".format(filename))
        cmd = sh.Command(join(self.ctx.root_dir, "tools", "cythonize.py"))
        # (fixed: removed an unused lookup of ctx.state["hostpython"])
        shprint(cmd, filename)

    def cythonize_build(self):
        """Cythonize every .pyx found in the build tree, unless disabled."""
        if not self.cythonize:
            return
        root_dir = self.build_dir
        for root, dirnames, filenames in walk(root_dir):
            for filename in fnmatch.filter(filenames, "*.pyx"):
                self.cythonize_file(join(root, filename))

    def biglink(self):
        """Collect every directory containing .so.libs files and link them
        into a single lib<name>.a with tools/biglink."""
        dirs = []
        for root, dirnames, filenames in walk(self.build_dir):
            if fnmatch.filter(filenames, "*.so.libs"):
                dirs.append(root)
        cmd = sh.Command(join(self.ctx.root_dir, "tools", "biglink"))
        shprint(cmd, join(self.build_dir, "lib{}.a".format(self.name)), *dirs)

    def get_recipe_env(self, arch):
        """Extend the base env with Cython/liblink specific variables."""
        env = super(CythonRecipe, self).get_recipe_env(arch)
        env["KIVYIOSROOT"] = self.ctx.root_dir
        env["IOSSDKROOT"] = arch.sysroot
        env["LDSHARED"] = join(self.ctx.root_dir, "tools", "liblink")
        env["ARM_LD"] = env["LD"]
        env["ARCH"] = arch.arch
        return env

    def build_arch(self, arch):
        """build_ext (optionally a best-effort pre-pass), cythonize, build_ext
        again, then biglink the resulting objects."""
        build_env = self.get_recipe_env(arch)
        hostpython = sh.Command(self.ctx.hostpython)
        if self.pre_build_ext:
            # The first build_ext only needs to generate files and may
            # legitimately fail; don't let it abort the build. Narrowed from
            # a bare except so Ctrl-C / SystemExit still propagate.
            try:
                shprint(hostpython, "setup.py", "build_ext", "-g",
                        _env=build_env)
            except Exception:
                pass
        self.cythonize_build()
        shprint(hostpython, "setup.py", "build_ext", "-g",
                _env=build_env)
        self.biglink()
|
|
|
|
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
def build_recipes(names, ctx):
    """Resolve the dependency graph of *names* and build every recipe in
    topological order, then init/execute each one.

    Optional dependencies are promoted to hard edges when the state db shows
    they were already built once.
    """
    # gather all the dependencies
    print("Want to build {}".format(names))
    graph = Graph()
    ctx.wanted_recipes = names[:]
    # Work on a copy: the original code aliased the caller's list and drained
    # it with pop(), mutating the caller's argument as a side effect.
    recipe_to_load = list(names)
    recipe_loaded = []
    while recipe_to_load:
        name = recipe_to_load.pop(0)
        if name in recipe_loaded:
            continue
        try:
            recipe = Recipe.get_recipe(name, ctx)
        except ImportError:
            print("ERROR: No recipe named {}".format(name))
            sys.exit(1)
        graph.add(name, name)
        print("Loaded recipe {} (depends of {}, optional are {})".format(name,
            recipe.depends, recipe.optional_depends))
        for depend in recipe.depends:
            graph.add(name, depend)
        recipe_to_load += recipe.depends
        for depend in recipe.optional_depends:
            # in case of compilation after the initial one, take in account
            # of the already compiled recipes
            key = "{}.build_all".format(depend)
            if key in ctx.state:
                recipe_to_load.append(name)
                graph.add(name, depend)
            else:
                graph.add_optional(name, depend)
        recipe_loaded.append(name)

    build_order = list(graph.find_order())
    print("Build order is {}".format(build_order))
    recipes = [Recipe.get_recipe(name, ctx) for name in build_order]
    # Aliases only redirect to a real recipe; drop them from execution.
    recipes = [recipe for recipe in recipes if not recipe.is_alias]
    recipes_order = [recipe.name for recipe in recipes]
    print("Recipe order is {}".format(recipes_order))
    for recipe in recipes:
        recipe.init_with_ctx(ctx)
    for recipe in recipes:
        recipe.execute()
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-06 01:53:21 +01:00
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
def ensure_dir(filename):
    """Create directory *filename* (and parents) if it does not exist."""
    if exists(filename):
        return
    makedirs(filename)
|
|
|
|
|
|
|
|
|
2018-11-09 18:48:08 +01:00
|
|
|
def ensure_recipes_loaded(ctx):
    """Initialize (with *ctx*) every recipe the state db marks as built."""
    for name in Recipe.list_recipes():
        # Only recipes that completed build_all at least once.
        if "{}.build_all".format(name) not in ctx.state:
            continue
        recipe = Recipe.get_recipe(name, ctx)
        recipe.init_with_ctx(ctx)
|
|
|
|
|
|
|
|
|
2015-02-23 11:34:36 +01:00
|
|
|
def update_pbxproj(filename):
    """Sync an Xcode project file with everything the compiled recipes need:
    system and local frameworks, system libraries (dylib + tbd), dist
    libraries, and recipe sources."""
    # list all the compiled recipes
    ctx = Context()
    pbx_libraries = []
    pbx_frameworks = []
    frameworks = []
    libraries = []
    sources = []
    for name in Recipe.list_recipes():
        key = "{}.build_all".format(name)
        if key not in ctx.state:
            continue
        recipe = Recipe.get_recipe(name, ctx)
        recipe.init_with_ctx(ctx)
        pbx_frameworks.extend(recipe.pbx_frameworks)
        pbx_libraries.extend(recipe.pbx_libraries)
        libraries.extend(recipe.dist_libraries)
        frameworks.extend(recipe.frameworks)
        if recipe.sources:
            sources.append(recipe.name)

    # Deduplicate what several recipes may have requested.
    pbx_frameworks = list(set(pbx_frameworks))
    pbx_libraries = list(set(pbx_libraries))
    libraries = list(set(libraries))

    print("-" * 70)
    print("The project need to have:")
    print("iOS Frameworks: {}".format(pbx_frameworks))
    print("iOS Libraries: {}".format(pbx_libraries))
    print("iOS local Frameworks: {}".format(frameworks))
    print("Libraries: {}".format(libraries))
    print("Sources to link: {}".format(sources))

    print("-" * 70)
    print("Analysis of {}".format(filename))

    project = XcodeProject.load(filename)
    sysroot = sh.xcrun("--sdk", "iphonesimulator", "--show-sdk-path").strip()

    group = project.get_or_create_group("Frameworks")
    g_classes = project.get_or_create_group("Classes")
    file_options = FileOptions(embed_framework=False, code_sign_on_copy=True)
    for framework in pbx_frameworks:
        framework_name = "{}.framework".format(framework)
        if framework_name in frameworks:
            # The framework was produced by a recipe and lives in the dist.
            print("Ensure {} is in the project (pbx_frameworks, local)".format(framework))
            f_path = join(ctx.dist_dir, "frameworks", framework_name)
        else:
            print("Ensure {} is in the project (pbx_frameworks, system)".format(framework))
            f_path = join(sysroot, "System", "Library", "Frameworks",
                          "{}.framework".format(framework))
        project.add_file(f_path, parent=group, tree="DEVELOPER_DIR",
                         force=False, file_options=file_options)
    for library in pbx_libraries:
        print("Ensure {} is in the project (pbx_libraries, dylib+tbd)".format(library))
        # Register both the dylib and the tbd stub for each system library.
        for ext in ("dylib", "tbd"):
            f_path = join(sysroot, "usr", "lib",
                          "{}.{}".format(library, ext))
            project.add_file(f_path, parent=group, tree="DEVELOPER_DIR", force=False)
    for library in libraries:
        print("Ensure {} is in the project (libraries)".format(library))
        project.add_file(library, parent=group, force=False)
    for name in sources:
        print("Ensure {} sources are used".format(name))
        fn = join(ctx.dist_dir, "sources", name)
        project.add_folder(fn, parent=g_classes)

    project.backup()
    project.save()
|
2015-02-23 11:34:36 +01:00
|
|
|
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
if __name__ == "__main__":
|
2015-02-10 12:24:20 +01:00
|
|
|
import argparse
|
2017-05-30 16:05:54 +02:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
class ToolchainCL(object):
|
|
|
|
def __init__(self):
    """Parse the sub-command name from argv and dispatch to the method of
    the same name; unknown commands print the usage and exit."""
    parser = argparse.ArgumentParser(
            description="Tool for managing the iOS / Python toolchain",
            usage="""toolchain <command> [<args>]

Available commands:
    build         Build a recipe (compile a library for the required target
                  architecture)
    clean         Clean the build of the specified recipe
    distclean     Clean the build and the result
    recipes       List all the available recipes
    status        List all the recipes and their build status

Xcode:
    create        Create a new xcode project
    update        Update an existing xcode project (frameworks, libraries..)
    launchimage   Create Launch images for your xcode project
    icon          Create Icons for your xcode project
    pip           Install a pip dependency into the distribution
""")
    parser.add_argument("command", help="Command to run")
    # Only consume argv[1]; each sub-command parses its own arguments.
    args = parser.parse_args(sys.argv[1:2])
    if not hasattr(self, args.command):
        print('Unrecognized command')
        parser.print_help()
        exit(1)
    getattr(self, args.command)()
|
|
|
|
|
|
|
|
def build(self):
    """CLI: build one or more recipes, optionally restricted to given archs
    and tuned for concurrency / parallel decompression."""
    ctx = Context()
    parser = argparse.ArgumentParser(
            description="Build the toolchain")
    parser.add_argument("recipe", nargs="+", help="Recipe to compile")
    parser.add_argument("--arch", action="append",
                        help="Restrict compilation to this arch")
    parser.add_argument("--concurrency", type=int, default=ctx.num_cores,
                        help="number of concurrent build processes (where supported)")
    parser.add_argument("--no-pigz", action="store_true", default=not bool(ctx.use_pigz),
                        help="do not use pigz for gzip decompression")
    parser.add_argument("--no-pbzip2", action="store_true", default=not bool(ctx.use_pbzip2),
                        help="do not use pbzip2 for bzip2 decompression")
    args = parser.parse_args(sys.argv[2:])

    if args.arch:
        # --arch may be given repeatedly or once as a space-separated list.
        if len(args.arch) == 1:
            archs = args.arch[0].split()
        else:
            archs = args.arch
        available_archs = [arch.arch for arch in ctx.archs]
        for arch in archs[:]:
            if arch not in available_archs:
                print("ERROR: Architecture {} invalid".format(arch))
                archs.remove(arch)
                continue
        ctx.archs = [arch for arch in ctx.archs if arch.arch in archs]
        print("Architectures restricted to: {}".format(archs))
    ctx.num_cores = args.concurrency
    if args.no_pigz:
        ctx.use_pigz = False
    if args.no_pbzip2:
        ctx.use_pbzip2 = False
    # BUGFIX: the previous code unconditionally set
    # ``ctx.use_pigz = ctx.use_pbzip2`` here, which silently overrode the
    # --no-pigz flag; the two decompressors are independent settings.
    print("Building with {} processes, where supported".format(ctx.num_cores))
    if ctx.use_pigz:
        print("Using pigz to decompress gzip data")
    if ctx.use_pbzip2:
        print("Using pbzip2 to decompress bzip2 data")
    build_recipes(args.recipe, ctx)
|
|
|
|
|
|
|
|
def recipes(self):
    """CLI: list the available recipes, either verbosely (name + version)
    or as a single compact line for scripting."""
    parser = argparse.ArgumentParser(
            description="List all the available recipes")
    parser.add_argument(
            "--compact", action="store_true",
            help="Produce a compact list suitable for scripting")
    args = parser.parse_args(sys.argv[2:])

    if args.compact:
        print(" ".join(list(Recipe.list_recipes())))
    else:
        ctx = Context()
        for name in Recipe.list_recipes():
            try:
                recipe = Recipe.get_recipe(name, ctx)
                print("{recipe.name:<12} {recipe.version:<8}".format(recipe=recipe))
            except Exception:
                # A broken recipe module shouldn't abort the whole listing;
                # narrowed from a bare except so Ctrl-C still works.
                pass
|
2015-02-10 12:24:20 +01:00
|
|
|
|
|
|
|
def clean(self):
    """CLI: remove the build state and build directory of the given recipes,
    or the whole build directory when no recipe is named."""
    parser = argparse.ArgumentParser(
            description="Clean the build")
    parser.add_argument("recipe", nargs="*", help="Recipe to clean")
    args = parser.parse_args(sys.argv[2:])
    ctx = Context()
    if not args.recipe:
        print("Delete build directory")
        if exists(ctx.build_dir):
            shutil.rmtree(ctx.build_dir)
        return
    for recipe in args.recipe:
        print("Cleaning {} build".format(recipe))
        # Drop every state key belonging to this recipe.
        ctx.state.remove_all("{}.".format(recipe))
        build_dir = join(ctx.build_dir, recipe)
        if exists(build_dir):
            shutil.rmtree(build_dir)
|
2015-02-10 12:24:20 +01:00
|
|
|
|
|
|
|
def distclean(self):
    """CLI: remove the build, dist, and cache directories entirely."""
    parser = argparse.ArgumentParser(
            description="Clean the build, download, and dist")
    args = parser.parse_args(sys.argv[2:])
    ctx = Context()
    for directory in (ctx.build_dir, ctx.dist_dir, ctx.cache_dir):
        if exists(directory):
            shutil.rmtree(directory)
|
|
|
|
|
|
|
|
def status(self):
    """CLI: print, for each recipe, whether it was built and when."""
    parser = argparse.ArgumentParser(
            description="Give a status of the build")
    args = parser.parse_args(sys.argv[2:])
    ctx = Context()
    for recipe in Recipe.list_recipes():
        key = "{}.build_all".format(recipe)
        keytime = "{}.build_all.at".format(recipe)

        status = ("Build OK (built at {})".format(ctx.state[keytime])
                  if key in ctx.state else "Not built")
        print("{:<12} - {}".format(
            recipe, status))
|
|
|
|
|
2015-02-11 12:52:46 +01:00
|
|
|
def create(self):
    """CLI: bootstrap a new Xcode project from the cookiecutter template and
    populate it via update_pbxproj."""
    parser = argparse.ArgumentParser(
            description="Create a new xcode project")
    parser.add_argument("name", help="Name of your project")
    parser.add_argument("directory", help="Directory where your project lives")
    args = parser.parse_args(sys.argv[2:])

    from cookiecutter.main import cookiecutter
    ctx = Context()
    ensure_recipes_loaded(ctx)

    # A python recipe must have been built for the template to make sense.
    if not hasattr(ctx, "python_ver"):
        print("ERROR: No python recipes compiled!")
        print("ERROR: You must have compiled at least python2 or")
        print("ERROR: python3 recipes to be able to create a project.")
        sys.exit(1)

    template_dir = join(curdir, "tools", "templates")
    cc_context = {
        "title": args.name,
        "project_name": args.name.lower(),
        "domain_name": "org.kivy.{}".format(args.name.lower()),
        "kivy_dir": dirname(realpath(__file__)),
        "project_dir": realpath(args.directory),
        "version": "1.0.0",
        "dist_dir": ctx.dist_dir,
        "python_version": ctx.python_ver,
        "python_major": ctx.python_major
    }
    cookiecutter(template_dir, no_input=True, extra_context=cc_context)
    filename = join(
        getcwd(),
        "{}-ios".format(args.name.lower()),
        "{}.xcodeproj".format(args.name.lower()),
        "project.pbxproj")
    update_pbxproj(filename)
    print("--")
    print("Project directory : {}-ios".format(
        args.name.lower()))
    print("XCode project : {0}-ios/{0}.xcodeproj".format(
        args.name.lower()))
|
|
|
|
|
|
|
|
def update(self):
    """CLI: refresh an existing Xcode project with the current recipe
    frameworks/libraries/sources."""
    parser = argparse.ArgumentParser(
            description="Update an existing xcode project")
    parser.add_argument("filename", help="Path to your project or xcodeproj")
    args = parser.parse_args(sys.argv[2:])

    filename = args.filename
    if not filename.endswith(".xcodeproj"):
        # Accept a project directory and locate the xcodeproj inside it.
        from glob import glob
        candidates = glob(join(filename, "*.xcodeproj"))
        if not candidates:
            print("ERROR: Unable to find a xcodeproj in {}".format(filename))
            sys.exit(1)
        filename = candidates[0]

    filename = join(filename, "project.pbxproj")
    if not exists(filename):
        print("ERROR: {} not found".format(filename))
        sys.exit(1)

    update_pbxproj(filename)
    print("--")
    print("Project {} updated".format(filename))
|
|
|
|
|
2016-04-19 19:53:55 +02:00
|
|
|
def pip(self):
    """CLI: replace this process with pip, configured to target the
    distribution's site-packages/prefix (compilers disabled)."""
    ctx = Context()
    # Init every already-built recipe so ctx gains the python attributes.
    for recipe in Recipe.list_recipes():
        key = "{}.build_all".format(recipe)
        if key not in ctx.state:
            continue
        recipe = Recipe.get_recipe(recipe, ctx)
        recipe.init_with_ctx(ctx)
    if not hasattr(ctx, "site_packages_dir"):
        print("ERROR: python must be compiled before using pip")
        sys.exit(1)

    pip_env = {
        # Compilation is forbidden: pure-python packages only.
        "CC": "/bin/false",
        "CXX": "/bin/false",
        "PYTHONPATH": ctx.site_packages_dir,
        "PYTHONOPTIMIZE": "2",
        # "PIP_INSTALL_TARGET": ctx.site_packages_dir
    }
    pip_path = sh.which("pip2")
    # BUGFIX: check for a missing pip binary BEFORE building argv around it
    # (previously the check happened after pip_path was already used).
    if not pip_path:
        print("ERROR: pip not found")
        sys.exit(1)
    pip_args = []
    if len(sys.argv) > 2 and sys.argv[2] == "install":
        pip_args = ["--isolated", "--prefix", ctx.python_prefix]
        args = [pip_path] + [sys.argv[2]] + pip_args + sys.argv[3:]
    else:
        args = [pip_path] + pip_args + sys.argv[2:]

    import os
    print("-- execute pip with: {}".format(args))
    os.execve(pip_path, args, pip_env)
|
|
|
|
|
2015-08-14 02:32:03 +02:00
|
|
|
def launchimage(self):
    """CLI: generate launch images via the shared xcassets driver."""
    import xcassets
    self._xcassets("LaunchImage", xcassets.launchimage)
|
|
|
|
|
|
|
|
def icon(self):
    """CLI: generate app icons via the shared xcassets driver."""
    import xcassets
    self._xcassets("Icon", xcassets.icon)
|
|
|
|
|
2016-04-19 19:53:34 +02:00
|
|
|
def xcode(self):
    """CLI: open the given (or discovered) xcodeproj in Xcode."""
    parser = argparse.ArgumentParser(description="Open the xcode project")
    parser.add_argument("filename", help="Path to your project or xcodeproj")
    args = parser.parse_args(sys.argv[2:])
    filename = args.filename
    if not filename.endswith(".xcodeproj"):
        # Accept a project directory and locate the xcodeproj inside it.
        from glob import glob
        candidates = glob(join(filename, "*.xcodeproj"))
        if not candidates:
            print("ERROR: Unable to find a xcodeproj in {}".format(filename))
            sys.exit(1)
        filename = candidates[0]
    sh.open(filename)
|
|
|
|
|
2015-08-14 02:51:48 +02:00
|
|
|
def _xcassets(self, title, command):
    """Shared driver for launchimage/icon: locate the project's
    Images.xcassets (creating it if missing) and hand it to *command*
    together with the source image."""
    parser = argparse.ArgumentParser(
            description="Generate {} for your project".format(title))
    parser.add_argument("filename", help="Path to your project or xcodeproj")
    parser.add_argument("image", help="Path to your initial {}.png".format(title.lower()))
    args = parser.parse_args(sys.argv[2:])

    if not exists(args.image):
        print("ERROR: image path does not exists.")
        return

    filename = args.filename
    if not filename.endswith(".xcodeproj"):
        # Accept a project directory and locate the xcodeproj inside it.
        from glob import glob
        candidates = glob(join(filename, "*.xcodeproj"))
        if not candidates:
            print("ERROR: Unable to find a xcodeproj in {}".format(filename))
            sys.exit(1)
        filename = candidates[0]

    project_name = filename.split("/")[-1].replace(".xcodeproj", "")
    images_xcassets = realpath(join(filename, "..", project_name,
                                    "Images.xcassets"))
    if not exists(images_xcassets):
        print("WARNING: Images.xcassets not found, creating it.")
        makedirs(images_xcassets)
    print("Images.xcassets located at {}".format(images_xcassets))

    command(images_xcassets, args.image)
|
2015-02-11 12:52:46 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
ToolchainCL()
|