2015-02-10 12:24:20 +01:00
|
|
|
#!/usr/bin/env python
|
2015-02-01 18:35:28 +01:00
|
|
|
"""
|
|
|
|
Tool for compiling iOS toolchain
|
|
|
|
================================
|
|
|
|
|
|
|
|
This tool intends to replace all the previous shell-script tools under tools/.
|
|
|
|
"""
|
|
|
|
|
|
|
|
import sys
|
|
|
|
from sys import stdout
|
|
|
|
from os.path import join, dirname, realpath, exists, isdir, basename
|
2015-02-23 22:51:16 +01:00
|
|
|
from os import listdir, unlink, makedirs, environ, chdir, getcwd, walk
|
2015-02-01 18:35:28 +01:00
|
|
|
import zipfile
|
|
|
|
import tarfile
|
|
|
|
import importlib
|
2015-02-10 12:24:20 +01:00
|
|
|
import io
|
|
|
|
import json
|
2015-02-01 18:35:28 +01:00
|
|
|
import shutil
|
2015-02-23 22:51:16 +01:00
|
|
|
import fnmatch
|
2015-02-10 12:24:20 +01:00
|
|
|
from datetime import datetime
|
2015-02-01 18:35:28 +01:00
|
|
|
try:
|
|
|
|
from urllib.request import FancyURLopener
|
|
|
|
except ImportError:
|
|
|
|
from urllib import FancyURLopener
|
|
|
|
|
2015-02-14 13:05:54 +01:00
|
|
|
# Make the bundled external modules (notably `sh`) importable before the
# rest of the file uses them.
curdir = dirname(__file__)
sys.path.insert(0, join(curdir, "tools", "external"))

import sh

# True when running under Python 3; the file supports both 2 and 3.
IS_PY3 = sys.version_info[0] >= 3
|
|
|
|
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
def shprint(command, *args, **kwargs):
    """Run an `sh` command and stream its combined stdout/stderr live.

    Forces line-buffered iteration so output appears as the command
    produces it instead of after it exits.
    """
    kwargs.update(_iter=True, _out_bufsize=1, _err_to_out=True)
    stream = command(*args, **kwargs)
    for chunk in stream:
        stdout.write(chunk)
|
|
|
|
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
def cache_execution(f):
    """Decorator: run `f` at most once per (recipe, args) combination.

    Completion is recorded in ``self.ctx.state`` under a dotted key, with
    a companion ``<key>.at`` timestamp.  Pass ``force=True`` to re-run.
    """
    def _cache_execution(self, *args, **kwargs):
        state = self.ctx.state
        force = kwargs.pop("force", False)
        parts = [self.name, f.__name__]
        parts.extend("{}".format(arg) for arg in args)
        key = ".".join(parts)
        key_time = "{}.at".format(key)
        if not force and key in state:
            print("# (ignored) {} {}".format(f.__name__.capitalize(), self.name))
            return
        print("{} {}".format(f.__name__.capitalize(), self.name))
        f(self, *args, **kwargs)
        state[key] = True
        state[key_time] = str(datetime.utcnow())
    return _cache_execution
|
|
|
|
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
class ChromeDownloader(FancyURLopener):
    """URL opener that masquerades as desktop Chrome.

    Some download servers refuse requests carrying the default urllib
    user-agent, so we spoof a regular browser instead.
    """
    version = (
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
        '(KHTML, like Gecko) Chrome/28.0.1500.71 Safari/537.36')


# Module-level download helper used by the recipe machinery.
urlretrieve = ChromeDownloader().retrieve
|
|
|
|
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
class JsonStore(object):
    """Replacement of shelve using json, needed for support python 2 and 3.

    A plain dict persisted to `filename` on every mutation.
    """

    def __init__(self, filename):
        super(JsonStore, self).__init__()
        self.filename = filename
        self.data = {}
        if exists(filename):
            try:
                with io.open(filename, encoding='utf-8') as fd:
                    self.data = json.load(fd)
            except ValueError:
                # Corrupt state file: start fresh rather than crash.
                print("Unable to read the state.db, content will be replaced.")

    def __getitem__(self, key):
        return self.data[key]

    def __setitem__(self, key, value):
        self.data[key] = value
        self.sync()

    def __delitem__(self, key):
        del self.data[key]
        self.sync()

    def __contains__(self, item):
        return item in self.data

    def get(self, item, default=None):
        return self.data.get(item, default)

    def keys(self):
        return self.data.keys()

    def remove_all(self, prefix):
        """Delete every key starting with `prefix`, then persist once."""
        # Fix: the original iterated `self.data.keys()[:]`, which raises
        # TypeError on Python 3 (dict_keys cannot be sliced).  list()
        # snapshots the keys so we can safely delete while iterating.
        for key in list(self.data.keys()):
            if not key.startswith(prefix):
                continue
            del self.data[key]
        self.sync()

    def sync(self):
        # http://stackoverflow.com/questions/12309269/write-json-data-to-file-in-python/14870531#14870531
        # Checked inline instead of via the module-level IS_PY3 flag so the
        # class is self-contained; behavior is identical.
        if sys.version_info[0] >= 3:
            with open(self.filename, 'w') as fd:
                json.dump(self.data, fd, ensure_ascii=False)
        else:
            with io.open(self.filename, 'w', encoding='utf-8') as fd:
                fd.write(unicode(json.dumps(self.data, ensure_ascii=False)))
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
class Arch(object):
    """Base class describing one build target (SDK, arch, flags).

    Subclasses provide `sdk`, `arch`, `triple`, `version_min` and
    `sysroot` as class attributes.
    """

    def __init__(self, ctx):
        super(Arch, self).__init__()
        self.ctx = ctx

    def __str__(self):
        return self.arch

    def _expanded_include_dirs(self, prefix):
        # Expand the context's include-dir templates for this arch,
        # optionally prefixing each entry (e.g. "-I" for CFLAGS).
        return [
            prefix + "{}/{}".format(self.ctx.include_dir, d.format(arch=self))
            for d in self.ctx.include_dirs]

    @property
    def include_dirs(self):
        return self._expanded_include_dirs("")

    def get_env(self):
        """Return the environment dict used to compile for this arch."""
        include_dirs = self._expanded_include_dirs("-I")

        env = {}
        # Resolve the toolchain binaries through xcrun for this SDK.
        for var, tool in (("CC", "clang"), ("AR", "ar"), ("LD", "ld")):
            env[var] = sh.xcrun("-find", "-sdk", self.sdk, tool).strip()
        env["OTHER_CFLAGS"] = " ".join(include_dirs)
        env["OTHER_LDFLAGS"] = " ".join([
            "-L{}/{}".format(self.ctx.dist_dir, "lib"),
        ])
        cflags = [
            "-arch", self.arch,
            "-pipe", "-no-cpp-precomp",
            "--sysroot", self.sysroot,
            "-O3",
            self.version_min,
        ]
        env["CFLAGS"] = " ".join(cflags + include_dirs)
        env["LDFLAGS"] = " ".join([
            "-arch", self.arch,
            "--sysroot", self.sysroot,
            "-L{}/{}".format(self.ctx.dist_dir, "lib"),
            "-lsqlite3",
            "-undefined", "dynamic_lookup",
            self.version_min,
        ])
        return env
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
class ArchSimulator(Arch):
    # 32-bit iOS simulator target (x86 host).
    sdk = "iphonesimulator"
    arch = "i386"
    triple = "i386-apple-darwin11"
    version_min = "-miphoneos-version-min=6.0.0"
    # Resolved once at import time; requires Xcode tools on the host.
    sysroot = sh.xcrun("--sdk", "iphonesimulator", "--show-sdk-path").strip()
|
|
|
|
|
|
|
|
|
|
|
|
class Arch64Simulator(Arch):
    # 64-bit iOS simulator target (x86_64 host).
    sdk = "iphonesimulator"
    arch = "x86_64"
    triple = "x86_64-apple-darwin13"
    version_min = "-miphoneos-version-min=7.0"
    # Resolved once at import time; requires Xcode tools on the host.
    sysroot = sh.xcrun("--sdk", "iphonesimulator", "--show-sdk-path").strip()
|
|
|
|
|
|
|
|
|
|
|
|
class ArchIOS(Arch):
    # 32-bit device target.
    sdk = "iphoneos"
    arch = "armv7"
    triple = "arm-apple-darwin11"
    version_min = "-miphoneos-version-min=6.0.0"
    # Resolved once at import time; requires Xcode tools on the host.
    sysroot = sh.xcrun("--sdk", "iphoneos", "--show-sdk-path").strip()
|
|
|
|
|
|
|
|
|
|
|
|
class Arch64IOS(Arch):
    # 64-bit device target.
    sdk = "iphoneos"
    arch = "arm64"
    triple = "aarch64-apple-darwin13"
    version_min = "-miphoneos-version-min=7.0"
    # Resolved once at import time; requires Xcode tools on the host.
    sysroot = sh.xcrun("--sdk", "iphoneos", "--show-sdk-path").strip()
|
|
|
|
|
|
|
|
|
|
|
|
class Graph(object):
    # Taken from python-for-android/depsort
    """Minimal dependency graph with deterministic topological ordering."""

    def __init__(self):
        # `graph`: dict that maps each package to a set of its dependencies.
        self.graph = {}

    def add(self, dependent, dependency):
        """Add a dependency relationship to the graph"""
        self.graph.setdefault(dependent, set())
        self.graph.setdefault(dependency, set())
        if dependent != dependency:
            self.graph[dependent].add(dependency)

    def add_optional(self, dependent, dependency):
        """Add an optional (ordering only) dependency relationship to the graph

        Only call this after all mandatory requirements are added
        """
        if dependent in self.graph and dependency in self.graph:
            self.add(dependent, dependency)

    def find_order(self):
        """Do a topological sort on a dependency graph

        :Parameters:
        :Returns:
            iterator, sorted items form first to last
        """
        # Work on a deep-ish copy so the instance graph survives the sort.
        remaining = dict((node, set(deps)) for node, deps in self.graph.items())
        while remaining:
            # Every node whose dependencies are all satisfied is ready;
            # sorting keeps the order predictable across runs.
            ready = sorted(node for node, deps in remaining.items() if not deps)
            if not ready:
                raise ValueError('Dependency cycle detected! %s' % remaining)
            for node in ready:
                yield node
                del remaining[node]
                for deps in remaining.values():
                    deps.discard(node)
|
|
|
|
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
class Context(object):
    """Global build context: SDK versions, directory layout, tool paths.

    Constructing a Context probes the host (Xcode, cython, autotools...)
    and exits the process when a hard requirement is missing.
    """
    env = environ.copy()
    root_dir = None
    cache_dir = None
    build_dir = None
    dist_dir = None
    install_dir = None
    ccache = None
    cython = None
    sdkver = None
    sdksimver = None

    def __init__(self):
        super(Context, self).__init__()
        self.include_dirs = []

        ok = True

        sdks = sh.xcodebuild("-showsdks").splitlines()

        # get the latest iphoneos
        iphoneos = [x for x in sdks if "iphoneos" in x]
        if not iphoneos:
            print("No iphone SDK installed")
            ok = False
        else:
            # Last token looks like "iphoneos8.1"; keep only the version.
            iphoneos = iphoneos[0].split()[-1].replace("iphoneos", "")
            self.sdkver = iphoneos

        # get the latest iphonesimulator version
        iphonesim = [x for x in sdks if "iphonesimulator" in x]
        if not iphonesim:
            ok = False
            print("Error: No iphonesimulator SDK installed")
        else:
            iphonesim = iphonesim[0].split()[-1].replace("iphonesimulator", "")
            self.sdksimver = iphonesim

        # get the path for Developer
        self.devroot = "{}/Platforms/iPhoneOS.platform/Developer".format(
            sh.xcode_select("-print-path").strip())

        # path to the iOS SDK
        self.iossdkroot = "{}/SDKs/iPhoneOS{}.sdk".format(
            self.devroot, self.sdkver)

        # root of the toolchain
        self.root_dir = realpath(dirname(__file__))
        self.build_dir = "{}/build".format(self.root_dir)
        self.cache_dir = "{}/.cache".format(self.root_dir)
        self.dist_dir = "{}/dist".format(self.root_dir)
        self.install_dir = "{}/dist/root".format(self.root_dir)
        self.include_dir = "{}/dist/include".format(self.root_dir)
        # One instance per supported target.
        self.archs = (
            ArchSimulator(self),
            Arch64Simulator(self),
            ArchIOS(self),
            Arch64IOS(self))

        # path to some tools
        self.ccache = sh.which("ccache")
        if not self.ccache:
            # ccache is optional: its absence only slows rebuilds down.
            #print("ccache is missing, the build will not be optimized in the future.")
            pass
        for cython_fn in ("cython-2.7", "cython"):
            cython = sh.which(cython_fn)
            if cython:
                self.cython = cython
                break
        if not self.cython:
            ok = False
            print("Missing requirement: cython is not installed")

        # check the basic tools
        # NOTE(review): a missing tool here is only reported, it does not
        # set ok = False — confirm whether that is intentional.
        for tool in ("pkg-config", "autoconf", "automake", "libtool", "hg"):
            if not sh.which(tool):
                print("Missing requirement: {} is not installed".format(
                    tool))

        if not ok:
            sys.exit(1)

        ensure_dir(self.root_dir)
        ensure_dir(self.build_dir)
        ensure_dir(self.cache_dir)
        ensure_dir(self.dist_dir)
        ensure_dir(self.install_dir)
        ensure_dir(self.include_dir)
        ensure_dir(join(self.include_dir, "common"))

        # remove the most obvious flags that can break the compilation
        self.env.pop("MACOSX_DEPLOYMENT_TARGET", None)
        self.env.pop("PYTHONDONTWRITEBYTECODE", None)
        self.env.pop("ARCHFLAGS", None)
        self.env.pop("CFLAGS", None)
        self.env.pop("LDFLAGS", None)

        # set the state
        self.state = JsonStore(join(self.dist_dir, "state.db"))
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
|
|
|
|
class Recipe(object):
    """Base class for a buildable package (one per recipes/<name> dir)."""

    # Version substituted into `url` via {version}.
    version = None
    # Download URL template, or a directory name relative to the recipe.
    url = None
    # Restrict the build to these arch names; empty means every ctx arch.
    archs = []
    # Names of recipes that must be built before this one.
    depends = []
    # Relative path of the single static library the build produces.
    library = None
    # Relative paths when the build produces several static libraries.
    libraries = []
    # Header dir(s) to install into dist/include (str, list or tuples).
    include_dir = None
    # When True, headers are installed once per arch instead of "common".
    include_per_arch = False
    # Xcode frameworks / system dylibs the final project must link.
    pbx_frameworks = []
    pbx_libraries = []
|
2015-02-01 18:35:28 +01:00
|
|
|
|
|
|
|
# API available for recipes
|
|
|
|
    def download_file(self, url, filename, cwd=None):
        """
        Download an `url` to `outfn`

        Progress is written to stdout; returns the final filename.
        """
        def report_hook(index, blksize, size):
            # size <= 0 means the server sent no Content-Length.
            if size <= 0:
                progression = '{0} bytes'.format(index * blksize)
            else:
                progression = '{0:.2f}%'.format(
                    index * blksize * 100. / float(size))
            stdout.write('- Download {}\r'.format(progression))
            stdout.flush()

        if cwd:
            filename = join(cwd, filename)
        # Always restart from scratch: a leftover partial download would
        # otherwise be mistaken for a complete archive.
        if exists(filename):
            unlink(filename)

        print('Downloading {0}'.format(url))
        urlretrieve(url, filename, report_hook)
        return filename
|
|
|
|
|
|
|
|
def extract_file(self, filename, cwd):
|
|
|
|
"""
|
|
|
|
Extract the `filename` into the directory `cwd`.
|
|
|
|
"""
|
|
|
|
print("Extract {} into {}".format(filename, cwd))
|
|
|
|
if filename.endswith(".tgz") or filename.endswith(".tar.gz"):
|
|
|
|
shprint(sh.tar, "-C", cwd, "-xvzf", filename)
|
|
|
|
|
|
|
|
elif filename.endswith(".tbz2") or filename.endswith(".tar.bz2"):
|
|
|
|
shprint(sh.tar, "-C", cwd, "-xvjf", filename)
|
|
|
|
|
|
|
|
elif filename.endswith(".zip"):
|
|
|
|
zf = zipfile.ZipFile(filename)
|
|
|
|
zf.extractall(path=cwd)
|
|
|
|
zf.close()
|
|
|
|
|
|
|
|
else:
|
|
|
|
print("Error: cannot extract, unreconized extension for {}".format(
|
|
|
|
filename))
|
|
|
|
raise Exception()
|
|
|
|
|
|
|
|
def get_archive_rootdir(self, filename):
|
|
|
|
if filename.endswith(".tgz") or filename.endswith(".tar.gz") or \
|
|
|
|
filename.endswith(".tbz2") or filename.endswith(".tar.bz2"):
|
|
|
|
archive = tarfile.open(filename)
|
|
|
|
root = archive.next().path.split("/")
|
|
|
|
return root[0]
|
2015-02-09 11:58:29 +01:00
|
|
|
elif filename.endswith(".zip"):
|
|
|
|
with zipfile.ZipFile(filename) as zf:
|
|
|
|
return dirname(zf.namelist()[0])
|
2015-02-01 18:35:28 +01:00
|
|
|
else:
|
2015-02-09 11:58:29 +01:00
|
|
|
print("Error: cannot detect root directory")
|
2015-02-01 18:35:28 +01:00
|
|
|
print("Unrecognized extension for {}".format(filename))
|
|
|
|
raise Exception()
|
|
|
|
|
|
|
|
    def apply_patch(self, filename):
        """
        Apply a patch from the current recipe directory into the current
        build directory.
        """
        print("Apply patch {}".format(filename))
        filename = join(self.recipe_dir, filename)
        # -t: never prompt; -p1: strip the leading path component.
        sh.patch("-t", "-d", self.build_dir, "-p1", "-i", filename)
|
|
|
|
|
|
|
|
def copy_file(self, filename, dest):
|
2015-02-02 05:22:13 +01:00
|
|
|
print("Copy {} to {}".format(filename, dest))
|
2015-02-01 18:35:28 +01:00
|
|
|
filename = join(self.recipe_dir, filename)
|
|
|
|
dest = join(self.build_dir, dest)
|
|
|
|
shutil.copy(filename, dest)
|
|
|
|
|
2015-02-02 05:22:13 +01:00
|
|
|
def append_file(self, filename, dest):
|
|
|
|
print("Append {} to {}".format(filename, dest))
|
|
|
|
filename = join(self.recipe_dir, filename)
|
|
|
|
dest = join(self.build_dir, dest)
|
|
|
|
with open(filename, "rb") as fd:
|
|
|
|
data = fd.read()
|
|
|
|
with open(dest, "ab") as fd:
|
|
|
|
fd.write(data)
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
def has_marker(self, marker):
|
|
|
|
"""
|
|
|
|
Return True if the current build directory has the marker set
|
|
|
|
"""
|
|
|
|
return exists(join(self.build_dir, ".{}".format(marker)))
|
|
|
|
|
|
|
|
def set_marker(self, marker):
|
|
|
|
"""
|
|
|
|
Set a marker info the current build directory
|
|
|
|
"""
|
|
|
|
with open(join(self.build_dir, ".{}".format(marker)), "w") as fd:
|
|
|
|
fd.write("ok")
|
|
|
|
|
2015-02-02 05:22:13 +01:00
|
|
|
def delete_marker(self, marker):
|
2015-02-01 18:35:28 +01:00
|
|
|
"""
|
2015-02-02 05:22:13 +01:00
|
|
|
Delete a specific marker
|
2015-02-01 18:35:28 +01:00
|
|
|
"""
|
2015-02-02 05:22:13 +01:00
|
|
|
try:
|
|
|
|
unlink(join(self.build_dir, ".{}".format(marker)))
|
|
|
|
except:
|
|
|
|
pass
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-06 01:53:21 +01:00
|
|
|
    def get_include_dir(self):
        """
        Return the common include dir for this recipe

        i.e. dist/include/common/<name>.
        """
        return join(self.ctx.include_dir, "common", self.name)
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
    @property
    def name(self):
        # Recipe modules are imported as "recipes.<name>"; strip the
        # package prefix to recover the recipe name.
        modname = self.__class__.__module__
        return modname.split(".", 1)[-1]
|
|
|
|
|
|
|
|
@property
|
|
|
|
def archive_fn(self):
|
2015-02-03 00:19:53 +01:00
|
|
|
bfn = basename(self.url.format(version=self.version))
|
|
|
|
fn = "{}/{}-{}".format(
|
2015-02-01 18:35:28 +01:00
|
|
|
self.ctx.cache_dir,
|
2015-02-03 00:19:53 +01:00
|
|
|
self.name, bfn)
|
2015-02-01 18:35:28 +01:00
|
|
|
return fn
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
@property
|
|
|
|
def filtered_archs(self):
|
|
|
|
for arch in self.ctx.archs:
|
|
|
|
if not self.archs or (arch.arch in self.archs):
|
|
|
|
yield arch
|
|
|
|
|
2015-02-23 11:34:36 +01:00
|
|
|
@property
|
|
|
|
def dist_libraries(self):
|
|
|
|
libraries = []
|
|
|
|
name = self.name
|
|
|
|
if not name.startswith("lib"):
|
|
|
|
name = "lib{}".format(name)
|
|
|
|
if self.library:
|
|
|
|
static_fn = join(self.ctx.dist_dir, "lib", "{}.a".format(name))
|
|
|
|
libraries.append(static_fn)
|
|
|
|
for library in self.libraries:
|
|
|
|
static_fn = join(self.ctx.dist_dir, "lib", basename(library))
|
|
|
|
libraries.append(static_fn)
|
|
|
|
return libraries
|
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
    def get_build_dir(self, arch):
        # Layout: build/<recipe>/<arch>/<archive root dir>
        return join(self.ctx.build_dir, self.name, arch, self.archive_root)
|
2015-02-03 00:19:53 +01:00
|
|
|
|
2015-02-23 22:51:16 +01:00
|
|
|
    def cythonize(self, filename):
        """Run tools/cythonize.py on a single .pyx file."""
        # Make the path relative to the build dir so generated files land
        # next to their sources.
        if filename.startswith(self.build_dir):
            filename = filename[len(self.build_dir) + 1:]
        print("Cythonize {}".format(filename))
        cmd = sh.Command(join(self.ctx.root_dir, "tools", "cythonize.py"))
        shprint(cmd, filename)
|
|
|
|
|
|
|
|
def cythonize_build(self):
|
|
|
|
root_dir = self.build_dir
|
|
|
|
for root, dirnames, filenames in walk(root_dir):
|
|
|
|
for filename in fnmatch.filter(filenames, "*.pyx"):
|
|
|
|
self.cythonize(join(root, filename))
|
|
|
|
|
|
|
|
    def biglink(self):
        """Merge the built objects into lib<name>.a via tools/biglink."""
        # Every directory containing a *.so.libs manifest takes part.
        dirs = []
        for root, dirnames, filenames in walk(self.build_dir):
            if fnmatch.filter(filenames, "*.so.libs"):
                dirs.append(root)
        cmd = sh.Command(join(self.ctx.root_dir, "tools", "biglink"))
        shprint(cmd, join(self.build_dir, "lib{}.a".format(self.name)), *dirs)
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
# Public Recipe API to be subclassed if needed
|
|
|
|
|
2015-02-02 05:22:13 +01:00
|
|
|
def init_with_ctx(self, ctx):
|
|
|
|
self.ctx = ctx
|
2015-02-06 01:53:21 +01:00
|
|
|
include_dir = None
|
|
|
|
if self.include_dir:
|
|
|
|
if self.include_per_arch:
|
|
|
|
include_dir = join("{arch.arch}", self.name)
|
|
|
|
else:
|
|
|
|
include_dir = join("common", self.name)
|
|
|
|
if include_dir:
|
2015-02-23 11:34:36 +01:00
|
|
|
#print("Include dir added: {}".format(include_dir))
|
2015-02-06 01:53:21 +01:00
|
|
|
self.ctx.include_dirs.append(include_dir)
|
2015-02-02 05:22:13 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
@property
|
|
|
|
def archive_root(self):
|
|
|
|
key = "{}.archive_root".format(self.name)
|
|
|
|
value = self.ctx.state.get(key)
|
|
|
|
if not key:
|
|
|
|
value = self.get_archive_rootdir(self.archive_fn)
|
|
|
|
self.ctx.state[key] = value
|
|
|
|
return value
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
    def execute(self):
        """Run the full pipeline: download, extract, build every arch."""
        # A custom source dir invalidates every cached step of this recipe.
        if self.custom_dir:
            self.ctx.state.remove_all(self.name)
        self.download()
        self.extract()
        self.build_all()
|
|
|
|
|
2015-02-12 00:53:08 +01:00
|
|
|
@property
|
|
|
|
def custom_dir(self):
|
|
|
|
"""Check if there is a variable name to specify a custom version /
|
|
|
|
directory to use instead of the current url.
|
|
|
|
"""
|
|
|
|
d = environ.get("{}_DIR".format(self.name.upper()))
|
|
|
|
if not d:
|
|
|
|
return
|
|
|
|
if not exists(d):
|
|
|
|
return
|
|
|
|
return d
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
    @cache_execution
    def download(self):
        """Fetch the source archive (or record a local source dir)."""
        key = "{}.archive_root".format(self.name)
        if self.custom_dir:
            # User supplied a source dir via <NAME>_DIR: no download.
            self.ctx.state[key] = basename(self.custom_dir)
        else:
            # `url` may name a directory bundled with the recipe itself.
            src_dir = join(self.recipe_dir, self.url)
            if exists(src_dir):
                self.ctx.state[key] = basename(src_dir)
                return
            fn = self.archive_fn
            if not exists(fn):
                self.download_file(self.url.format(version=self.version), fn)
            self.ctx.state[key] = self.get_archive_rootdir(self.archive_fn)
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
    @cache_execution
    def extract(self):
        # recipe tmp directory
        # One extraction per arch so builds never share working trees.
        for arch in self.filtered_archs:
            print("Extract {} for {}".format(self.name, arch.arch))
            self.extract_arch(arch.arch)
|
2015-02-02 05:22:13 +01:00
|
|
|
|
2015-02-03 00:19:53 +01:00
|
|
|
    def extract_arch(self, arch):
        """Materialize the source tree for one arch under the build dir."""
        build_dir = join(self.ctx.build_dir, self.name, arch)
        dest_dir = join(build_dir, self.archive_root)
        if self.custom_dir:
            # Custom source dir: always refresh the working copy.
            if exists(dest_dir):
                shutil.rmtree(dest_dir)
            shutil.copytree(self.custom_dir, dest_dir)
        else:
            if exists(dest_dir):
                # Already extracted for this arch.
                return
            # `url` may name a directory shipped inside the recipe.
            src_dir = join(self.recipe_dir, self.url)
            if exists(src_dir):
                shutil.copytree(src_dir, dest_dir)
                return
            ensure_dir(build_dir)
            self.extract_file(self.archive_fn, build_dir)
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
    @cache_execution
    def build(self, arch):
        """Run prebuild/build/postbuild hooks for one arch.

        Marker files in the build dir track progress: an interrupted
        build ("building") is wiped and re-extracted; a finished one
        ("build_done") is skipped.
        """
        self.build_dir = self.get_build_dir(arch.arch)
        if self.has_marker("building"):
            print("Warning: {} build for {} has been incomplete".format(
                self.name, arch.arch))
            print("Warning: deleting the build and restarting.")
            shutil.rmtree(self.build_dir)
            self.extract_arch(arch.arch)

        if self.has_marker("build_done"):
            print("Build python for {} already done.".format(arch.arch))
            return

        self.set_marker("building")

        # NOTE(review): chdir changes the process-wide cwd and is never
        # restored here — the per-arch hooks appear to rely on it.
        chdir(self.build_dir)
        print("Prebuild {} for {}".format(self.name, arch.arch))
        self.prebuild_arch(arch)
        print("Build {} for {}".format(self.name, arch.arch))
        self.build_arch(arch)
        print("Postbuild {} for {}".format(self.name, arch.arch))
        self.postbuild_arch(arch)
        self.delete_marker("building")
        self.set_marker("build_done")
|
|
|
|
|
|
|
|
    @cache_execution
    def build_all(self):
        """Build every filtered arch, then lipo and install the results."""
        filtered_archs = list(self.filtered_archs)
        print("Build {} for {} (filtered)".format(
            self.name,
            ", ".join([x.arch for x in filtered_archs])))
        for arch in self.filtered_archs:
            self.build(arch)

        name = self.name
        if self.library:
            # Single-library recipe: merge all archs into one fat .a
            print("Create lipo library for {}".format(name))
            if not name.startswith("lib"):
                name = "lib{}".format(name)
            static_fn = join(self.ctx.dist_dir, "lib", "{}.a".format(name))
            ensure_dir(dirname(static_fn))
            print("Lipo {} to {}".format(self.name, static_fn))
            self.make_lipo(static_fn)
        elif self.libraries:
            # Multi-library recipe: one fat .a per declared library.
            print("Create multiple lipo for {}".format(name))
            for library in self.libraries:
                static_fn = join(self.ctx.dist_dir, "lib", basename(library))
                ensure_dir(dirname(static_fn))
                print(" - Lipo-ize {}".format(library))
                self.make_lipo(static_fn, library)
        print("Install include files for {}".format(self.name))
        self.install_include()
        print("Install {}".format(self.name))
        self.install()
|
2015-02-01 18:35:28 +01:00
|
|
|
|
|
|
|
def prebuild_arch(self, arch):
|
2015-02-03 00:19:53 +01:00
|
|
|
prebuild = "prebuild_{}".format(arch.arch)
|
2015-02-01 18:35:28 +01:00
|
|
|
if hasattr(self, prebuild):
|
|
|
|
getattr(self, prebuild)()
|
|
|
|
|
|
|
|
def build_arch(self, arch):
|
2015-02-03 00:19:53 +01:00
|
|
|
build = "build_{}".format(arch.arch)
|
2015-02-01 18:35:28 +01:00
|
|
|
if hasattr(self, build):
|
|
|
|
getattr(self, build)()
|
|
|
|
|
|
|
|
def postbuild_arch(self, arch):
|
2015-02-03 00:19:53 +01:00
|
|
|
postbuild = "postbuild_{}".format(arch.arch)
|
2015-02-01 18:35:28 +01:00
|
|
|
if hasattr(self, postbuild):
|
|
|
|
getattr(self, postbuild)()
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
    @cache_execution
    def make_lipo(self, filename, library=None):
        """Merge the per-arch builds of `library` into one fat binary."""
        if library is None:
            library = self.library
        if not library:
            return
        args = []
        for arch in self.filtered_archs:
            # `library` may carry an {arch} placeholder.
            library_fn = library.format(arch=arch)
            args += [
                "-arch", arch.arch,
                join(self.get_build_dir(arch.arch), library_fn)]
        shprint(sh.lipo, "-create", "-output", filename, *args)
|
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
    @cache_execution
    def install_include(self):
        """Copy the recipe's headers into dist/include.

        Headers go to dist/include/common/<name>, or into one directory
        per arch when `include_per_arch` is set.
        """
        if not self.include_dir:
            return
        if self.include_per_arch:
            archs = self.ctx.archs
        else:
            # Headers are arch-independent: any built arch will do.
            archs = [list(self.filtered_archs)[0]]

        include_dirs = self.include_dir
        if not isinstance(include_dirs, (list, tuple)):
            include_dirs = list([include_dirs])

        for arch in archs:
            arch_dir = "common"
            if self.include_per_arch:
                arch_dir = arch.arch
            dest_dir = join(self.ctx.include_dir, arch_dir, self.name)
            # Start from a clean destination each run.
            if exists(dest_dir):
                shutil.rmtree(dest_dir)
            build_dir = self.get_build_dir(arch.arch)

            for include_dir in include_dirs:
                dest_name = None
                # Entries may be (src, dest_name) pairs.
                if isinstance(include_dir, (list, tuple)):
                    include_dir, dest_name = include_dir
                include_dir = include_dir.format(arch=arch, ctx=self.ctx)
                src_dir = join(build_dir, include_dir)
                if dest_name is None:
                    dest_name = basename(src_dir)
                if isdir(src_dir):
                    shutil.copytree(src_dir, dest_dir)
                else:
                    dest = join(dest_dir, dest_name)
                    print("Copy {} to {}".format(src_dir, dest))
                    ensure_dir(dirname(dest))
                    shutil.copy(src_dir, dest)
|
2015-02-06 01:53:21 +01:00
|
|
|
|
2015-02-10 12:24:20 +01:00
|
|
|
    @cache_execution
    def install(self):
        # Default: nothing to install; recipes override when needed.
        pass
|
2015-02-03 00:19:53 +01:00
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def list_recipes(cls):
|
|
|
|
recipes_dir = join(dirname(__file__), "recipes")
|
|
|
|
for name in listdir(recipes_dir):
|
|
|
|
fn = join(recipes_dir, name)
|
|
|
|
if isdir(fn):
|
|
|
|
yield name
|
|
|
|
|
|
|
|
@classmethod
|
2015-02-10 12:24:20 +01:00
|
|
|
def get_recipe(cls, name, ctx):
|
2015-02-03 00:19:53 +01:00
|
|
|
if not hasattr(cls, "recipes"):
|
|
|
|
cls.recipes = {}
|
|
|
|
if name in cls.recipes:
|
|
|
|
return cls.recipes[name]
|
|
|
|
mod = importlib.import_module("recipes.{}".format(name))
|
|
|
|
recipe = mod.recipe
|
|
|
|
recipe.recipe_dir = join(ctx.root_dir, "recipes", name)
|
|
|
|
return recipe
|
|
|
|
|
|
|
|
|
|
|
|
def build_recipes(names, ctx):
    """Resolve dependencies of `names`, then build every recipe in order.

    Exits the process when a requested recipe cannot be imported.
    """
    # gather all the dependencies
    print("Want to build {}".format(names))
    graph = Graph()
    # Fix: work on a copy — the original popped from `names` itself (via
    # the alias `recipe_to_load`), destructively draining the caller's
    # list.
    recipe_to_load = list(names)
    recipe_loaded = []
    while recipe_to_load:
        name = recipe_to_load.pop(0)
        if name in recipe_loaded:
            continue
        try:
            recipe = Recipe.get_recipe(name, ctx)
        except ImportError:
            print("ERROR: No recipe named {}".format(name))
            sys.exit(1)
        # Self-edge makes sure the node exists even without dependencies.
        graph.add(name, name)
        print("Loaded recipe {} (depends of {})".format(name, recipe.depends))
        for depend in recipe.depends:
            graph.add(name, depend)
        recipe_to_load += recipe.depends
        recipe_loaded.append(name)

    build_order = list(graph.find_order())
    print("Build order is {}".format(build_order))
    recipes = [Recipe.get_recipe(name, ctx) for name in build_order]
    # Register every recipe with the context before any build starts, so
    # include dirs are complete for all of them.
    for recipe in recipes:
        recipe.init_with_ctx(ctx)
    for recipe in recipes:
        recipe.execute()
|
2015-02-01 18:35:28 +01:00
|
|
|
|
2015-02-06 01:53:21 +01:00
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
def ensure_dir(filename):
    """Create directory `filename` (and parents) if it does not exist.

    Fix: the original exists()-then-makedirs() sequence raced with
    concurrent creation; catching OSError makes the call idempotent.
    """
    try:
        makedirs(filename)
    except OSError:
        # Re-raise anything that is not "already exists as a directory".
        if not isdir(filename):
            raise
|
|
|
|
|
|
|
|
|
2015-02-23 11:34:36 +01:00
|
|
|
def update_pbxproj(filename):
    """Add the built frameworks/libraries to an Xcode project file.

    Reads the persisted build state to find every recipe that completed
    `build_all`, then ensures its frameworks and libraries are present
    in the .pbxproj at `filename` (backing it up before saving).
    """
    # list all the compiled recipes
    ctx = Context()
    pbx_libraries = []
    pbx_frameworks = []
    libraries = []
    for recipe in Recipe.list_recipes():
        key = "{}.build_all".format(recipe)
        if key not in ctx.state:
            # Recipe never finished building; nothing to link.
            continue
        recipe = Recipe.get_recipe(recipe, ctx)
        recipe.init_with_ctx(ctx)
        pbx_frameworks.extend(recipe.pbx_frameworks)
        pbx_libraries.extend(recipe.pbx_libraries)
        libraries.extend(recipe.dist_libraries)

    # Deduplicate across recipes.
    pbx_frameworks = list(set(pbx_frameworks))
    pbx_libraries = list(set(pbx_libraries))
    libraries = list(set(libraries))

    print("-" * 70)
    print("The project need to have:")
    print("iOS Frameworks: {}".format(pbx_frameworks))
    print("iOS Libraries: {}".format(pbx_libraries))
    print("Libraries: {}".format(libraries))

    print("-" * 70)
    print("Analysis of {}".format(filename))

    from mod_pbxproj import XcodeProject
    project = XcodeProject.Load(filename)
    sysroot = sh.xcrun("--sdk", "iphonesimulator", "--show-sdk-path").strip()

    group = project.get_or_create_group("Frameworks")
    for framework in pbx_frameworks:
        print("Ensure {} is in the project".format(framework))
        f_path = join(sysroot, "System", "Library", "Frameworks",
                      "{}.framework".format(framework))
        project.add_file_if_doesnt_exist(f_path, parent=group, tree="DEVELOPER_DIR")
    for library in pbx_libraries:
        print("Ensure {} is in the project".format(library))
        f_path = join(sysroot, "usr", "lib",
                      "{}.dylib".format(library))
        project.add_file_if_doesnt_exist(f_path, parent=group, tree="DEVELOPER_DIR")
    for library in libraries:
        print("Ensure {} is in the project".format(library))
        project.add_file_if_doesnt_exist(library, parent=group)
    if project.modified:
        # Keep a backup before rewriting the project file.
        project.backup()
        project.save()
|
|
|
|
|
|
|
|
|
2015-02-01 18:35:28 +01:00
|
|
|
if __name__ == "__main__":
    import argparse

    class ToolchainCL(object):
        """Command-line front end for the toolchain.

        The first positional argument selects a sub-command, which is
        dispatched to the method of the same name; each sub-command then
        parses its own options from sys.argv[2:].
        """

        def __init__(self):
            parser = argparse.ArgumentParser(
                    description="Tool for managing the iOS / Python toolchain",
                    usage="""toolchain <command> [<args>]

Available commands:
    build         Build a specific recipe
    clean         Clean the build
    distclean     Clean the build and the result
    recipes       List all the available recipes
    status        List all the recipes and their build status

Xcode:
    create        Create a new xcode project
    update        Update an existing xcode project (frameworks, libraries..)
""")
            parser.add_argument("command", help="Command to run")
            # Only parse the command name here; the sub-command method
            # parses the remaining arguments itself.
            args = parser.parse_args(sys.argv[1:2])
            if not hasattr(self, args.command):
                # Fixed: was a Python 2 `print` statement, which is a
                # SyntaxError under Python 3 even though this file
                # otherwise supports both interpreters.
                print("Unrecognized command")
                parser.print_help()
                sys.exit(1)
            getattr(self, args.command)()

        def build(self):
            """Build one or more recipes (plus their dependencies)."""
            parser = argparse.ArgumentParser(
                    description="Build the toolchain")
            parser.add_argument("recipe", nargs="+", help="Recipe to compile")
            parser.add_argument("--arch", help="Restrict compilation to this arch")
            args = parser.parse_args(sys.argv[2:])

            ctx = Context()
            if args.arch:
                # --arch takes a whitespace-separated list of arch names.
                archs = args.arch.split()
                ctx.archs = [arch for arch in ctx.archs if arch.arch in archs]
                print("Architectures restricted to: {}".format(archs))
            build_recipes(args.recipe, ctx)

        def recipes(self):
            """List the available recipes, optionally in a compact form."""
            parser = argparse.ArgumentParser(
                    description="List all the available recipes")
            parser.add_argument(
                    "--compact", action="store_true",
                    help="Produce a compact list suitable for scripting")
            args = parser.parse_args(sys.argv[2:])

            if args.compact:
                print(" ".join(list(Recipe.list_recipes())))
            else:
                ctx = Context()
                for name in Recipe.list_recipes():
                    recipe = Recipe.get_recipe(name, ctx)
                    print("{recipe.name:<12} {recipe.version:<8}".format(
                        recipe=recipe))

        def clean(self):
            """Remove build state and build directories.

            With recipe names, only those recipes are cleaned; without,
            the whole build directory is deleted.
            """
            parser = argparse.ArgumentParser(
                    description="Clean the build")
            parser.add_argument("recipe", nargs="*", help="Recipe to clean")
            args = parser.parse_args(sys.argv[2:])
            ctx = Context()
            if args.recipe:
                for recipe in args.recipe:
                    print("Cleaning {} build".format(recipe))
                    # Drop every cached state entry for this recipe so it
                    # rebuilds from scratch next time.
                    ctx.state.remove_all("{}.".format(recipe))
                    build_dir = join(ctx.build_dir, recipe)
                    if exists(build_dir):
                        shutil.rmtree(build_dir)
            else:
                print("Delete build directory")
                if exists(ctx.build_dir):
                    shutil.rmtree(ctx.build_dir)

        def distclean(self):
            """Remove build, dist and cache directories entirely."""
            parser = argparse.ArgumentParser(
                    description="Clean the build, download and dist")
            args = parser.parse_args(sys.argv[2:])
            ctx = Context()
            if exists(ctx.build_dir):
                shutil.rmtree(ctx.build_dir)
            if exists(ctx.dist_dir):
                shutil.rmtree(ctx.dist_dir)
            if exists(ctx.cache_dir):
                shutil.rmtree(ctx.cache_dir)

        def status(self):
            """Print the build status of every known recipe."""
            parser = argparse.ArgumentParser(
                    description="Give a status of the build")
            args = parser.parse_args(sys.argv[2:])
            ctx = Context()
            for recipe in Recipe.list_recipes():
                key = "{}.build_all".format(recipe)
                keytime = "{}.build_all.at".format(recipe)

                if key in ctx.state:
                    status = "Build OK (built at {})".format(ctx.state[keytime])
                else:
                    status = "Not built"
                print("{:<12} - {}".format(
                    recipe, status))

        def create(self):
            """Create a new Xcode project from the bundled template."""
            parser = argparse.ArgumentParser(
                    description="Create a new xcode project")
            parser.add_argument("name", help="Name of your project")
            parser.add_argument("directory", help="Directory where your project live")
            args = parser.parse_args(sys.argv[2:])

            from cookiecutter.main import cookiecutter
            ctx = Context()
            template_dir = join(curdir, "tools", "templates")
            context = {
                "title": args.name,
                "project_name": args.name.lower(),
                "domain_name": "org.kivy.{}".format(args.name.lower()),
                "project_dir": realpath(args.directory),
                "version": "1.0.0",
                "dist_dir": ctx.dist_dir,
            }
            cookiecutter(template_dir, no_input=True, extra_context=context)
            # The template is rendered into <name>-ios/ in the cwd; wire
            # the generated pbxproj up with the compiled recipes.
            filename = join(
                getcwd(),
                "{}-ios".format(args.name.lower()),
                "{}.xcodeproj".format(args.name.lower()),
                "project.pbxproj")
            update_pbxproj(filename)
            print("--")
            print("Project directory : {}-ios".format(
                args.name.lower()))
            print("XCode project     : {0}-ios/{0}.xcodeproj".format(
                args.name.lower()))

        def update(self):
            """Refresh an existing Xcode project's frameworks/libraries."""
            parser = argparse.ArgumentParser(
                    description="Update an existing xcode project")
            parser.add_argument("filename", help="Path to your project or xcodeproj")
            args = parser.parse_args(sys.argv[2:])

            filename = args.filename
            if not filename.endswith(".xcodeproj"):
                # A project directory was given: locate the xcodeproj inside.
                from glob import glob
                xcodeproj = glob(join(filename, "*.xcodeproj"))
                if not xcodeproj:
                    print("ERROR: Unable to find a xcodeproj in {}".format(filename))
                    sys.exit(1)
                filename = xcodeproj[0]

            filename = join(filename, "project.pbxproj")
            if not exists(filename):
                print("ERROR: {} not found".format(filename))
                sys.exit(1)

            update_pbxproj(filename)
            print("--")
            print("Project {} updated".format(filename))

    ToolchainCL()
|