remove unused recipes and work on host python / python / lib
parent 5c4f42b1d4
commit d4c1ac88d7
10 changed files with 381 additions and 111 deletions

audiostream recipe (deleted)
@@ -1,7 +0,0 @@
-from toolchain import Recipe
-
-class AudiostreamRecipe(Recipe):
-    version = "master"
-    url = "https://github.com/kivy/audiostream/archive/{version}.zip"
-
-recipe = AudiostreamRecipe()

recipes/hostpython/ModulesSetup (new file, 52 lines)
@@ -0,0 +1,52 @@
posix posixmodule.c # posix (UNIX) system calls
errno errnomodule.c # posix (UNIX) errno values
pwd pwdmodule.c # this is needed to find out the user's home dir
# if $HOME is not set
_sre _sre.c # Fredrik Lundh's new regular expressions
_codecs _codecsmodule.c # access to the builtin codecs and codec registry
zipimport zipimport.c
_symtable symtablemodule.c
array arraymodule.c # array objects
cmath cmathmodule.c # -lm # complex math library functions
math mathmodule.c # -lm # math library functions, e.g. sin()
_struct _struct.c # binary structure packing/unpacking
time timemodule.c # -lm # time operations and variables
operator operator.c # operator.add() and similar goodies
_weakref _weakref.c # basic weak reference support
_random _randommodule.c # Random number generator
_collections _collectionsmodule.c # Container types
itertools itertoolsmodule.c # Functions creating iterators for efficient looping
strop stropmodule.c # String manipulations
_functools _functoolsmodule.c # Tools for working with functions and callable objects
_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator
datetime datetimemodule.c # date/time type
_bisect _bisectmodule.c # Bisection algorithms
fcntl fcntlmodule.c # fcntl(2) and ioctl(2)
select selectmodule.c # select(2); not on ancient System V
_socket socketmodule.c
_md5 md5module.c md5.c
_sha shamodule.c
_sha256 sha256module.c
_sha512 sha512module.c
binascii binascii.c
parser parsermodule.c
cStringIO cStringIO.c
cPickle cPickle.c
zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz
xxsubtype xxsubtype.c
unicodedata unicodedata.c # static Unicode character database

# Theses modules are used by Kivy inside other module
# json in Settings, _io by zipfile...
_json _json.c
_io _io/bufferedio.c _io/bytesio.c _io/fileio.c _io/iobase.c _io/_iomodule.c _io/stringio.c _io/textio.c
_heapq _heapqmodule.c

# Special inclusion for sqlite3
_sqlite3 -DSQLITE_OMIT_LOAD_EXTENSION _sqlite/cache.c _sqlite/microprotocols.c _sqlite/row.c _sqlite/connection.c _sqlite/module.c _sqlite/statement.c _sqlite/cursor.c _sqlite/prepare_protocol.c _sqlite/util.c

# Include expat
pyexpat expat/xmlparse.c expat/xmlrole.c expat/xmltok.c pyexpat.c -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI

# Future (used by numpy)
future_builtins future_builtins.c

recipes/hostpython/__init__.py (new file, 47 lines)
@@ -0,0 +1,47 @@
from toolchain import Recipe, shprint
from os.path import join
import sh
import shutil


class HostpythonRecipe(Recipe):
    version = "2.7.1"
    url = "https://www.python.org/ftp/python/{version}/Python-{version}.tar.bz2"
    depends = ["libffi", ]
    archs = ["i386"]

    def download(self):
        super(HostpythonRecipe, self).download()
        self.ctx.hostpython = join(
            self.ctx.build_dir, "i386", self.archive_root,
            "hostpython")
        self.ctx.hostpgen = join(
            self.ctx.build_dir, "i386", self.archive_root,
            "Parser", "hostpgen")
        print("Global: hostpython located at {}".format(self.ctx.hostpython))
        print("Global: hostpgen located at {}".format(self.ctx.hostpgen))

    def prebuild_arch(self, arch):
        if self.has_marker("patched"):
            return
        self.apply_patch("ssize-t-max.patch")
        self.apply_patch("dynload.patch")
        self.apply_patch("static-_sqlite3.patch")
        self.copy_file("ModulesSetup", "Modules/Setup.local")
        self.set_marker("patched")

    def build_i386(self):
        sdk_path = sh.xcrun("--sdk", "macosx", "--show-sdk-path").strip()

        build_env = self.ctx.env.copy()
        build_env["CC"] = "clang -Qunused-arguments -fcolor-diagnostics"
        build_env["LDFLAGS"] = "-lsqlite3"
        build_env["CFLAGS"] = "--sysroot={}".format(sdk_path)
        configure = sh.Command(join(self.build_dir, "configure"))
        shprint(configure, _env=build_env)
        shprint(sh.make, "-C", self.build_dir, "-j4", "python.exe", "Parser/pgen",
                _env=build_env)
        shutil.move("python.exe", "hostpython")
        shutil.move("Parser/pgen", "Parser/hostpgen")

recipe = HostpythonRecipe()
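
The download() override above publishes ctx.hostpython and ctx.hostpgen so later recipes can drive the freshly built host interpreter. A minimal sketch of how a downstream recipe might use it; the ExampleRecipe name, URL and the setup.py invocation are illustrative, not part of this commit:

from toolchain import Recipe, shprint
import sh


class ExampleRecipe(Recipe):
    # Illustrative only: shows how a recipe can reuse ctx.hostpython,
    # which HostpythonRecipe.download() sets for the whole build.
    version = "1.0"
    url = "https://example.org/example-{version}.tar.gz"
    depends = ["hostpython"]

    def build_arch(self, arch):
        hostpython = sh.Command(self.ctx.hostpython)
        # Run the host interpreter, e.g. to drive a setup.py step.
        shprint(hostpython, "setup.py", "build_ext", _env=self.ctx.env)


recipe = ExampleRecipe()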

recipes/hostpython/dynload.patch (new file, 24 lines)
@@ -0,0 +1,24 @@
--- Python-2.7.1/Python/dynload_shlib.c.orig	2011-12-05 00:00:00.000000000 +0100
+++ Python-2.7.1/Python/dynload_shlib.c	2011-12-05 00:02:51.000000000 +0100
@@ -84,6 +84,15 @@
     PyOS_snprintf(funcname, sizeof(funcname),
                   LEAD_UNDERSCORE "init%.200s", shortname);

+    /* On IOS, dlopen crash as soon as we try to open one of our library.
+     * Instead, we have done a redirection of linking to convert our .so into a
+     * .a. Then the main executable is linked with theses symbol. So, instead
+     * of trying to dlopen, directly do the dlsym.
+     * -- Mathieu
+     */
+    return (dl_funcptr) dlsym(RTLD_MAIN_ONLY, funcname);
+
+#if 0
     if (fp != NULL) {
         int i;
         struct stat statb;
@@ -140,4 +149,5 @@
         handles[nhandles++].handle = handle;
     p = (dl_funcptr) dlsym(handle, funcname);
     return p;
+#endif
 }

recipes/hostpython/ssize-t-max.patch (new file, 17 lines)
@@ -0,0 +1,17 @@
diff -Naur Python-2.7.1.orig/Include/pyport.h Python-2.7.1/Include/pyport.h
--- Python-2.7.1.orig/Include/pyport.h	2010-09-14 18:10:22.000000000 +0200
+++ Python-2.7.1/Include/pyport.h	2011-05-13 12:24:53.000000000 +0200
@@ -186,9 +186,11 @@
 #endif

 /* Largest positive value of type Py_ssize_t. */
-#define PY_SSIZE_T_MAX ((Py_ssize_t)(((size_t)-1)>>1))
+//#define PY_SSIZE_T_MAX ((Py_ssize_t)(((size_t)-1)>>1))
 /* Smallest negative value of type Py_ssize_t. */
-#define PY_SSIZE_T_MIN (-PY_SSIZE_T_MAX-1)
+//#define PY_SSIZE_T_MIN (-PY_SSIZE_T_MAX-1)
+#define PY_SSIZE_T_MAX TMP_MAX
+#define PY_SSIZE_T_MIN -TMP_MAX

 #if SIZEOF_PID_T > SIZEOF_LONG
 # error "Python doesn't support sizeof(pid_t) > sizeof(long)"

recipes/hostpython/static-_sqlite3.patch (new file, 25 lines)
@@ -0,0 +1,25 @@
--- Python-2.7.1/Modules/_sqlite/module.c.orig	2012-10-28 02:30:58.000000000 +0200
+++ Python-2.7.1/Modules/_sqlite/module.c	2012-10-28 02:28:12.000000000 +0200
@@ -28,6 +28,9 @@
 #include "prepare_protocol.h"
 #include "microprotocols.h"
 #include "row.h"
+#ifndef MODULE_NAME
+#define MODULE_NAME "_sqlite3"
+#endif

 #if SQLITE_VERSION_NUMBER >= 3003003
 #define HAVE_SHARED_CACHE
--- Python-2.7.1/Modules/_sqlite/sqlitecompat.h.orig	2012-10-28 02:30:53.000000000 +0200
+++ Python-2.7.1/Modules/_sqlite/sqlitecompat.h	2012-10-28 02:28:14.000000000 +0200
@@ -26,6 +26,10 @@
 #ifndef PYSQLITE_COMPAT_H
 #define PYSQLITE_COMPAT_H

+#ifndef MODULE_NAME
+#define MODULE_NAME "_sqlite3"
+#endif
+
 /* define Py_ssize_t for pre-2.5 versions of Python */

 #if PY_VERSION_HEX < 0x02050000

libffi recipe (modified)
@@ -1,33 +1,49 @@
 from toolchain import Recipe, shprint
+from os.path import join, exists
 import sh
+import shutil


 class LibffiRecipe(Recipe):
-    version = "3.0.13"
+    version = "3.2.1"
     url = "ftp://sourceware.org/pub/libffi/libffi-{version}.tar.gz"
+    archs = ("armv7",)

     def prebuild_arch(self, arch):
         if self.has_marker("patched"):
             return
-        self.apply_patch("ffi-3.0.13-sysv.S.patch")
-        if arch in ("armv7", "armv7s", "arm64"):
-            shprint(sh.sed,
-                    "-i.bak",
-                    "s/-miphoneos-version-min=4.0/-miphoneos-version-min=6.0/g",
-                    "generate-ios-source-and-headers.py")
+        # necessary as it doesn't compile with XCode 6.0. If we use 5.1.1, the
+        # compiler for i386 is not working.
+        shprint(sh.sed,
+                "-i.bak",
+                "s/-miphoneos-version-min=5.1.1/-miphoneos-version-min=6.0/g",
+                "generate-darwin-source-and-headers.py")
         self.set_marker("patched")

     def build_arch(self, arch):
-        if arch == "i386":
-            target_name = "libffi OS X"
-        else:
-            target_name = "libffi iOS"
-
         shprint(sh.xcodebuild,
                 "-project", "libffi.xcodeproj",
-                "-target", target_name,
-                "-configuration", "Release",
-                "-sdk", "iphoneos{}".format(self.ctx.sdkver),
-                "OTHER_CFLAGS=-no-integrated-as")
+                "-target", "libffi-iOS",
+                "-configuration", "Release")
+
+    def assemble_to(self, filename):
+        shutil.copy(join(
+            self.get_build_dir("armv7"),
+            "build/Release-iphoneos/libffi.a"),
+            filename)
+        for sdkarch, arch in (
+                ("iphoneos-arm64", "arm64"),
+                ("iphoneos-armv7", "armv7"),
+                ("iphonesimulator-i386", "i386"),
+                ("iphonesimulator-x86_64", "x86_64")):
+            dest_dir = join(self.ctx.dist_dir, "include", arch, "ffi")
+            if exists(dest_dir):
+                continue
+            shutil.copytree(join(
+                self.get_build_dir("armv7"),
+                "build_{}/include".format(sdkarch)),
+                join(self.ctx.dist_dir, "include", arch, "ffi"))


 recipe = LibffiRecipe()

python recipe (modified)
@@ -1,24 +1,12 @@
 from toolchain import Recipe, shprint
 from os.path import join
 import sh
-import shutil


 class PythonRecipe(Recipe):
     version = "2.7.1"
     url = "https://www.python.org/ftp/python/{version}/Python-{version}.tar.bz2"
-    depends = ["libffi", ]
-
-    def download(self):
-        super(PythonRecipe, self).download()
-        self.ctx.hostpython = join(
-            self.ctx.build_dir, "i386", self.archive_root,
-            "hostpython")
-        self.ctx.hostpgen = join(
-            self.ctx.build_dir, "i386", self.archive_root,
-            "Parser", "hostpgen")
-        print("Global: hostpython located at {}".format(self.ctx.hostpython))
-        print("Global: hostpgen located at {}".format(self.ctx.hostpgen))
+    depends = ["hostpython", "libffi", ]

     def prebuild_arch(self, arch):
         # common to all archs
@@ -29,56 +17,30 @@ class PythonRecipe(Recipe):
         self.apply_patch("static-_sqlite3.patch")
         self.copy_file("ModulesSetup", "Modules/Setup.local")
         self.copy_file("_scproxy.py", "Lib/_scproxy.py")
-        #self.copy_file("Setup.dist", "Modules/Setup.dist")
-        if arch in ("armv7", "armv7s", "arm64"):
-            self.apply_patch("xcompile.patch")
-            self.apply_patch("setuppath.patch")
-            self.append_file("ModulesSetup.mobile", "Modules/Setup.local")
+        self.apply_patch("xcompile.patch")
+        self.apply_patch("setuppath.patch")
+        self.append_file("ModulesSetup.mobile", "Modules/Setup.local")

         self.set_marker("patched")

-    def build_i386(self):
-        sdk_path = sh.xcrun(
-            "--sdk", "macosx",
-            "--show-sdk-path").splitlines()[0]
-
-        build_env = self.ctx.env.copy()
-        build_env["CC"] = "clang -Qunused-arguments -fcolor-diagnostics"
-        build_env["LDFLAGS"] = "-lsqlite3"
-        build_env["CFLAGS"] = "--sysroot={}".format(sdk_path)
-        configure = sh.Command(join(self.build_dir, "configure"))
-        shprint(configure, _env=build_env)
-        shprint(sh.make, "-C", self.build_dir, "-j4", "python.exe", "Parser/pgen",
-                _env=build_env)
-        shutil.move("python.exe", "hostpython")
-        shutil.move("Parser/pgen", "Parser/hostpgen")
-
     def build_arch(self, arch):
-        if self.has_marker("build"):
-            return
-        if arch == "i386":
-            super(PythonRecipe, self).build_arch(arch)
-            self.set_marker("build")
-            return
-
         build_env = self.ctx.env.copy()

-        build_env["CC"] = sh.xcrun("-find", "-sdk", "iphoneos", "clang").splitlines()[0]
-        build_env["AR"] = sh.xcrun("-find", "-sdk", "iphoneos", "ar").splitlines()[0]
-        build_env["LD"] = sh.xcrun("-find", "-sdk", "iphoneos", "ld").splitlines()[0]
+        build_env["CC"] = sh.xcrun("-find", "-sdk", arch.sdk, "clang").strip()
+        build_env["AR"] = sh.xcrun("-find", "-sdk", arch.sdk, "ar").strip()
+        build_env["LD"] = sh.xcrun("-find", "-sdk", arch.sdk, "ld").strip()
         build_env["CFLAGS"] = " ".join([
-            "-arch", arch,
+            "-arch", arch.arch,
             "-pipe", "-no-cpp-precomp",
-            "-isysroot", self.ctx.iossdkroot,
+            "-isysroot", arch.sysroot,
             "-O3",
-            "-miphoneos-version-min={}".format(self.ctx.sdkver)])
+            "-miphoneos-version-min={}".format(arch.version_min)])
         build_env["LDFLAGS"] = " ".join([
-            "-arch", arch,
+            "-arch", arch.arch,
             "-undefined dynamic_lookup",
             "-Lextralibs/",
             "-lsqlite3",
-            "-isysroot", self.ctx.iossdkroot])
+            "-isysroot", arch.sysroot])

         configure = sh.Command(join(self.build_dir, "configure"))
         shprint(configure,
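
The net effect of this hunk is that build_arch no longer hard-codes the iphoneos SDK: the compiler, flags and sysroot all come from the arch object passed in (see the Arch classes added to toolchain.py below). A rough, self-contained illustration of that pattern, with made-up values standing in for what xcrun would normally return:

# Illustrative only: mirrors how PythonRecipe.build_arch now derives its
# flags from an Arch object instead of hard-coded "iphoneos" strings.
class FakeArch(object):
    sdk = "iphonesimulator"
    arch = "x86_64"
    sysroot = "/path/to/iPhoneSimulator.sdk"  # made-up; normally from xcrun

arch = FakeArch()
cflags = " ".join([
    "-arch", arch.arch,
    "-pipe", "-no-cpp-precomp",
    "-isysroot", arch.sysroot,
    "-O3"])
print("CFLAGS for {}: {}".format(arch.sdk, cflags))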
Binary file not shown.

toolchain.py (modified)
@@ -37,6 +37,86 @@ class ChromeDownloader(FancyURLopener):

 urlretrieve = ChromeDownloader().retrieve


+class Arch(object):
+    pass
+
+
+class ArchSimulator(Arch):
+    sdk = "iphonesimulator"
+    arch = "i386"
+    triple = "i386-apple-darwin11"
+    version_min = "-miphoneos-version-min=5.1.1"
+    sysroot = sh.xcrun("--sdk", "iphonesimulator", "--show-sdk-path").strip()
+
+
+class Arch64Simulator(Arch):
+    sdk = "iphonesimulator"
+    arch = "x86_64"
+    triple = "x86_64-apple-darwin13"
+    version_min = "-miphoneos-version-min=7.0"
+    sysroot = sh.xcrun("--sdk", "iphonesimulator", "--show-sdk-path").strip()
+
+
+class ArchIOS(Arch):
+    sdk = "iphoneos"
+    arch = "armv7"
+    triple = "arm-apple-darwin11"
+    version_min = "-miphoneos-version-min=5.1.1"
+    sysroot = sh.xcrun("--sdk", "iphoneos", "--show-sdk-path").strip()
+
+
+class Arch64IOS(Arch):
+    sdk = "iphoneos"
+    arch = "arm64"
+    triple = "aarch64-apple-darwin13"
+    version_min = "-miphoneos-version-min=7.0"
+    sysroot = sh.xcrun("--sdk", "iphoneos", "--show-sdk-path").strip()
+
+
+class Graph(object):
+    # Taken from python-for-android/depsort
+    def __init__(self):
+        # `graph`: dict that maps each package to a set of its dependencies.
+        self.graph = {}
+
+    def add(self, dependent, dependency):
+        """Add a dependency relationship to the graph"""
+        self.graph.setdefault(dependent, set())
+        self.graph.setdefault(dependency, set())
+        if dependent != dependency:
+            self.graph[dependent].add(dependency)
+
+    def add_optional(self, dependent, dependency):
+        """Add an optional (ordering only) dependency relationship to the graph
+
+        Only call this after all mandatory requirements are added
+        """
+        if dependent in self.graph and dependency in self.graph:
+            self.add(dependent, dependency)
+
+    def find_order(self):
+        """Do a topological sort on a dependency graph
+
+        :Parameters:
+        :Returns:
+            iterator, sorted items form first to last
+        """
+        graph = dict((k, set(v)) for k, v in self.graph.items())
+        while graph:
+            # Find all items without a parent
+            leftmost = [l for l, s in graph.items() if not s]
+            if not leftmost:
+                raise ValueError('Dependency cycle detected! %s' % graph)
+            # If there is more than one, sort them for predictable order
+            leftmost.sort()
+            for result in leftmost:
+                # Yield and remove them from the graph
+                yield result
+                graph.pop(result)
+                for bset in graph.values():
+                    bset.discard(result)
+
+
 class Context(object):
     env = environ.copy()
     root_dir = None
@@ -87,7 +167,7 @@ class Context(object):
         self.cache_dir = "{}/.cache".format(self.root_dir)
         self.dist_dir = "{}/dist".format(self.root_dir)
         self.install_dir = "{}/dist/root".format(self.root_dir)
-        self.archs = ("i386", "armv7", "armv7s", "arm64")
+        self.archs = (ArchSimulator, Arch64Simulator, ArchIOS, Arch64IOS)

         # path to some tools
         self.ccache = sh.which("ccache")
@@ -128,6 +208,7 @@ class Context(object):
 class Recipe(object):
     version = None
     url = None
+    archs = []
     depends = []

     # API available for recipes
@@ -238,15 +319,21 @@ class Recipe(object):

     @property
     def archive_fn(self):
-        if hasattr(self, "ext"):
-            ext = self.ext
-        else:
-            ext = basename(self.url).split(".", 1)[-1]
-        fn = "{}/{}.{}".format(
+        bfn = basename(self.url.format(version=self.version))
+        fn = "{}/{}-{}".format(
             self.ctx.cache_dir,
-            self.name, ext)
+            self.name, bfn)
         return fn

+    @property
+    def filtered_archs(self):
+        for arch in self.ctx.archs:
+            if not self.archs or (arch.arch in self.archs):
+                yield arch
+
+    def get_build_dir(self, arch):
+        return join(self.ctx.build_dir, arch, self.archive_root)
+
     # Public Recipe API to be subclassed if needed

     def init_with_ctx(self, ctx):
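
With this change the cache file name moves from "<name>.<extension>" to "<name>-<archive basename>", which keeps differently versioned downloads apart. A quick illustration of the new naming, replayed with made-up values:

from os.path import basename

# Illustrative only: replays the new archive_fn logic with example values.
url = "https://www.python.org/ftp/python/{version}/Python-{version}.tar.bz2"
version = "2.7.1"
cache_dir = "/path/to/.cache"
name = "python"

bfn = basename(url.format(version=version))
print("{}/{}-{}".format(cache_dir, name, bfn))
# -> /path/to/.cache/python-Python-2.7.1.tar.bz2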
@@ -268,70 +355,118 @@ class Recipe(object):

     def extract(self):
         # recipe tmp directory
-        archive_root = self.get_archive_rootdir(self.archive_fn)
-        for arch in self.ctx.archs:
-            print("Extract {} for {}".format(self.name, arch))
-            self.extract_arch(arch, archive_root)
+        for arch in self.filtered_archs:
+            print("Extract {} for {}".format(self.name, arch.arch))
+            self.extract_arch(arch.arch)

-    def extract_arch(self, arch, archive_root):
+    def extract_arch(self, arch):
         build_dir = join(self.ctx.build_dir, arch)
-        if exists(join(build_dir, archive_root)):
+        if exists(join(build_dir, self.archive_root)):
             return
         ensure_dir(build_dir)
         self.extract_file(self.archive_fn, build_dir)

     def build_all(self):
-        archive_root = self.get_archive_rootdir(self.archive_fn)
-        for arch in self.ctx.archs:
-            self.build_dir = join(self.ctx.build_dir, arch, archive_root)
+        filtered_archs = list(self.filtered_archs)
+        print("Build {} for {} (filtered)".format(
+            self.name,
+            ", ".join([x.arch for x in filtered_archs])))
+        for arch in self.filtered_archs:
+            self.build_dir = join(self.ctx.build_dir, arch.arch, self.archive_root)
             if self.has_marker("building"):
                 print("Warning: {} build for {} has been incomplete".format(
-                    self.name, arch))
+                    self.name, arch.arch))
                 print("Warning: deleting the build and restarting.")
                 shutil.rmtree(self.build_dir)
-                self.extract_arch(arch, archive_root)
+                self.extract_arch(arch.arch)
+
+            if self.has_marker("build_done"):
+                print("Build already done.")
+                continue

             self.set_marker("building")

             chdir(self.build_dir)
-            print("Prebuild {} for {}".format(self.name, arch))
+            print("Prebuild {} for {}".format(self.name, arch.arch))
             self.prebuild_arch(arch)
-            print("Build {} for {}".format(self.name, arch))
+            print("Build {} for {}".format(self.name, arch.arch))
             self.build_arch(arch)
-            print("Postbuild {} for {}".format(self.name, arch))
+            print("Postbuild {} for {}".format(self.name, arch.arch))
             self.postbuild_arch(arch)
             self.delete_marker("building")
+            self.set_marker("build_done")
+
+        name = self.name
+        if not name.startswith("lib"):
+            name = "lib{}".format(name)
+        static_fn = join(self.ctx.dist_dir, "lib", "{}.a".format(name))
+        ensure_dir(dirname(static_fn))
+        print("Assemble {} to {}".format(self.name, static_fn))
+        self.assemble_to(static_fn)

     def prebuild_arch(self, arch):
-        prebuild = "prebuild_{}".format(arch)
+        prebuild = "prebuild_{}".format(arch.arch)
         if hasattr(self, prebuild):
             getattr(self, prebuild)()

     def build_arch(self, arch):
-        build = "build_{}".format(arch)
+        build = "build_{}".format(arch.arch)
         if hasattr(self, build):
             getattr(self, build)()

     def postbuild_arch(self, arch):
-        postbuild = "postbuild_{}".format(arch)
+        postbuild = "postbuild_{}".format(arch.arch)
         if hasattr(self, postbuild):
             getattr(self, postbuild)()

+    def assemble_to(self, filename):
+        return
+
-def list_recipes():
-    recipes_dir = join(dirname(__file__), "recipes")
-    for name in listdir(recipes_dir):
-        fn = join(recipes_dir, name)
-        if isdir(fn):
-            yield name
+    @classmethod
+    def list_recipes(cls):
+        recipes_dir = join(dirname(__file__), "recipes")
+        for name in listdir(recipes_dir):
+            fn = join(recipes_dir, name)
+            if isdir(fn):
+                yield name

-def build_recipe(name, ctx):
-    mod = importlib.import_module("recipes.{}".format(name))
-    recipe = mod.recipe
-    recipe.recipe_dir = join(ctx.root_dir, "recipes", name)
-    recipe.init_with_ctx(ctx)
-    recipe.execute()
+    @classmethod
+    def get_recipe(cls, name):
+        if not hasattr(cls, "recipes"):
+            cls.recipes = {}
+        if name in cls.recipes:
+            return cls.recipes[name]
+        mod = importlib.import_module("recipes.{}".format(name))
+        recipe = mod.recipe
+        recipe.recipe_dir = join(ctx.root_dir, "recipes", name)
+        return recipe
+
+
+def build_recipes(names, ctx):
+    # gather all the dependencies
+    print("Want to build {}".format(names))
+    graph = Graph()
+    recipe_to_load = names
+    recipe_loaded = []
+    while names:
+        name = recipe_to_load.pop(0)
+        if name in recipe_loaded:
+            continue
+        print("Load recipe {}".format(name))
+        recipe = Recipe.get_recipe(name)
+        print("Recipe {} depends of {}".format(name, recipe.depends))
+        for depend in recipe.depends:
+            graph.add(name, depend)
+        recipe_to_load += recipe.depends
+        recipe_loaded.append(name)
+
+    build_order = list(graph.find_order())
+    print("Build order is {}".format(build_order))
+    for name in build_order:
+        recipe = Recipe.get_recipe(name)
+        recipe.init_with_ctx(ctx)
+        recipe.execute()
+

 def ensure_dir(filename):
     if not exists(filename):
         makedirs(filename)
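
The per-arch hooks keep dispatching by method name, now keyed on arch.arch, so a recipe only defines build_<arch> for the architectures it supports. A stripped-down sketch of that convention (both classes below are hypothetical, not from the commit):

# Illustrative only: same name-based dispatch as Recipe.build_arch above.
class DemoRecipe(object):
    def build_arch(self, arch):
        build = "build_{}".format(arch.arch)
        if hasattr(self, build):
            getattr(self, build)()

    def build_i386(self):
        print("simulator-only build step")


class I386(object):
    arch = "i386"


DemoRecipe().build_arch(I386())   # prints: simulator-only build step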
@ -343,5 +478,4 @@ if __name__ == "__main__":
|
||||||
description='Compile Python and others extensions for iOS')
|
description='Compile Python and others extensions for iOS')
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
ctx = Context()
|
ctx = Context()
|
||||||
print list(list_recipes())
|
build_recipes(["python"], ctx)
|
||||||
build_recipe("libffi", ctx)
|
|
||||||
|
|