Compare commits

4 commits: master...notifications

Commits:
  25b2f29371
  0a568d3c8c
  8655bf279b
  c74994eb14

25 changed files with 9967 additions and 706 deletions
@@ -11,10 +11,27 @@ class HostSetuptools(Recipe):
     archs = ["x86_64"]
     url = "setuptools"
 
+    def get_stt_env(self, arch):
+        build_env = arch.get_env()
+        build_env["IOSROOT"] = self.ctx.root_dir
+        build_env["IOSSDKROOT"] = arch.sysroot
+        build_env["LDSHARED"] = join(self.ctx.root_dir, "tools", "liblink")
+        build_env["ARM_LD"] = build_env["LD"]
+        build_env["ARCH"] = arch.arch
+        build_env["C_INCLUDE_PATH"] = join(arch.sysroot, "usr", "include")
+        build_env["LIBRARY_PATH"] = join(arch.sysroot, "usr", "lib")
+        build_env["CFLAGS"] = " ".join([
+            "-I{}".format(join(self.ctx.dist_dir, "include", arch.arch, "freetype")) +
+            " -I{}".format(join(self.ctx.dist_dir, "include", arch.arch, "libjpeg")) +
+            " -arch {}".format(arch.arch)
+        ])
+        return build_env
+
+
     def prebuild_arch(self, arch):
         hostpython = sh.Command(self.ctx.hostpython)
         sh.curl("-O", "https://bootstrap.pypa.io/ez_setup.py")
-        shprint(hostpython, "./ez_setup.py")
+        shprint(hostpython, "./ez_setup.py", _env=self.get_stt_env(arch))
         # Extract setuptools egg and remove .pth files. Otherwise subsequent
         # python package installations using setuptools will raise exceptions.
         # Setuptools version 28.3.0
@@ -26,9 +43,9 @@ class HostSetuptools(Recipe):
         setuptools_egg_path = f.read().strip('./').strip('\n')
         unzip = sh.Command('unzip')
         shprint(unzip, setuptools_egg_path)
-        os.remove(setuptools_egg_path)
-        os.remove('setuptools.pth')
-        os.remove('easy-install.pth')
-        shutil.rmtree('EGG-INFO')
+        #os.remove(setuptools_egg_path)
+        #os.remove('setuptools.pth')
+        #os.remove('easy-install.pth')
+        #shutil.rmtree('EGG-INFO')
 
 recipe = HostSetuptools()
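The reason prebuild now threads _env=self.get_stt_env(arch) through shprint is that the sh library runs its child process with exactly the mapping passed in _env, nothing inherited. A minimal standalone sketch of that behavior (the variables here are placeholders, not the recipe's real values):

    import sh

    # _env replaces the child's entire environment, so every variable the
    # cross-compile needs (CC, CFLAGS, sysroot paths, ...) must be in the dict.
    env = {"PATH": "/usr/bin:/bin", "CFLAGS": "-arch x86_64"}
    print(sh.env(_env=env))  # the child `env` prints only PATH and CFLAGS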
@@ -4,7 +4,8 @@ import sh
 
 class LibffiRecipe(Recipe):
     version = "3.2.1"
-    url = "ftp://sourceware.org/pub/libffi/libffi-{version}.tar.gz"
+    #url = "ftp://sourceware.org/pub/libffi/libffi-{version}.tar.gz"
+    url = "https://www.mirrorservice.org/sites/sourceware.org/pub/libffi/libffi-{version}.tar.gz"
     library = "build/Release-{arch.sdk}/libffi.a"
     include_per_arch = True
     include_dir = "build_{arch.sdk}-{arch.arch}/include"
recipes/notifications/__init__.py (new executable file, 17 lines)

from toolchain import CythonRecipe


class IosNotifRecipe(CythonRecipe):
    version = "master"
    url = "src"
    library = "libnotifications.a"
    pbx_frameworks = ["UserNotifications"]
    depends = ["python"]

    def install(self):
        self.install_python_package(name="notifications.so", is_dir=False)


recipe = IosNotifRecipe()
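CythonRecipe subclasses like this are normally driven through the kivy-ios entry point. A minimal sketch of the expected invocation (the script name follows kivy-ios convention; exact flags may differ):

    import subprocess

    # Builds the recipe above: cythonizes notifications.pyx, compiles and
    # links ios_notif.m, then installs notifications.so into the dist tree.
    subprocess.check_call(["python", "toolchain.py", "build", "notifications"])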
recipes/notifications/src/ios_notif.h (new file, 6 lines)

#ifndef __IOS_NOTIF
#define __IOS_NOTIF

void notif(char *title, char *body);

#endif
recipes/notifications/src/ios_notif.m (new file, 51 lines)

#import <UserNotifications/UserNotifications.h>
#include "ios_notif.h"

void notif(char *title, char *body){
    UNUserNotificationCenter *center = [UNUserNotificationCenter currentNotificationCenter];

    UNAuthorizationOptions options = UNAuthorizationOptionAlert + UNAuthorizationOptionSound;
    [center requestAuthorizationWithOptions:options
            completionHandler:^(BOOL granted, NSError * _Nullable error) {
        if (!granted) {
            NSLog(@"Something went wrong");
        }
        else
            NSLog(@"Already access granted");
    }];

    UNMutableNotificationContent *content = [UNMutableNotificationContent new];
    NSString *nstitle = [NSString stringWithCString:(char *)title encoding:NSUTF8StringEncoding];
    NSString *nsbody = [NSString stringWithCString:(char *)body encoding:NSUTF8StringEncoding];
    NSString *nsid = @"LocalNotification";
    content.title = nstitle;
    content.body = nsbody;
    content.sound = [UNNotificationSound defaultSound];

    UNNotificationAction *action = [UNNotificationAction
        actionWithIdentifier:@"LAUNCH_ACTION"
        title:@"Launch App"
        options:UNNotificationActionOptionForeground];
    UNNotificationCategory *category = [UNNotificationCategory categoryWithIdentifier:@"CAT_LAUNCH_ACTION"
        actions:@[action] intentIdentifiers:@[]
        options:UNNotificationCategoryOptionNone];
    NSSet *categories = [NSSet setWithObject:category];
    [center setNotificationCategories:categories];
    content.categoryIdentifier = @"CAT_LAUNCH_ACTION";

    UNTimeIntervalNotificationTrigger *trigger = [UNTimeIntervalNotificationTrigger triggerWithTimeInterval:5
        repeats:false];
    NSLog(@"Done adding trigger");
    UNNotificationRequest *request = [UNNotificationRequest requestWithIdentifier:nsid
        content:content trigger:trigger];

    NSLog(@"Done initing notif request");
    [center addNotificationRequest:request withCompletionHandler:^(NSError * _Nullable error) {
        if (error != nil) {
            NSLog(@"Something went wrong while adding to center");
        }
        else
            NSLog(@"Done adding notif request");
    }];

}
recipes/notifications/src/notifications.pyx (new file, 30 lines)

'''
Notifications module
====================

Wrapper for local notifications in iOS
'''

cdef extern from "ios_notif.h":
    void notif(char *title, char *body)

class IOSNotif(object):
    def show(self, title, body):
        show_notif(title, body)

def show_notif(title, body):
    '''
    Show local notifications
    :Parameters:
        `title`: str
            Title string
        `body`: str
            Body of the notification

    Example for showing a local notification::
        import notifications
        title = "Title"
        body = "Body"
        notifications.IOSNotif().show(title, body)
    '''
    notif(title, body)
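One practical caveat with the cdef signature above: notif takes char *, so the strings handed to show must be byte strings (plain str on the Python 2.7 this toolchain targets; bytes on Python 3). App-side sketch:

    import notifications

    # On Python 2, str coerces to char * automatically; on Python 3 these
    # arguments would need to be bytes, e.g. b"Title".
    notifications.IOSNotif().show("Title", "Body")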
recipes/notifications/src/setup.py (new file, 13 lines)

from distutils.core import setup, Extension
import os

setup(name='notifications',
      version='1.0',
      ext_modules=[
          Extension(
              'notifications', ['notifications.c', 'ios_notif.m'],
              libraries=[],
              library_dirs=[],
          )
      ]
)
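Note that notifications.c is generated, not checked in. A rough sketch of the flow the CythonRecipe machinery is expected to perform (commands illustrative, not lifted from the toolchain):

    import subprocess

    # Cython translates the .pyx into the notifications.c that setup.py lists,
    # then distutils compiles it together with the Objective-C bridge.
    subprocess.check_call(["cython", "notifications.pyx"])
    subprocess.check_call(["python", "setup.py", "build_ext", "--inplace"])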
@@ -6,9 +6,9 @@ import fnmatch
 
 
 class PillowRecipe(Recipe):
-    version = "2.8.2"
-    url = "https://pypi.python.org/packages/source/P/Pillow/Pillow-{version}.tar.gz"
-    #url = "https://github.com/python-pillow/Pillow/archive/{version}.tar.gz"
+    version = "3.4.2"
+    #url = "https://pypi.python.org/packages/source/P/Pillow/Pillow-{version}.tar.gz"
+    url = "https://github.com/python-pillow/Pillow/archive/{version}.tar.gz"
     library = "libpil.a"
     depends = ["hostpython", "host_setuptools", "pkgresources", "freetype", "libjpeg", "python", "ios"]
     pbx_libraries = ["libz", "libbz2"]

@@ -49,7 +49,9 @@
         build_env = self.get_pil_env(arch)
         dest_dir = join(self.ctx.dist_dir, "root", "python")
         build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
-        shprint(hostpython, "-m", "easy_install",
+        es = self.ctx.dist_dir + '/hostpython/bin/easy_install'
+        easy_install = sh.Command(es)
+        shprint(easy_install,
                 "--prefix", dest_dir, "-Z", "./",
                 _env=build_env)
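The easy_install change swaps "hostpython -m easy_install" for the easy_install script inside the hostpython distribution, resolved as an sh command so it runs via its own shebang interpreter (the hostpython that installed setuptools) instead of whichever python is first in PATH. A standalone sketch of the same pattern (the dist path is hypothetical; the recipe uses self.ctx.dist_dir):

    import os
    import sh

    dist_dir = "dist"  # placeholder for the toolchain's dist directory
    easy_install = sh.Command(os.path.join(dist_dir, "hostpython", "bin", "easy_install"))
    # Install an unpacked source tree into a prefix, unzipped (-Z):
    easy_install("--prefix", "/tmp/site", "-Z", "./", _env=dict(os.environ))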
(The Pillow setup.py patch is rewritten for 3.4.2. Since this file is itself a
patch, the first column marks lines removed from (-) or added to (+) the patch
file, while the inner -/+/space prefixes belong to the patch's own hunks.)

@@ -1,60 +1,94 @@
---- Pillow-2.7.0/setup.py 2014-12-31 20:42:56.000000000 +0530
-+++ Pillow-2.7.0/setup.py 2015-02-26 19:38:59.000000000 +0530
-@@ -20,7 +20,6 @@
+--- Pillow-3.4.2/setup.py 2016-12-19 19:10:30.000000000 +0530
++++ Pillow-2.4.2/setup.py 2016-12-19 23:07:15.000000000 +0530
+@@ -6,7 +6,9 @@
+ # Final rating: 10/10
+ # Your cheese is so fresh most people think it's a cream: Mascarpone
+ # ------------------------------
++
+ from __future__ import print_function
++BUILD_IOS = True
+ import glob
+ import os
+ import platform as plat
+@@ -21,7 +23,8 @@
 
  # monkey patch import hook. Even though flake8 says it's not used, it is.
  # comment this out to disable multi threaded builds.
 -import mp_compile
++if not BUILD_IOS:
++    import mp_compile
 
- _IMAGING = (
-     "decode", "encode", "map", "display", "outline", "path")
-@@ -83,7 +82,7 @@
+ _IMAGING = ("decode", "encode", "map", "display", "outline", "path")
+@@ -103,11 +106,13 @@
  return open(file, 'rb').read()
 
- try:
+-try:
 -    import _tkinter
-+    _tkinter = None
- except (ImportError, OSError):
-     # pypy emits an oserror
-     _tkinter = None
-@@ -186,7 +185,7 @@
- for d in os.environ[k].split(os.path.pathsep):
+-except (ImportError, OSError):
+-    # pypy emits an oserror
+-    _tkinter = None
++_tkinter = None
++if not BUILD_IOS:
++    try:
++        import _tkinter
++    except (ImportError, OSError):
++        # pypy emits an oserror
++        pass
 
+ NAME = 'Pillow'
+ PILLOW_VERSION = '3.4.2'
+@@ -216,7 +221,7 @@
  _add_directory(library_dirs, d)
 
--    prefix = sysconfig.get_config_var("prefix")
-+    prefix = False
-     if prefix:
+     prefix = sysconfig.get_config_var("prefix")
+-    if prefix:
++    if not BUILD_IOS and prefix:
          _add_directory(library_dirs, os.path.join(prefix, "lib"))
          _add_directory(include_dirs, os.path.join(prefix, "include"))
-@@ -199,7 +198,9 @@
-     _add_directory(library_dirs, os.path.join(
-         "/usr/lib", "python%s" % sys.version[:3], "config"))
+@@ -232,7 +237,7 @@
+         os.path.join("/usr/lib", "python%s" %
+                      sys.version[:3], "config"))
 
 -    elif sys.platform == "darwin":
-+    elif True:
-+        pass
-+        if False:
++    elif not BUILD_IOS and sys.platform == "darwin":
          # attempt to make sure we pick freetype2 over other versions
          _add_directory(include_dirs, "/sw/include/freetype2")
          _add_directory(include_dirs, "/sw/lib/freetype2/include")
-@@ -346,11 +347,7 @@
-         _add_directory(include_dirs, tcl_dir)
+@@ -346,7 +351,7 @@
+     # FIXME: check /opt/stuff directories here?
 
      # standard locations
--    _add_directory(library_dirs, "/usr/local/lib")
--    _add_directory(include_dirs, "/usr/local/include")
+-    if not self.disable_platform_guessing:
++    if not BUILD_IOS and not self.disable_platform_guessing:
+         _add_directory(library_dirs, "/usr/local/lib")
+         _add_directory(include_dirs, "/usr/local/include")
 
--    _add_directory(library_dirs, "/usr/lib")
--    _add_directory(include_dirs, "/usr/include")
+@@ -389,7 +394,9 @@
 
-     # on Windows, look for the OpenJPEG libraries in the location that
-     # the official installer puts them
-@@ -575,7 +572,7 @@
-     exts.append(Extension(
--        "PIL._webp", ["_webp.c"], libraries=libs, define_macros=defs))
+     if feature.want('zlib'):
+         _dbg('Looking for zlib')
+-        if _find_include_file(self, "zlib.h"):
++        if BUILD_IOS:
++            feature.zlib = 'z'
++        elif _find_include_file(self, "zlib.h"):
+             if _find_library_file(self, "z"):
+                 feature.zlib = "z"
+             elif (sys.platform == "win32" and
+@@ -592,10 +599,10 @@
+                 define_macros=defs))
 
--    if sys.platform == "darwin":
-+    if False:
-         # locate Tcl/Tk frameworks
-         frameworks = []
-         framework_roots = [
+         tk_libs = ['psapi'] if sys.platform == 'win32' else []
+-        exts.append(Extension("PIL._imagingtk",
+-                              ["_imagingtk.c", "Tk/tkImaging.c"],
+-                              include_dirs=['Tk'],
+-                              libraries=tk_libs))
++#        exts.append(Extension("PIL._imagingtk",
++#                              ["_imagingtk.c", "Tk/tkImaging.c"],
++#                              include_dirs=['Tk'],
++#                              libraries=tk_libs))
 
+     exts.append(Extension("PIL._imagingmath", ["_imagingmath.c"]))
+     exts.append(Extension("PIL._imagingmorph", ["_imagingmorph.c"]))
@@ -7,10 +7,15 @@ import os
 class pkg_resources(Recipe):
     depends = ["hostpython", "python"]
     archs = ['i386']
-    url = ""
+    url = "pkgr"
 
     def prebuild_arch(self, arch):
-        sh.cp("pkg_resources.py", join(self.ctx.dist_dir, "root", "python", "lib", "python2.7", "site-packages", "pkg_resources.py"))
+        pkgdir = join(self.ctx.dist_dir, "root", "python", "lib", "python2.7", "site-packages", "pkg_resources")
+        if not os.path.exists(pkgdir):
+            sh.mkdir(pkgdir)
+        sh.cp("-a", "./", pkgdir)
+        #sh.pip('install', '-t', join(self.ctx.dist_dir, "root", "python", "lib", "python2.7", "site-packages"))
 
 
 recipe = pkg_resources()
(One file's diff is suppressed because it is too large.)

recipes/pkgresources/pkgr/_vendor/__init__.py (new empty file)

recipes/pkgresources/pkgr/_vendor/appdirs.py (new file, 552 lines)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version_info__ = (1, 4, 0)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform


def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    """Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical user data directories are:
        Mac OS X:   /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               same as user_data_dir
        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by deafult "~/.config/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    """Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical user data directories are:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""
    def __init__(self, appname, appauthor=None, version=None, roaming=False,
                 multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value

def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernal.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir

if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir", "site_data_dir",
             "user_config_dir", "site_config_dir",
             "user_cache_dir", "user_log_dir")

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
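For orientation, the vendored appdirs module is easiest to grasp from a usage sketch (this is the upstream API; inside this tree the module would be reached through the _vendor package rather than a top-level import):

    from appdirs import AppDirs  # vendored path: pkg_resources._vendor.appdirs

    dirs = AppDirs("MyApp", "MyCompany", version="1.0")
    print(dirs.user_data_dir)   # OS X: ~/Library/Application Support/MyApp/1.0
    print(dirs.user_cache_dir)  # OS X: ~/Library/Caches/MyApp/1.0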
recipes/pkgresources/pkgr/_vendor/packaging/__about__.py (new file, 21 lines)

# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "16.8"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2016 %s" % __author__
recipes/pkgresources/pkgr/_vendor/packaging/__init__.py (new file, 14 lines)

# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

from .__about__ import (
    __author__, __copyright__, __email__, __license__, __summary__, __title__,
    __uri__, __version__
)

__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]
recipes/pkgresources/pkgr/_vendor/packaging/_compat.py (new file, 30 lines)

# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import sys


PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

# flake8: noqa

if PY3:
    string_types = str,
else:
    string_types = basestring,


def with_metaclass(meta, *bases):
    """
    Create a base class with a metaclass.
    """
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})
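In case the dummy-metaclass trick is opaque, a small self-contained illustration of with_metaclass (the class names here are made up for the demo):

    class Meta(type):
        pass

    class Base(object):
        pass

    # Works identically on Python 2 and 3: MyClass ends up with Base as its
    # base class and Meta as its metaclass, without metaclass=/__metaclass__
    # syntax.
    class MyClass(with_metaclass(Meta, Base)):
        pass

    assert type(MyClass) is Meta and MyClass.__bases__ == (Base,)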
recipes/pkgresources/pkgr/_vendor/packaging/_structures.py (new file, 68 lines)

# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function


class Infinity(object):

    def __repr__(self):
        return "Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return False

    def __le__(self, other):
        return False

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        return True

    def __ge__(self, other):
        return True

    def __neg__(self):
        return NegativeInfinity

Infinity = Infinity()


class NegativeInfinity(object):

    def __repr__(self):
        return "-Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return True

    def __le__(self, other):
        return True

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        return False

    def __ge__(self, other):
        return False

    def __neg__(self):
        return Infinity

NegativeInfinity = NegativeInfinity()
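These two singletons compare as universal extremes, which packaging's version logic uses as padding when comparing version tuples of unequal length. A tiny demo:

    # Infinity is greater than anything, NegativeInfinity less than anything;
    # negating one yields the other.
    assert Infinity > 10**9
    assert NegativeInfinity < -10**9
    assert -Infinity is NegativeInfinity and -NegativeInfinity is Infinity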
301
recipes/pkgresources/pkgr/_vendor/packaging/markers.py
Normal file
301
recipes/pkgresources/pkgr/_vendor/packaging/markers.py
Normal file
|
@ -0,0 +1,301 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import operator
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
|
||||||
|
from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
|
||||||
|
from pkg_resources.extern.pyparsing import Literal as L # noqa
|
||||||
|
|
||||||
|
from ._compat import string_types
|
||||||
|
from .specifiers import Specifier, InvalidSpecifier
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
|
||||||
|
"Marker", "default_environment",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidMarker(ValueError):
|
||||||
|
"""
|
||||||
|
An invalid marker was found, users should refer to PEP 508.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class UndefinedComparison(ValueError):
|
||||||
|
"""
|
||||||
|
An invalid operation was attempted on a value that doesn't support it.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class UndefinedEnvironmentName(ValueError):
|
||||||
|
"""
|
||||||
|
A name was attempted to be used that does not exist inside of the
|
||||||
|
environment.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class Node(object):
|
||||||
|
|
||||||
|
def __init__(self, value):
|
||||||
|
self.value = value
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return str(self.value)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
|
||||||
|
|
||||||
|
def serialize(self):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class Variable(Node):
|
||||||
|
|
||||||
|
def serialize(self):
|
||||||
|
return str(self)
|
||||||
|
|
||||||
|
|
||||||
|
class Value(Node):
|
||||||
|
|
||||||
|
def serialize(self):
|
||||||
|
return '"{0}"'.format(self)
|
||||||
|
|
||||||
|
|
||||||
|
class Op(Node):
|
||||||
|
|
||||||
|
def serialize(self):
|
||||||
|
return str(self)
|
||||||
|
|
||||||
|
|
||||||
|
VARIABLE = (
|
||||||
|
L("implementation_version") |
|
||||||
|
L("platform_python_implementation") |
|
||||||
|
L("implementation_name") |
|
||||||
|
L("python_full_version") |
|
||||||
|
L("platform_release") |
|
||||||
|
L("platform_version") |
|
||||||
|
L("platform_machine") |
|
||||||
|
L("platform_system") |
|
||||||
|
L("python_version") |
|
||||||
|
L("sys_platform") |
|
||||||
|
L("os_name") |
|
||||||
|
L("os.name") | # PEP-345
|
||||||
|
L("sys.platform") | # PEP-345
|
||||||
|
L("platform.version") | # PEP-345
|
||||||
|
L("platform.machine") | # PEP-345
|
||||||
|
L("platform.python_implementation") | # PEP-345
|
||||||
|
L("python_implementation") | # undocumented setuptools legacy
|
||||||
|
L("extra")
|
||||||
|
)
|
||||||
|
ALIASES = {
|
||||||
|
'os.name': 'os_name',
|
||||||
|
'sys.platform': 'sys_platform',
|
||||||
|
'platform.version': 'platform_version',
|
||||||
|
'platform.machine': 'platform_machine',
|
||||||
|
'platform.python_implementation': 'platform_python_implementation',
|
||||||
|
'python_implementation': 'platform_python_implementation'
|
||||||
|
}
|
||||||
|
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
|
||||||
|
|
||||||
|
VERSION_CMP = (
|
||||||
|
L("===") |
|
||||||
|
L("==") |
|
||||||
|
L(">=") |
|
||||||
|
L("<=") |
|
||||||
|
L("!=") |
|
||||||
|
L("~=") |
|
||||||
|
L(">") |
|
||||||
|
L("<")
|
||||||
|
)
|
||||||
|
|
||||||
|
MARKER_OP = VERSION_CMP | L("not in") | L("in")
|
||||||
|
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
|
||||||
|
|
||||||
|
MARKER_VALUE = QuotedString("'") | QuotedString('"')
|
||||||
|
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
|
||||||
|
|
||||||
|
BOOLOP = L("and") | L("or")
|
||||||
|
|
||||||
|
MARKER_VAR = VARIABLE | MARKER_VALUE
|
||||||
|
|
||||||
|
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
|
||||||
|
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
|
||||||
|
|
||||||
|
LPAREN = L("(").suppress()
|
||||||
|
RPAREN = L(")").suppress()
|
||||||
|
|
||||||
|
MARKER_EXPR = Forward()
|
||||||
|
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
|
||||||
|
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
|
||||||
|
|
||||||
|
MARKER = stringStart + MARKER_EXPR + stringEnd
|
||||||
|
|
||||||
|
|
||||||
|
def _coerce_parse_result(results):
|
||||||
|
if isinstance(results, ParseResults):
|
||||||
|
return [_coerce_parse_result(i) for i in results]
|
||||||
|
else:
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def _format_marker(marker, first=True):
|
||||||
|
assert isinstance(marker, (list, tuple, string_types))
|
||||||
|
|
||||||
|
# Sometimes we have a structure like [[...]] which is a single item list
|
||||||
|
# where the single item is itself it's own list. In that case we want skip
|
||||||
|
# the rest of this function so that we don't get extraneous () on the
|
||||||
|
# outside.
|
||||||
|
if (isinstance(marker, list) and len(marker) == 1 and
|
||||||
|
isinstance(marker[0], (list, tuple))):
|
||||||
|
return _format_marker(marker[0])
|
||||||
|
|
||||||
|
if isinstance(marker, list):
|
||||||
|
inner = (_format_marker(m, first=False) for m in marker)
|
||||||
|
if first:
|
||||||
|
return " ".join(inner)
|
||||||
|
else:
|
||||||
|
return "(" + " ".join(inner) + ")"
|
||||||
|
elif isinstance(marker, tuple):
|
||||||
|
return " ".join([m.serialize() for m in marker])
|
||||||
|
else:
|
||||||
|
return marker
|
||||||
|
|
||||||
|
|
||||||
|
_operators = {
|
||||||
|
"in": lambda lhs, rhs: lhs in rhs,
|
||||||
|
"not in": lambda lhs, rhs: lhs not in rhs,
|
||||||
|
"<": operator.lt,
|
||||||
|
"<=": operator.le,
|
||||||
|
"==": operator.eq,
|
||||||
|
"!=": operator.ne,
|
||||||
|
">=": operator.ge,
|
||||||
|
">": operator.gt,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _eval_op(lhs, op, rhs):
|
||||||
|
try:
|
||||||
|
spec = Specifier("".join([op.serialize(), rhs]))
|
||||||
|
except InvalidSpecifier:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
return spec.contains(lhs)
|
||||||
|
|
||||||
|
oper = _operators.get(op.serialize())
|
||||||
|
if oper is None:
|
||||||
|
raise UndefinedComparison(
|
||||||
|
"Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
|
||||||
|
)
|
||||||
|
|
||||||
|
return oper(lhs, rhs)
|
||||||
|
|
||||||
|
|
||||||
|
_undefined = object()
|
||||||
|
|
||||||
|
|
||||||
|
def _get_env(environment, name):
|
||||||
|
    value = environment.get(name, _undefined)

    if value is _undefined:
        raise UndefinedEnvironmentName(
            "{0!r} does not exist in evaluation environment.".format(name)
        )

    return value


def _evaluate_markers(markers, environment):
    groups = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, string_types))

        if isinstance(marker, list):
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            if isinstance(lhs, Variable):
                lhs_value = _get_env(environment, lhs.value)
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                rhs_value = _get_env(environment, rhs.value)

            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(item) for item in groups)


def format_full_version(info):
    version = '{0.major}.{0.minor}.{0.micro}'.format(info)
    kind = info.releaselevel
    if kind != 'final':
        version += kind[0] + str(info.serial)
    return version


def default_environment():
    if hasattr(sys, 'implementation'):
        iver = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        iver = '0'
        implementation_name = ''

    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": platform.python_version()[:3],
        "sys_platform": sys.platform,
    }


class Marker(object):

    def __init__(self, marker):
        try:
            self._markers = _coerce_parse_result(MARKER.parseString(marker))
        except ParseException as e:
            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
                marker, marker[e.loc:e.loc + 8])
            raise InvalidMarker(err_str)

    def __str__(self):
        return _format_marker(self._markers)

    def __repr__(self):
        return "<Marker({0!r})>".format(str(self))

    def evaluate(self, environment=None):
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            current_environment.update(environment)

        return _evaluate_markers(self._markers, current_environment)
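The evaluate() docstring above describes checking a marker against the running interpreter's environment, with an optional override dict merged in first. A minimal sketch of that behaviour (the import path assumes this vendored package is importable as packaging.markers; adjust to wherever the copy actually lives):

    from packaging.markers import Marker  # assumed import path

    m = Marker('os_name == "posix" and python_version >= "2.7"')
    print(m.evaluate())                   # depends on the running interpreter
    print(m.evaluate({"os_name": "nt"}))  # False: the override fails os_name == "posix"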
127
recipes/pkgresources/pkgr/_vendor/packaging/requirements.py
Normal file

@@ -0,0 +1,127 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import string
import re

from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from pkg_resources.extern.pyparsing import Literal as L  # noqa
from pkg_resources.extern.six.moves.urllib import parse as urlparse

from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet


class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.
    """


ALPHANUM = Word(string.ascii_letters + string.digits)

LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
                       joinString=",", adjacent=False)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start:t._original_end])
)
MARKER_SEPERATOR = SEMICOLON
MARKER = MARKER_SEPERATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = \
    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd


class Requirement(object):
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string):
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            raise InvalidRequirement(
                "Invalid requirement, parse error at \"{0!r}\"".format(
                    requirement_string[e.loc:e.loc + 8]))

        self.name = req.name
        if req.url:
            parsed_url = urlparse.urlparse(req.url)
            if not (parsed_url.scheme and parsed_url.netloc) or (
                    not parsed_url.scheme and not parsed_url.netloc):
                raise InvalidRequirement("Invalid URL given")
            self.url = req.url
        else:
            self.url = None
        self.extras = set(req.extras.asList() if req.extras else [])
        self.specifier = SpecifierSet(req.specifier)
        self.marker = req.marker if req.marker else None

    def __str__(self):
        parts = [self.name]

        if self.extras:
            parts.append("[{0}]".format(",".join(sorted(self.extras))))

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append("@ {0}".format(self.url))

        if self.marker:
            parts.append("; {0}".format(self.marker))

        return "".join(parts)

    def __repr__(self):
        return "<Requirement({0!r})>".format(str(self))
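Putting the grammar above together, a PEP 508 string is split into name, extras, specifier, and marker attributes. A short sketch with a made-up requirement string:

    req = Requirement('requests[security]>=2.8.1,==2.8.*; python_version < "2.7"')
    print(req.name)            # 'requests'
    print(sorted(req.extras))  # ['security']
    print(str(req.specifier))  # '==2.8.*,>=2.8.1' (SpecifierSet sorts its members)
    print(str(req.marker))     # 'python_version < "2.7"'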
774
recipes/pkgresources/pkgr/_vendor/packaging/specifiers.py
Normal file

@@ -0,0 +1,774 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import abc
import functools
import itertools
import re

from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse


class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.
    """


class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):

    @abc.abstractmethod
    def __str__(self):
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self):
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractmethod
    def __ne__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        """

    @abc.abstractproperty
    def prereleases(self):
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value):
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item, prereleases=None):
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(self, iterable, prereleases=None):
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """


class _IndividualSpecifier(BaseSpecifier):

    _operators = {}

    def __init__(self, spec="", prereleases=None):
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))

        self._spec = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<{0}({1!r}{2})>".format(
            self.__class__.__name__,
            str(self),
            pre,
        )

    def __str__(self):
        return "{0}{1}".format(*self._spec)

    def __hash__(self):
        return hash(self._spec)

    def __eq__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec == other._spec

    def __ne__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec != other._spec

    def _get_operator(self, op):
        return getattr(self, "_compare_{0}".format(self._operators[op]))

    def _coerce_version(self, version):
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self):
        return self._spec[0]

    @property
    def version(self):
        return self._spec[1]

    @property
    def prereleases(self):
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")
        item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)

    def filter(self, iterable, prereleases=None):
        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later incase nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease and not
                        (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the begining.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version


class LegacySpecifier(_IndividualSpecifier):

    _regex_str = (
        r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def _coerce_version(self, version):
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective, spec):
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective, spec):
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)


def _require_version_compare(fn):
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        if not isinstance(prospective, Version):
            return False
        return fn(self, prospective, spec)
    return wrapped


class Specifier(_IndividualSpecifier):

    _regex_str = (
        r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # it's own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not
                               x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return (self._get_operator(">=")(prospective, spec) and
                self._get_operator("==")(prospective, prefix))

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is techincally greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value


_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version):
    result = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result


def _pad_version(left, right):
    left_split, right_split = [], []

    # Get the release segment of our versions
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))

    # Get the rest of our versions
    left_split.append(left[len(left_split[0]):])
    right_split.append(right[len(right_split[0]):])

    # Insert our padding
    left_split.insert(
        1,
        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
    )
    right_split.insert(
        1,
        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
    )

    return (
        list(itertools.chain(*left_split)),
        list(itertools.chain(*right_split)),
    )


class SpecifierSet(BaseSpecifier):

    def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each indidivual specifier into it's own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parsed each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(
            s.contains(item, prereleases=prereleases)
            for s in self._specs
        )

    def filter(self, iterable, prereleases=None):
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []

            for item in iterable:
                # Ensure that we some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
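The _compare_compatible comment above explains that ~=2.2 is treated as >=2.2,==2.*. A minimal sketch of the resulting behaviour (the version strings are illustrative):

    compatible = SpecifierSet("~=2.2")
    print("2.5" in compatible)   # True: satisfies >=2.2 and ==2.*
    print("3.0" in compatible)   # False: fails the ==2.* prefix check
    # filter() applies the same containment test across an iterable.
    print(list(compatible.filter(["2.1", "2.3", "3.0"])))  # ['2.3']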
14
recipes/pkgresources/pkgr/_vendor/packaging/utils.py
Normal file

@@ -0,0 +1,14 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import re


_canonicalize_regex = re.compile(r"[-_.]+")


def canonicalize_name(name):
    # This is taken from PEP 503.
    return _canonicalize_regex.sub("-", name).lower()
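As the PEP 503 comment above indicates, canonicalize_name collapses any run of '-', '_', and '.' into a single dash and lowercases the result, so differently spelled project names compare equal:

    print(canonicalize_name("Django_REST--framework"))  # 'django-rest-framework'
    print(canonicalize_name("django.rest.framework") ==
          canonicalize_name("Django-REST-Framework"))   # True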
393
recipes/pkgresources/pkgr/_vendor/packaging/version.py
Normal file

@@ -0,0 +1,393 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import collections
import itertools
import re

from ._structures import Infinity


__all__ = [
    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]


_Version = collections.namedtuple(
    "_Version",
    ["epoch", "release", "dev", "pre", "post", "local"],
)


def parse(version):
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)


class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.
    """


class _BaseVersion(object):

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def _compare(self, other, method):
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return method(self._key, other._key)


class LegacyVersion(_BaseVersion):

    def __init__(self, version):
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        return self._version

    def __repr__(self):
        return "<LegacyVersion({0})>".format(repr(str(self)))

    @property
    def public(self):
        return self._version

    @property
    def base_version(self):
        return self._version

    @property
    def local(self):
        return None

    @property
    def is_prerelease(self):
        return False

    @property
    def is_postrelease(self):
        return False


_legacy_version_component_re = re.compile(
    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)

_legacy_version_replacement_map = {
    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}


def _parse_version_parts(s):
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # pad for numeric comparison
            yield part.zfill(8)
        else:
            yield "*" + part

    # ensure that alpha/beta/candidate are before final
    yield "*final"


def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # it's adoption of the packaging library.
    parts = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)
    parts = tuple(parts)

    return epoch, parts

# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):

    _regex = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(
                match.group("pre_l"),
                match.group("pre_n"),
            ),
            post=_parse_letter_version(
                match.group("post_l"),
                match.group("post_n1") or match.group("post_n2"),
            ),
            dev=_parse_letter_version(
                match.group("dev_l"),
                match.group("dev_n"),
            ),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        # Pre-release
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))

        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))

        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))

        # Local version segment
        if self._version.local is not None:
            parts.append(
                "+{0}".format(".".join(str(x) for x in self._version.local))
            )

        return "".join(parts)

    @property
    def public(self):
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        return "".join(parts)

    @property
    def local(self):
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]

    @property
    def is_prerelease(self):
        return bool(self._version.dev or self._version.pre)

    @property
    def is_postrelease(self):
        return bool(self._version.post)


def _parse_letter_version(letter, number):
    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)


_local_version_seperators = re.compile(r"[\._-]")


def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_seperators.split(local)
        )


def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    release = tuple(
        reversed(list(
            itertools.dropwhile(
                lambda x: x == 0,
                reversed(release),
            )
        ))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        pre = Infinity

    # Versions without a post segment should sort before those with one.
    if post is None:
        post = -Infinity

    # Versions without a development segment should sort after those with one.
    if dev is None:
        dev = Infinity

    if local is None:
        # Versions without a local segment should sort before those with one.
        local = -Infinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        local = tuple(
            (i, "") if isinstance(i, int) else (-Infinity, i)
            for i in local
        )

    return epoch, release, pre, post, dev, local
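The _cmpkey logic above is what makes PEP 440 ordering come out right, e.g. dev releases before pre-releases before the final release before post releases of the same version. A minimal sketch:

    versions = [parse(v) for v in ["1.0.post1", "1.0", "1.0a0", "1.0.dev0"]]
    print([str(v) for v in sorted(versions)])
    # ['1.0.dev0', '1.0a0', '1.0', '1.0.post1']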
5696
recipes/pkgresources/pkgr/_vendor/pyparsing.py
Normal file

File diff suppressed because it is too large
868
recipes/pkgresources/pkgr/_vendor/six.py
Normal file

@@ -0,0 +1,868 @@
|
||||||
|
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||||
|
|
||||||
|
# Copyright (c) 2010-2015 Benjamin Peterson
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in all
|
||||||
|
# copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
# SOFTWARE.
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import itertools
|
||||||
|
import operator
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||||
|
__version__ = "1.10.0"
|
||||||
|
|
||||||
|
|
||||||
|
# Useful for very coarse version differentiation.
|
||||||
|
PY2 = sys.version_info[0] == 2
|
||||||
|
PY3 = sys.version_info[0] == 3
|
||||||
|
PY34 = sys.version_info[0:2] >= (3, 4)
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
string_types = str,
|
||||||
|
integer_types = int,
|
||||||
|
class_types = type,
|
||||||
|
text_type = str
|
||||||
|
binary_type = bytes
|
||||||
|
|
||||||
|
MAXSIZE = sys.maxsize
|
||||||
|
else:
|
||||||
|
string_types = basestring,
|
||||||
|
integer_types = (int, long)
|
||||||
|
class_types = (type, types.ClassType)
|
||||||
|
text_type = unicode
|
||||||
|
binary_type = str
|
||||||
|
|
||||||
|
if sys.platform.startswith("java"):
|
||||||
|
# Jython always uses 32 bits.
|
||||||
|
MAXSIZE = int((1 << 31) - 1)
|
||||||
|
else:
|
||||||
|
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||||
|
class X(object):
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return 1 << 31
|
||||||
|
try:
|
||||||
|
len(X())
|
||||||
|
except OverflowError:
|
||||||
|
# 32-bit
|
||||||
|
MAXSIZE = int((1 << 31) - 1)
|
||||||
|
else:
|
||||||
|
# 64-bit
|
||||||
|
MAXSIZE = int((1 << 63) - 1)
|
||||||
|
del X
|
||||||
|
|
||||||
|
|
||||||
|
def _add_doc(func, doc):
|
||||||
|
"""Add documentation to a function."""
|
||||||
|
func.__doc__ = doc
|
||||||
|
|
||||||
|
|
||||||
|
def _import_module(name):
|
||||||
|
"""Import module, returning the module after the last dot."""
|
||||||
|
__import__(name)
|
||||||
|
return sys.modules[name]
|
||||||
|
|
||||||
|
|
||||||
|
class _LazyDescr(object):
|
||||||
|
|
||||||
|
def __init__(self, name):
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def __get__(self, obj, tp):
|
||||||
|
result = self._resolve()
|
||||||
|
setattr(obj, self.name, result) # Invokes __set__.
|
||||||
|
try:
|
||||||
|
# This is a bit ugly, but it avoids running this again by
|
||||||
|
# removing this descriptor.
|
||||||
|
delattr(obj.__class__, self.name)
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class MovedModule(_LazyDescr):
|
||||||
|
|
||||||
|
def __init__(self, name, old, new=None):
|
||||||
|
super(MovedModule, self).__init__(name)
|
||||||
|
if PY3:
|
||||||
|
if new is None:
|
||||||
|
new = name
|
||||||
|
self.mod = new
|
||||||
|
else:
|
||||||
|
self.mod = old
|
||||||
|
|
||||||
|
def _resolve(self):
|
||||||
|
return _import_module(self.mod)
|
||||||
|
|
||||||
|
def __getattr__(self, attr):
|
||||||
|
_module = self._resolve()
|
||||||
|
value = getattr(_module, attr)
|
||||||
|
setattr(self, attr, value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class _LazyModule(types.ModuleType):
|
||||||
|
|
||||||
|
def __init__(self, name):
|
||||||
|
super(_LazyModule, self).__init__(name)
|
||||||
|
self.__doc__ = self.__class__.__doc__
|
||||||
|
|
||||||
|
def __dir__(self):
|
||||||
|
attrs = ["__doc__", "__name__"]
|
||||||
|
attrs += [attr.name for attr in self._moved_attributes]
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
# Subclasses should override this
|
||||||
|
_moved_attributes = []
|
||||||
|
|
||||||
|
|
||||||
|
class MovedAttribute(_LazyDescr):
|
||||||
|
|
||||||
|
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||||
|
super(MovedAttribute, self).__init__(name)
|
||||||
|
if PY3:
|
||||||
|
if new_mod is None:
|
||||||
|
new_mod = name
|
||||||
|
self.mod = new_mod
|
||||||
|
if new_attr is None:
|
||||||
|
if old_attr is None:
|
||||||
|
new_attr = name
|
||||||
|
else:
|
||||||
|
new_attr = old_attr
|
||||||
|
self.attr = new_attr
|
||||||
|
else:
|
||||||
|
self.mod = old_mod
|
||||||
|
if old_attr is None:
|
||||||
|
old_attr = name
|
||||||
|
self.attr = old_attr
|
||||||
|
|
||||||
|
def _resolve(self):
|
||||||
|
module = _import_module(self.mod)
|
||||||
|
return getattr(module, self.attr)
|
||||||
|
|
||||||
|
|
||||||
class _SixMetaPathImporter(object):

    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code

_importer = _SixMetaPathImporter(__name__)

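# Illustrative sketch (not part of upstream six): _importer answers the
# PEP 302 finder/loader protocol, so once it is appended to sys.meta_path at
# the bottom of this module, imports of six.moves.* are served from its
# known_modules registry instead of the filesystem.
def _example_meta_path_lookup(fullname):
    finder = _importer.find_module(fullname)
    return finder.load_module(fullname) if finder is not None else None
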
class _MovedItems(_LazyModule):

    """Lazy loading of moved objects"""
    __path__ = []  # mark as package


_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
    _moved_attributes += [
        MovedModule("winreg", "_winreg"),
    ]

for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        _importer._add_module(attr, "moves." + attr.name)
del attr

_MovedItems._moved_attributes = _moved_attributes

moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")

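# Usage sketch (illustrative, not part of upstream six): with _MovedItems
# populated and registered above, client code can import renamed stdlib
# pieces portably, assuming this module is importable as "six":
def _example_moves():
    from six.moves import queue, range  # lazily mapped per interpreter
    return queue.Queue, range
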
class Module_six_moves_urllib_parse(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_parse"""


_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")


class Module_six_moves_urllib_error(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_error"""


_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")


class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""


_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")


class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""


_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")


class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""


_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")


class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")

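# Usage sketch (illustrative, not part of upstream six): the composite module
# registered above gives Python 2 code the Python 3 urllib layout, assuming
# this module is importable as "six":
def _example_urllib():
    from six.moves.urllib.parse import urlparse
    return urlparse("https://example.com/a?b=1").netloc  # 'example.com'
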
def add_move(move):
    """Add an item to six.moves."""
    setattr(_MovedItems, move.name, move)


def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))

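# Usage sketch (illustrative, not part of upstream six): add_move registers a
# custom mapping so it becomes importable from six.moves; remove_move undoes
# the registration.
def _example_custom_move():
    add_move(MovedModule("collections_abc", "collections", "collections.abc"))
    from six.moves import collections_abc  # ABCs on either interpreter
    remove_move("collections_abc")
    return collections_abc
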
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"


try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:
    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)

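# Usage sketch (illustrative, not part of upstream six): these attrgetters
# paper over the im_func/__func__ and func_code/__code__ renames between
# Python 2 and 3.
def _example_introspection():
    def f(x=1):
        return x
    return get_function_defaults(f)  # (1,) under either version
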
if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")

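# Usage sketch (illustrative, not part of upstream six): the iter* helpers
# give a lazy view of a dict on both versions (d.iteritems() on Python 2,
# iter(d.items()) on Python 3).
def _example_iteritems(d):
    return sorted((k, v) for k, v in iteritems(d))
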
if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    _assertCountEqual = "assertCountEqual"
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")


def assertCountEqual(self, *args, **kwargs):
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    return getattr(self, _assertRegex)(*args, **kwargs)

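# Usage sketch (illustrative, not part of upstream six): b() and u() stand in
# for the b"" and u"" literal prefixes, which are not both available on every
# interpreter six supports.
def _example_literals():
    raw = b("GET / HTTP/1.1\r\n")  # bytes on both versions
    text = u("hello")              # text on both versions
    return raw, text
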
if PY3:
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")


if sys.version_info[:2] == (3, 2):
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        raise value

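# Usage sketch (illustrative, not part of upstream six): reraise propagates a
# captured exception without losing its original traceback on either version.
def _example_reraise():
    try:
        {}["missing"]
    except KeyError:
        tp, value, tb = sys.exc_info()
        reraise(tp, value, tb)  # re-raises the KeyError with its traceback
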
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):

        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})

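# Usage sketch (illustrative, not part of upstream six): with_metaclass hides
# the incompatible Python 2 (__metaclass__) and Python 3 (metaclass=...)
# spellings behind a temporary dummy class.
class _ExampleMeta(type):
    pass


class _ExampleBase(with_metaclass(_ExampleMeta, object)):
    pass  # type(_ExampleBase) is _ExampleMeta on both versions
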
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        slots = orig_vars.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, orig_vars)
    return wrapper

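# Usage sketch (illustrative, not part of upstream six): the decorator form
# rebuilds an already-defined class under the requested metaclass, reusing
# _ExampleMeta from the sketch above.
@add_metaclass(_ExampleMeta)
class _ExampleDecorated(object):
    pass  # same effect as "class _ExampleDecorated(metaclass=_ExampleMeta)"
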
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if PY2:
        if '__str__' not in klass.__dict__:
            raise ValueError("@python_2_unicode_compatible cannot be applied "
                             "to %s because it doesn't define __str__()." %
                             klass.__name__)
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass

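# Usage sketch (illustrative, not part of upstream six): define __str__ to
# return text and let the decorator derive __unicode__/__str__ on Python 2.
@python_2_unicode_compatible
class _ExampleGreeting(object):
    def __str__(self):
        return u("hello")  # text on both versions; utf-8 bytes via str() on Python 2
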
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
73 recipes/pkgresources/pkgr/extern/__init__.py vendored Normal file
@@ -0,0 +1,73 @@
import sys


class VendorImporter:
    """
    A PEP 302 meta path importer for finding optionally-vendored
    or otherwise naturally-installed packages from root_name.
    """

    def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
        self.root_name = root_name
        self.vendored_names = set(vendored_names)
        self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')

    @property
    def search_path(self):
        """
        Search first the vendor package then as a natural package.
        """
        yield self.vendor_pkg + '.'
        yield ''

    def find_module(self, fullname, path=None):
        """
        Return self when fullname starts with root_name and the
        target module is one vendored through this importer.
        """
        root, base, target = fullname.partition(self.root_name + '.')
        if root:
            return
        if not any(map(target.startswith, self.vendored_names)):
            return
        return self

    def load_module(self, fullname):
        """
        Iterate over the search path to locate and load fullname.
        """
        root, base, target = fullname.partition(self.root_name + '.')
        for prefix in self.search_path:
            try:
                extant = prefix + target
                __import__(extant)
                mod = sys.modules[extant]
                sys.modules[fullname] = mod
                # mysterious hack:
                # Remove the reference to the extant package/module
                # on later Python versions to cause relative imports
                # in the vendor package to resolve the same modules
                # as those going through this importer.
                if sys.version_info > (3, 3):
                    del sys.modules[extant]
                return mod
            except ImportError:
                pass
        else:
            raise ImportError(
                "The '{target}' package is required; "
                "normally this is bundled with this package so if you get "
                "this warning, consult the packager of your "
                "distribution.".format(**locals())
            )

    def install(self):
        """
        Install this importer into sys.meta_path if not already present.
        """
        if self not in sys.meta_path:
            sys.meta_path.append(self)


names = 'packaging', 'pyparsing', 'six', 'appdirs'
VendorImporter(__name__, names).install()
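With the importer installed, an import such as "from pkgr.extern import six" is served by VendorImporter: the search path tries the vendored copy first (pkgr._vendor.six, per the 'extern' -> '_vendor' substitution in __init__) and then falls back to a system-wide install. A minimal sketch of the intended lookup, assuming the pkgr package layout implied by this file's path:

    # Hypothetical client code; the "pkgr" package name is inferred from the
    # recipe path above.
    from pkgr.extern import six   # vendored pkgr._vendor.six if bundled,
    print(six.PY2, six.PY3)       # otherwise the system-wide six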