18 changed files with 1919 additions and 346 deletions
@@ -0,0 +1,48 @@
"""For neatly implementing static typing in packaging.

`mypy` - the static type analysis tool we use - uses the `typing` module, which
provides core functionality fundamental to mypy's functioning.

Generally, `typing` would be imported at runtime and used in that fashion -
it acts as a no-op at runtime and does not have any run-time overhead by
design.

As it turns out, `typing` is not vendorable - it uses separate sources for
Python 2/Python 3. Thus, this codebase can not expect it to be present.
To work around this, mypy allows the typing import to be behind a False-y
optional to prevent it from running at runtime and type-comments can be used
to remove the need for the types to be accessible directly during runtime.

This module provides the False-y guard in a nicely named fashion so that a
curious maintainer can reach here to read this.

In packaging, all static-typing related imports should be guarded as follows:

    from packaging._typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from typing import ...

Ref: https://github.com/python/mypy/issues/3216
"""

__all__ = ["TYPE_CHECKING", "cast"]

# The TYPE_CHECKING constant defined by the typing module is False at runtime
# but True while type checking.
if False:  # pragma: no cover
    from typing import TYPE_CHECKING
else:
    TYPE_CHECKING = False

# typing's cast syntax requires calling typing.cast at runtime, but we don't
# want to import typing at runtime. Here, we inform the type checkers that
# we're importing `typing.cast` as `cast` and re-implement typing.cast's
# runtime behavior in a block that is ignored by type checkers.
if TYPE_CHECKING:  # pragma: no cover
    # not executed at runtime
    from typing import cast
else:
    # executed at runtime
    def cast(type_, value):  # noqa
        return value
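
For orientation, a consumer module inside packaging would combine the guard and the `cast` shim roughly as in the following minimal, hypothetical sketch (the function, its annotations, and the imported names are illustrative, not part of the vendored sources; it assumes the vendored `packaging` package is importable):

    from packaging._typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:  # pragma: no cover
        from typing import List, Optional

    def first_or_none(values):
        # type: (List[str]) -> Optional[str]
        # With the shim above, cast() is a runtime no-op; it only informs mypy.
        return cast("Optional[str]", values[0] if values else None)
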
@@ -0,0 +1,751 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import

import distutils.util

try:
    from importlib.machinery import EXTENSION_SUFFIXES
except ImportError:  # pragma: no cover
    import imp

    EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
    del imp
import logging
import os
import platform
import re
import struct
import sys
import sysconfig
import warnings

from ._typing import TYPE_CHECKING, cast

if TYPE_CHECKING:  # pragma: no cover
    from typing import (
        Dict,
        FrozenSet,
        IO,
        Iterable,
        Iterator,
        List,
        Optional,
        Sequence,
        Tuple,
        Union,
    )

    PythonVersion = Sequence[int]
    MacVersion = Tuple[int, int]
    GlibcVersion = Tuple[int, int]


logger = logging.getLogger(__name__)

INTERPRETER_SHORT_NAMES = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}  # type: Dict[str, str]


_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32


class Tag(object):
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform"]

    def __init__(self, interpreter, abi, platform):
        # type: (str, str, str) -> None
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()

    @property
    def interpreter(self):
        # type: () -> str
        return self._interpreter

    @property
    def abi(self):
        # type: () -> str
        return self._abi

    @property
    def platform(self):
        # type: () -> str
        return self._platform

    def __eq__(self, other):
        # type: (object) -> bool
        if not isinstance(other, Tag):
            return NotImplemented

        return (
            (self.platform == other.platform)
            and (self.abi == other.abi)
            and (self.interpreter == other.interpreter)
        )

    def __hash__(self):
        # type: () -> int
        return hash((self._interpreter, self._abi, self._platform))

    def __str__(self):
        # type: () -> str
        return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)

    def __repr__(self):
        # type: () -> str
        return "<{self} @ {self_id}>".format(self=self, self_id=id(self))


def parse_tag(tag):
    # type: (str) -> FrozenSet[Tag]
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    tags = set()
    interpreters, abis, platforms = tag.split("-")
    for interpreter in interpreters.split("."):
        for abi in abis.split("."):
            for platform_ in platforms.split("."):
                tags.add(Tag(interpreter, abi, platform_))
    return frozenset(tags)


def _warn_keyword_parameter(func_name, kwargs):
    # type: (str, Dict[str, bool]) -> bool
    """
    Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only.
    """
    if not kwargs:
        return False
    elif len(kwargs) > 1 or "warn" not in kwargs:
        kwargs.pop("warn", None)
        arg = next(iter(kwargs.keys()))
        raise TypeError(
            "{}() got an unexpected keyword argument {!r}".format(func_name, arg)
        )
    return kwargs["warn"]


def _get_config_var(name, warn=False):
    # type: (str, bool) -> Union[int, str, None]
    value = sysconfig.get_config_var(name)
    if value is None and warn:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value


def _normalize_string(string):
    # type: (str) -> str
    return string.replace(".", "_").replace("-", "_")


def _abi3_applies(python_version):
    # type: (PythonVersion) -> bool
    """
    Determine if the Python version supports abi3.

    PEP 384 was first implemented in Python 3.2.
    """
    return len(python_version) > 1 and tuple(python_version) >= (3, 2)


def _cpython_abis(py_version, warn=False):
    # type: (PythonVersion, bool) -> List[str]
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append("cp{version}".format(version=version))
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis


def cpython_tags(
    python_version=None,  # type: Optional[PythonVersion]
    abis=None,  # type: Optional[Iterable[str]]
    platforms=None,  # type: Optional[Iterable[str]]
    **kwargs  # type: bool
):
    # type: (...) -> Iterator[Tag]
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    warn = _warn_keyword_parameter("cpython_tags", kwargs)
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = "cp{}".format(_version_nodot(python_version[:2]))

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or _platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
            yield tag
    for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
        yield tag

    if _abi3_applies(python_version):
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)


def _generic_abi():
    # type: () -> Iterator[str]
    abi = sysconfig.get_config_var("SOABI")
    if abi:
        yield _normalize_string(abi)


def generic_tags(
    interpreter=None,  # type: Optional[str]
    abis=None,  # type: Optional[Iterable[str]]
    platforms=None,  # type: Optional[Iterable[str]]
    **kwargs  # type: bool
):
    # type: (...) -> Iterator[Tag]
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    warn = _warn_keyword_parameter("generic_tags", kwargs)
    if not interpreter:
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    platforms = list(platforms or _platform_tags())
    abis = list(abis)
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)


def _py_interpreter_range(py_version):
    # type: (PythonVersion) -> Iterator[str]
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    if len(py_version) > 1:
        yield "py{version}".format(version=_version_nodot(py_version[:2]))
    yield "py{major}".format(major=py_version[0])
    if len(py_version) > 1:
        for minor in range(py_version[1] - 1, -1, -1):
            yield "py{version}".format(version=_version_nodot((py_version[0], minor)))


def compatible_tags(
    python_version=None,  # type: Optional[PythonVersion]
    interpreter=None,  # type: Optional[str]
    platforms=None,  # type: Optional[Iterable[str]]
):
    # type: (...) -> Iterator[Tag]
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or _platform_tags())
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")


def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
    # type: (str, bool) -> str
    if not is_32bit:
        return arch

    if arch.startswith("ppc"):
        return "ppc"

    return "i386"


def _mac_binary_formats(version, cpu_arch):
    # type: (MacVersion, str) -> List[str]
    formats = [cpu_arch]
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat64", "fat32"])

    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat32", "fat"])

    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if version > (10, 5) or version < (10, 4):
            return []
        formats.append("fat64")

    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats.extend(["fat32", "fat"])

    formats.append("universal")
    return formats


def mac_platforms(version=None, arch=None):
    # type: (Optional[MacVersion], Optional[str]) -> Iterator[str]
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()  # type: ignore
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        version = version
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        arch = arch
    for minor_version in range(version[1], -1, -1):
        compat_version = version[0], minor_version
        binary_formats = _mac_binary_formats(compat_version, arch)
        for binary_format in binary_formats:
            yield "macosx_{major}_{minor}_{binary_format}".format(
                major=compat_version[0],
                minor=compat_version[1],
                binary_format=binary_format,
            )


# From PEP 513.
def _is_manylinux_compatible(name, glibc_version):
    # type: (str, GlibcVersion) -> bool
    # Check for presence of _manylinux module.
    try:
        import _manylinux  # noqa

        return bool(getattr(_manylinux, name + "_compatible"))
    except (ImportError, AttributeError):
        # Fall through to heuristic check below.
        pass

    return _have_compatible_glibc(*glibc_version)


def _glibc_version_string():
    # type: () -> Optional[str]
    # Returns glibc version string, or None if not using glibc.
    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()


def _glibc_version_string_confstr():
    # type: () -> Optional[str]
    """
    Primary implementation of glibc_version_string using os.confstr.
    """
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module.
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
    try:
        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
        version_string = os.confstr(  # type: ignore[attr-defined] # noqa: F821
            "CS_GNU_LIBC_VERSION"
        )
        assert version_string is not None
        _, version = version_string.split()  # type: Tuple[str, str]
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version


def _glibc_version_string_ctypes():
    # type: () -> Optional[str]
    """
    Fallback implementation of glibc_version_string using ctypes.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # Note: typeshed is wrong here so we are ignoring this line.
    process_namespace = ctypes.CDLL(None)  # type: ignore
    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str = gnu_get_libc_version()  # type: str
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


# Separated out from have_compatible_glibc for easier unit testing.
def _check_glibc_version(version_str, required_major, minimum_minor):
    # type: (str, int, int) -> bool
    # Parse string and check against requested version.
    #
    # We use a regexp instead of str.split because we want to discard any
    # random junk that might come after the minor version -- this might happen
    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
    # uses version strings like "2.20-2014.11"). See gh-3588.
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        warnings.warn(
            "Expected glibc version with 2 components major.minor,"
            " got: %s" % version_str,
            RuntimeWarning,
        )
        return False
    return (
        int(m.group("major")) == required_major
        and int(m.group("minor")) >= minimum_minor
    )


def _have_compatible_glibc(required_major, minimum_minor):
    # type: (int, int) -> bool
    version_str = _glibc_version_string()
    if version_str is None:
        return False
    return _check_glibc_version(version_str, required_major, minimum_minor)


# Python does not provide platform information at sufficient granularity to
# identify the architecture of the running executable in some cases, so we
# determine it dynamically by reading the information from the running
# process. This only applies on Linux, which uses the ELF format.
class _ELFFileHeader(object):
    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
    class _InvalidELFFileHeader(ValueError):
        """
        An invalid ELF file header was found.
        """

    ELF_MAGIC_NUMBER = 0x7F454C46
    ELFCLASS32 = 1
    ELFCLASS64 = 2
    ELFDATA2LSB = 1
    ELFDATA2MSB = 2
    EM_386 = 3
    EM_S390 = 22
    EM_ARM = 40
    EM_X86_64 = 62
    EF_ARM_ABIMASK = 0xFF000000
    EF_ARM_ABI_VER5 = 0x05000000
    EF_ARM_ABI_FLOAT_HARD = 0x00000400

    def __init__(self, file):
        # type: (IO[bytes]) -> None
        def unpack(fmt):
            # type: (str) -> int
            try:
                (result,) = struct.unpack(
                    fmt, file.read(struct.calcsize(fmt))
                )  # type: (int, )
            except struct.error:
                raise _ELFFileHeader._InvalidELFFileHeader()
            return result

        self.e_ident_magic = unpack(">I")
        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_class = unpack("B")
        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_data = unpack("B")
        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_version = unpack("B")
        self.e_ident_osabi = unpack("B")
        self.e_ident_abiversion = unpack("B")
        self.e_ident_pad = file.read(7)
        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
        self.e_type = unpack(format_h)
        self.e_machine = unpack(format_h)
        self.e_version = unpack(format_i)
        self.e_entry = unpack(format_p)
        self.e_phoff = unpack(format_p)
        self.e_shoff = unpack(format_p)
        self.e_flags = unpack(format_i)
        self.e_ehsize = unpack(format_h)
        self.e_phentsize = unpack(format_h)
        self.e_phnum = unpack(format_h)
        self.e_shentsize = unpack(format_h)
        self.e_shnum = unpack(format_h)
        self.e_shstrndx = unpack(format_h)


def _get_elf_header():
    # type: () -> Optional[_ELFFileHeader]
    try:
        with open(sys.executable, "rb") as f:
            elf_header = _ELFFileHeader(f)
    except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
        return None
    return elf_header


def _is_linux_armhf():
    # type: () -> bool
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    elf_header = _get_elf_header()
    if elf_header is None:
        return False
    result = elf_header.e_ident_class == elf_header.ELFCLASS32
    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
    result &= elf_header.e_machine == elf_header.EM_ARM
    result &= (
        elf_header.e_flags & elf_header.EF_ARM_ABIMASK
    ) == elf_header.EF_ARM_ABI_VER5
    result &= (
        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
    ) == elf_header.EF_ARM_ABI_FLOAT_HARD
    return result


def _is_linux_i686():
    # type: () -> bool
    elf_header = _get_elf_header()
    if elf_header is None:
        return False
    result = elf_header.e_ident_class == elf_header.ELFCLASS32
    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
    result &= elf_header.e_machine == elf_header.EM_386
    return result


def _have_compatible_manylinux_abi(arch):
    # type: (str) -> bool
    if arch == "armv7l":
        return _is_linux_armhf()
    if arch == "i686":
        return _is_linux_i686()
    return True


def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
    # type: (bool) -> Iterator[str]
    linux = _normalize_string(distutils.util.get_platform())
    if is_32bit:
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv7l"
    manylinux_support = []
    _, arch = linux.split("_", 1)
    if _have_compatible_manylinux_abi(arch):
        if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}:
            manylinux_support.append(
                ("manylinux2014", (2, 17))
            )  # CentOS 7 w/ glibc 2.17 (PEP 599)
        if arch in {"x86_64", "i686"}:
            manylinux_support.append(
                ("manylinux2010", (2, 12))
            )  # CentOS 6 w/ glibc 2.12 (PEP 571)
            manylinux_support.append(
                ("manylinux1", (2, 5))
            )  # CentOS 5 w/ glibc 2.5 (PEP 513)
    manylinux_support_iter = iter(manylinux_support)
    for name, glibc_version in manylinux_support_iter:
        if _is_manylinux_compatible(name, glibc_version):
            yield linux.replace("linux", name)
            break
    # Support for a later manylinux implies support for an earlier version.
    for name, _ in manylinux_support_iter:
        yield linux.replace("linux", name)
    yield linux


def _generic_platforms():
    # type: () -> Iterator[str]
    yield _normalize_string(distutils.util.get_platform())


def _platform_tags():
    # type: () -> Iterator[str]
    """
    Provides the platform tags for this installation.
    """
    if platform.system() == "Darwin":
        return mac_platforms()
    elif platform.system() == "Linux":
        return _linux_platforms()
    else:
        return _generic_platforms()


def interpreter_name():
    # type: () -> str
    """
    Returns the name of the running interpreter.
    """
    try:
        name = sys.implementation.name  # type: ignore
    except AttributeError:  # pragma: no cover
        # Python 2.7 compatibility.
        name = platform.python_implementation().lower()
    return INTERPRETER_SHORT_NAMES.get(name) or name


def interpreter_version(**kwargs):
    # type: (bool) -> str
    """
    Returns the version of the running interpreter.
    """
    warn = _warn_keyword_parameter("interpreter_version", kwargs)
    version = _get_config_var("py_version_nodot", warn=warn)
    if version:
        version = str(version)
    else:
        version = _version_nodot(sys.version_info[:2])
    return version


def _version_nodot(version):
    # type: (PythonVersion) -> str
    if any(v >= 10 for v in version):
        sep = "_"
    else:
        sep = ""
    return sep.join(map(str, version))


def sys_tags(**kwargs):
    # type: (bool) -> Iterator[Tag]
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    warn = _warn_keyword_parameter("sys_tags", kwargs)

    interp_name = interpreter_name()
    if interp_name == "cp":
        for tag in cpython_tags(warn=warn):
            yield tag
    else:
        for tag in generic_tags():
            yield tag

    for tag in compatible_tags():
        yield tag
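
As a quick orientation to how these generators are typically consumed (a minimal sketch, not part of the vendored file; the wheel-style tag string is made up for illustration), an installer compares the tags parsed from a wheel against the interpreter's supported tags:

    from packaging.tags import parse_tag, sys_tags

    # Tags supported by the running interpreter, in priority order.
    supported = list(sys_tags())

    # Tags from a (possibly compressed) wheel tag triple.
    file_tags = parse_tag("py2.py3-none-any")

    # The wheel is installable if any of its tags is supported.
    print(any(tag in supported for tag in file_tags))
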
@@ -0,0 +1,4 @@
packaging==20.4
pyparsing==2.2.1
six==1.10.0
appdirs==1.4.3
@@ -0,0 +1,401 @@
Pluggable Distributions of Python Software
==========================================

Distributions
-------------

A "Distribution" is a collection of files that represent a "Release" of a
"Project" as of a particular point in time, denoted by a
"Version"::

    >>> import sys, pkg_resources
    >>> from pkg_resources import Distribution
    >>> Distribution(project_name="Foo", version="1.2")
    Foo 1.2

Distributions have a location, which can be a filename, URL, or really anything
else you care to use::

    >>> dist = Distribution(
    ...     location="http://example.com/something",
    ...     project_name="Bar", version="0.9"
    ... )

    >>> dist
    Bar 0.9 (http://example.com/something)


Distributions have various introspectable attributes::

    >>> dist.location
    'http://example.com/something'

    >>> dist.project_name
    'Bar'

    >>> dist.version
    '0.9'

    >>> dist.py_version == '{}.{}'.format(*sys.version_info)
    True

    >>> print(dist.platform)
    None

Including various computed attributes::

    >>> from pkg_resources import parse_version
    >>> dist.parsed_version == parse_version(dist.version)
    True

    >>> dist.key  # case-insensitive form of the project name
    'bar'

Distributions are compared (and hashed) by version first::

    >>> Distribution(version='1.0') == Distribution(version='1.0')
    True
    >>> Distribution(version='1.0') == Distribution(version='1.1')
    False
    >>> Distribution(version='1.0') < Distribution(version='1.1')
    True

but also by project name (case-insensitive), platform, Python version,
location, etc.::

    >>> Distribution(project_name="Foo",version="1.0") == \
    ... Distribution(project_name="Foo",version="1.0")
    True

    >>> Distribution(project_name="Foo",version="1.0") == \
    ... Distribution(project_name="foo",version="1.0")
    True

    >>> Distribution(project_name="Foo",version="1.0") == \
    ... Distribution(project_name="Foo",version="1.1")
    False

    >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
    ... Distribution(project_name="Foo",py_version="2.4",version="1.0")
    False

    >>> Distribution(location="spam",version="1.0") == \
    ... Distribution(location="spam",version="1.0")
    True

    >>> Distribution(location="spam",version="1.0") == \
    ... Distribution(location="baz",version="1.0")
    False



Hash and compare distribution by prio/plat

Get version from metadata
provider capabilities
egg_name()
as_requirement()
from_location, from_filename (w/path normalization)

Releases may have zero or more "Requirements", which indicate
what releases of another project the release requires in order to
function. A Requirement names the other project, expresses some criteria
as to what releases of that project are acceptable, and lists any "Extras"
that the requiring release may need from that project. (An Extra is an
optional feature of a Release, that can only be used if its additional
Requirements are satisfied.)
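
As an illustration of those pieces (a small sketch added here for orientation; the requirement string is made up, and the attribute reprs reflect current pkg_resources behavior)::

    >>> req = pkg_resources.Requirement.parse("Bar[baz]>=0.9")
    >>> req.project_name, req.key
    ('Bar', 'bar')
    >>> req.extras   # the requested optional features ("Extras")
    ('baz',)
    >>> req.specs    # the version criteria
    [('>=', '0.9')]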



The Working Set
---------------

A collection of active distributions is called a Working Set. Note that a
Working Set can contain any importable distribution, not just pluggable ones.
For example, the Python standard library is an importable distribution that
will usually be part of the Working Set, even though it is not pluggable.
Similarly, when you are doing development work on a project, the files you are
editing are also a Distribution. (And, with a little attention to the
directory names used, and including some additional metadata, such a
"development distribution" can be made pluggable as well.)

    >>> from pkg_resources import WorkingSet

A working set's entries are the sys.path entries that correspond to the active
distributions. By default, the working set's entries are the items on
``sys.path``::

    >>> ws = WorkingSet()
    >>> ws.entries == sys.path
    True

But you can also create an empty working set explicitly, and add distributions
to it::

    >>> ws = WorkingSet([])
    >>> ws.add(dist)
    >>> ws.entries
    ['http://example.com/something']
    >>> dist in ws
    True
    >>> Distribution('foo',version="") in ws
    False

And you can iterate over its distributions::

    >>> list(ws)
    [Bar 0.9 (http://example.com/something)]

Adding the same distribution more than once is a no-op::

    >>> ws.add(dist)
    >>> list(ws)
    [Bar 0.9 (http://example.com/something)]

For that matter, adding multiple distributions for the same project also does
nothing, because a working set can only hold one active distribution per
project -- the first one added to it::

    >>> ws.add(
    ...     Distribution(
    ...         'http://example.com/something', project_name="Bar",
    ...         version="7.2"
    ...     )
    ... )
    >>> list(ws)
    [Bar 0.9 (http://example.com/something)]

You can append a path entry to a working set using ``add_entry()``::

    >>> ws.entries
    ['http://example.com/something']
    >>> ws.add_entry(pkg_resources.__file__)
    >>> ws.entries
    ['http://example.com/something', '...pkg_resources...']

Multiple additions result in multiple entries, even if the entry is already in
the working set (because ``sys.path`` can contain the same entry more than
once)::

    >>> ws.add_entry(pkg_resources.__file__)
    >>> ws.entries
    ['...example.com...', '...pkg_resources...', '...pkg_resources...']

And you can specify the path entry a distribution was found under, using the
optional second parameter to ``add()``::

    >>> ws = WorkingSet([])
    >>> ws.add(dist,"foo")
    >>> ws.entries
    ['foo']

But even if a distribution is found under multiple path entries, it still only
shows up once when iterating the working set:

    >>> ws.add_entry(ws.entries[0])
    >>> list(ws)
    [Bar 0.9 (http://example.com/something)]

You can ask a WorkingSet to ``find()`` a distribution matching a requirement::

    >>> from pkg_resources import Requirement
    >>> print(ws.find(Requirement.parse("Foo==1.0")))  # no match, return None
    None

    >>> ws.find(Requirement.parse("Bar==0.9"))  # match, return distribution
    Bar 0.9 (http://example.com/something)

Note that asking for a conflicting version of a distribution already in a
working set triggers a ``pkg_resources.VersionConflict`` error:

    >>> try:
    ...     ws.find(Requirement.parse("Bar==1.0"))
    ... except pkg_resources.VersionConflict as exc:
    ...     print(str(exc))
    ... else:
    ...     raise AssertionError("VersionConflict was not raised")
    (Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0'))

You can subscribe a callback function to receive notifications whenever a new
distribution is added to a working set. The callback is immediately invoked
once for each existing distribution in the working set, and then is called
again for new distributions added thereafter::

    >>> def added(dist): print("Added %s" % dist)
    >>> ws.subscribe(added)
    Added Bar 0.9
    >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
    >>> ws.add(foo12)
    Added Foo 1.2

Note, however, that only the first distribution added for a given project name
will trigger a callback, even during the initial ``subscribe()`` callback::

    >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
    >>> ws.add(foo14)  # no callback, because Foo 1.2 is already active

    >>> ws = WorkingSet([])
    >>> ws.add(foo12)
    >>> ws.add(foo14)
    >>> ws.subscribe(added)
    Added Foo 1.2

And adding a callback more than once has no effect, either::

    >>> ws.subscribe(added)  # no callbacks

    # and no double-callbacks on subsequent additions, either
    >>> just_a_test = Distribution(project_name="JustATest", version="0.99")
    >>> ws.add(just_a_test)
    Added JustATest 0.99


Finding Plugins
---------------

``WorkingSet`` objects can be used to figure out what plugins in an
``Environment`` can be loaded without any resolution errors::

    >>> from pkg_resources import Environment

    >>> plugins = Environment([])  # normally, a list of plugin directories
    >>> plugins.add(foo12)
    >>> plugins.add(foo14)
    >>> plugins.add(just_a_test)

In the simplest case, we just get the newest version of each distribution in
the plugin environment::

    >>> ws = WorkingSet([])
    >>> ws.find_plugins(plugins)
    ([JustATest 0.99, Foo 1.4 (f14)], {})

But if there's a problem with a version conflict or missing requirements, the
method falls back to older versions, and the error info dict will contain an
exception instance for each unloadable plugin::

    >>> ws.add(foo12)  # this will conflict with Foo 1.4
    >>> ws.find_plugins(plugins)
    ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})

But if you disallow fallbacks, the failed plugin will be skipped instead of
trying older versions::

    >>> ws.find_plugins(plugins, fallback=False)
    ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})



Platform Compatibility Rules
----------------------------

On the Mac, there are potential compatibility issues for modules compiled
on newer versions of macOS than what the user is running. Additionally,
macOS will soon have two platforms to contend with: Intel and PowerPC.

Basic equality works as on other platforms::

    >>> from pkg_resources import compatible_platforms as cp
    >>> reqd = 'macosx-10.4-ppc'
    >>> cp(reqd, reqd)
    True
    >>> cp("win32", reqd)
    False

Distributions made on other machine types are not compatible::

    >>> cp("macosx-10.4-i386", reqd)
    False

Distributions made on earlier versions of the OS are compatible, as
long as they are from the same top-level version. The patchlevel version
number does not matter::

    >>> cp("macosx-10.4-ppc", reqd)
    True
    >>> cp("macosx-10.3-ppc", reqd)
    True
    >>> cp("macosx-10.5-ppc", reqd)
    False
    >>> cp("macosx-9.5-ppc", reqd)
    False

Backwards compatibility for packages made via earlier versions of
setuptools is provided as well::

    >>> cp("darwin-8.2.0-Power_Macintosh", reqd)
    True
    >>> cp("darwin-7.2.0-Power_Macintosh", reqd)
    True
    >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
    False


Environment Markers
-------------------

    >>> from pkg_resources import invalid_marker as im, evaluate_marker as em
    >>> import os

    >>> print(im("sys_platform"))
    Invalid marker: 'sys_platform', parse error at ''

    >>> print(im("sys_platform=="))
    Invalid marker: 'sys_platform==', parse error at ''

    >>> print(im("sys_platform=='win32'"))
    False

    >>> print(im("sys=='x'"))
    Invalid marker: "sys=='x'", parse error at "sys=='x'"

    >>> print(im("(extra)"))
    Invalid marker: '(extra)', parse error at ')'

    >>> print(im("(extra"))
    Invalid marker: '(extra', parse error at ''

    >>> print(im("os.open('foo')=='y'"))
    Invalid marker: "os.open('foo')=='y'", parse error at 'os.open('

    >>> print(im("'x'=='y' and os.open('foo')=='y'"))  # no short-circuit!
    Invalid marker: "'x'=='y' and os.open('foo')=='y'", parse error at 'and os.o'

    >>> print(im("'x'=='x' or os.open('foo')=='y'"))  # no short-circuit!
    Invalid marker: "'x'=='x' or os.open('foo')=='y'", parse error at 'or os.op'

    >>> print(im("'x' < 'y' < 'z'"))
    Invalid marker: "'x' < 'y' < 'z'", parse error at "< 'z'"

    >>> print(im("r'x'=='x'"))
    Invalid marker: "r'x'=='x'", parse error at "r'x'=='x"

    >>> print(im("'''x'''=='x'"))
    Invalid marker: "'''x'''=='x'", parse error at "'x'''=='"

    >>> print(im('"""x"""=="x"'))
    Invalid marker: '"""x"""=="x"', parse error at '"x"""=="'

    >>> print(im(r"x\n=='x'"))
    Invalid marker: "x\\n=='x'", parse error at "x\\n=='x'"

    >>> print(im("os.open=='y'"))
    Invalid marker: "os.open=='y'", parse error at 'os.open='

    >>> em("sys_platform=='win32'") == (sys.platform=='win32')
    True

    >>> em("python_version >= '2.7'")
    True

    >>> em("python_version > '2.6'")
    True

    >>> im("implementation_name=='cpython'")
    False

    >>> im("platform_python_implementation=='CPython'")
    False

    >>> im("implementation_version=='3.5.1'")
    False
@@ -1,23 +0,0 @@
import os
import errno
import sys

from .extern import six


def _makedirs_31(path, exist_ok=False):
    try:
        os.makedirs(path)
    except OSError as exc:
        if not exist_ok or exc.errno != errno.EEXIST:
            raise


# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
    six.PY2 or
    (3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
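
For reference, the removed helper only provided `exist_ok`-style behavior where it was missing or unreliable (Python 2 and 3.4.0); on any modern interpreter the standard library call covers the same case directly (a minimal sketch, with a made-up path):

    import os

    # Equivalent of the removed makedirs(path, exist_ok=True) helper:
    os.makedirs("build/artifacts", exist_ok=True)  # no error if the directory exists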