virtuelle Umgebungen teil20 und teil20a
This commit is contained in:
		
							
								
								
									
										3
									
								
								teil20/lib/python3.11/site-packages/wheel/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								teil20/lib/python3.11/site-packages/wheel/__init__.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,3 @@
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
__version__ = "0.38.4"
 | 
			
		||||
							
								
								
									
										23
									
								
								teil20/lib/python3.11/site-packages/wheel/__main__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										23
									
								
								teil20/lib/python3.11/site-packages/wheel/__main__.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,23 @@
 | 
			
		||||
"""
 | 
			
		||||
Wheel command line tool (enable python -m wheel syntax)
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main():  # needed for console script
    """Entry point for ``python -m wheel``; exits with ``wheel.cli.main()``'s status."""
    if __package__ == "":
        # Invoked as 'python wheel-0.9.whl/wheel': make the archive root
        # importable so that 'import wheel.cli' below can resolve.
        import os.path

        archive_root = os.path.dirname(os.path.dirname(__file__))
        sys.path.insert(0, archive_root)
    import wheel.cli

    sys.exit(wheel.cli.main())
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Support direct execution of this file; the process exit code comes from main().
if __name__ == "__main__":
    sys.exit(main())
 | 
			
		||||
@@ -0,0 +1,26 @@
 | 
			
		||||
# copied from setuptools.logging, omitting monkeypatching
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import logging
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _not_warning(record):
 | 
			
		||||
    return record.levelno < logging.WARNING
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def configure():
    """
    Configure logging to emit warning and above to stderr
    and everything else to stdout. This behavior is provided
    for compatibility with distutils.log but may change in
    the future.
    """
    # stderr gets WARNING and above; stdout gets everything else via the
    # _not_warning filter, so no record is emitted on both streams.
    stderr_handler = logging.StreamHandler()
    stderr_handler.setLevel(logging.WARNING)
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.addFilter(_not_warning)
    logging.basicConfig(
        format="{message}",
        style="{",
        handlers=(stderr_handler, stdout_handler),
        level=logging.DEBUG,
    )
 | 
			
		||||
							
								
								
									
										550
									
								
								teil20/lib/python3.11/site-packages/wheel/bdist_wheel.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										550
									
								
								teil20/lib/python3.11/site-packages/wheel/bdist_wheel.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,550 @@
 | 
			
		||||
"""
 | 
			
		||||
Create a wheel (.whl) distribution.
 | 
			
		||||
 | 
			
		||||
A wheel is a built archive format.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import shutil
 | 
			
		||||
import stat
 | 
			
		||||
import sys
 | 
			
		||||
import sysconfig
 | 
			
		||||
import warnings
 | 
			
		||||
from collections import OrderedDict
 | 
			
		||||
from email.generator import BytesGenerator, Generator
 | 
			
		||||
from email.policy import EmailPolicy
 | 
			
		||||
from glob import iglob
 | 
			
		||||
from io import BytesIO
 | 
			
		||||
from shutil import rmtree
 | 
			
		||||
from zipfile import ZIP_DEFLATED, ZIP_STORED
 | 
			
		||||
 | 
			
		||||
import pkg_resources
 | 
			
		||||
from setuptools import Command
 | 
			
		||||
 | 
			
		||||
from . import __version__ as wheel_version
 | 
			
		||||
from .macosx_libfile import calculate_macosx_platform_tag
 | 
			
		||||
from .metadata import pkginfo_to_metadata
 | 
			
		||||
from .util import log
 | 
			
		||||
from .vendored.packaging import tags
 | 
			
		||||
from .wheelfile import WheelFile
 | 
			
		||||
 | 
			
		||||
safe_name = pkg_resources.safe_name
 | 
			
		||||
safe_version = pkg_resources.safe_version
 | 
			
		||||
setuptools_major_version = int(
 | 
			
		||||
    pkg_resources.get_distribution("setuptools").version.split(".")[0]
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
PY_LIMITED_API_PATTERN = r"cp3\d"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def python_tag():
    """Return the generic interpreter tag for this Python major version, e.g. ``py3``."""
    major = sys.version_info[0]
    return "py%d" % major
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_platform(archive_root):
    """Return our platform name 'win32', 'linux_x86_64'"""
    plat = sysconfig.get_platform()
    if plat.startswith("macosx") and archive_root is not None:
        # Derive the macOS tag from the binaries actually present in the tree.
        plat = calculate_macosx_platform_tag(archive_root, plat)
    elif sys.maxsize == 2147483647 and plat == "linux-x86_64":
        # 32-bit interpreter on a 64-bit kernel; see pip pull request #3497.
        plat = "linux-i686"

    # Wheel platform tags use underscores, never dashes.
    return plat.replace("-", "_")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback value for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = sysconfig.get_config_var(var)
    if val is not None:
        return val == expected

    # Config var missing entirely: optionally warn, then trust the fallback.
    if warn:
        warnings.warn(
            f"Config variable '{var}' is unset, Python ABI tag may be incorrect",
            RuntimeWarning,
            2,
        )
    return fallback
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
    soabi = sysconfig.get_config_var("SOABI")
    impl = tags.interpreter_name()
    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
        # No SOABI reported: reconstruct the tag from the build flags instead.
        d = ""
        m = ""
        u = ""  # never reassigned below; placeholder kept in the f-string layout
        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
            d = "d"

        # 'm' (pymalloc) is only part of the tag before Python 3.8.
        if get_flag(
            "WITH_PYMALLOC",
            impl == "cp",
            warn=(impl == "cp" and sys.version_info < (3, 8)),
        ) and sys.version_info < (3, 8):
            m = "m"

        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
    elif soabi and impl == "cp":
        # Keep only the second '-'-separated field of SOABI, prefixed with 'cp'.
        abi = "cp" + soabi.split("-")[1]
    elif soabi and impl == "pp":
        # we want something like pypy36-pp73
        abi = "-".join(soabi.split("-")[:2])
        abi = abi.replace(".", "_").replace("-", "_")
    elif soabi:
        # Any other interpreter: use the whole SOABI, normalized for tag syntax.
        abi = soabi.replace(".", "_").replace("-", "_")
    else:
        abi = None

    return abi
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def safer_name(name):
    """Escape *name* via safe_name, then swap dashes for underscores."""
    return "_".join(safe_name(name).split("-"))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def safer_version(version):
    """Normalize *version* via safe_version, then swap dashes for underscores."""
    return "_".join(safe_version(version).split("-"))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror hook: report the error, make *path* writable, retry."""
    exc_value = excinfo[1]
    print(str(exc_value))
    os.chmod(path, stat.S_IWRITE)
    func(path)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class bdist_wheel(Command):
    """setuptools/distutils command that builds a wheel (.whl) archive.

    Runs build + install into a temporary tree, converts the installed
    egg-info metadata to dist-info, writes the WHEEL file and zips the
    tree into a .whl in ``dist_dir``.
    """

    description = "create a wheel distribution"

    # Maps the user-facing --compression value to the zipfile constant.
    supported_compressions = OrderedDict(
        [("stored", ZIP_STORED), ("deflated", ZIP_DEFLATED)]
    )

    user_options = [
        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
        (
            "plat-name=",
            "p",
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(None),
        ),
        (
            "keep-temp",
            "k",
            "keep the pseudo-installation tree around after "
            + "creating the distribution archive",
        ),
        ("dist-dir=", "d", "directory to put final built distributions in"),
        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
        (
            "relative",
            None,
            "build the archive using relative paths " "(default: false)",
        ),
        (
            "owner=",
            "u",
            "Owner name used when creating a tar file" " [default: current user]",
        ),
        (
            "group=",
            "g",
            "Group name used when creating a tar file" " [default: current group]",
        ),
        ("universal", None, "make a universal wheel" " (default: false)"),
        (
            "compression=",
            None,
            "zipfile compression (one of: {})"
            " (default: 'deflated')".format(", ".join(supported_compressions)),
        ),
        (
            "python-tag=",
            None,
            "Python implementation compatibility tag"
            " (default: '%s')" % (python_tag()),
        ),
        (
            "build-number=",
            None,
            "Build number for this particular version. "
            "As specified in PEP-0427, this must start with a digit. "
            "[default: None]",
        ),
        (
            "py-limited-api=",
            None,
            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)",
        ),
    ]

    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]

    def initialize_options(self):
        """Set the defaults for all user options (Command protocol hook)."""
        self.bdist_dir = None
        self.data_dir = None
        self.plat_name = None
        self.plat_tag = None
        self.format = "zip"
        self.keep_temp = False
        self.dist_dir = None
        self.egginfo_dir = None
        self.root_is_pure = None
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None
        self.universal = False
        self.compression = "deflated"
        self.python_tag = python_tag()
        self.build_number = None
        self.py_limited_api = False
        self.plat_name_supplied = False

    def finalize_options(self):
        """Resolve and validate option values (Command protocol hook).

        Raises ValueError for an unknown compression name, a py-limited-api
        value not matching PY_LIMITED_API_PATTERN, or a build number that
        does not start with a digit.
        """
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command("bdist").bdist_base
            self.bdist_dir = os.path.join(bdist_base, "wheel")

        self.data_dir = self.wheel_dist_name + ".data"
        # Remember whether --plat-name came from the user; get_tag() relies on it.
        self.plat_name_supplied = self.plat_name is not None

        try:
            # Translate the option string into the zipfile constant.
            self.compression = self.supported_compressions[self.compression]
        except KeyError:
            raise ValueError(f"Unsupported compression: {self.compression}")

        need_options = ("dist_dir", "plat_name", "skip_build")

        self.set_undefined_options("bdist", *zip(need_options, need_options))

        # Pure unless the distribution builds C extensions or C libraries.
        self.root_is_pure = not (
            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
        )

        if self.py_limited_api and not re.match(
            PY_LIMITED_API_PATTERN, self.py_limited_api
        ):
            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)

        # Support legacy [wheel] section for setting universal
        wheel = self.distribution.get_option_dict("wheel")
        if "universal" in wheel:
            # please don't define this in your global configs
            log.warning(
                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
            )
            val = wheel["universal"][1].strip()
            if val.lower() in ("1", "true", "yes"):
                self.universal = True

        if self.build_number is not None and not self.build_number[:1].isdigit():
            raise ValueError("Build tag (build-number) must start with a digit.")

    @property
    def wheel_dist_name(self):
        """Return distribution full name with - replaced with _"""
        components = (
            safer_name(self.distribution.get_name()),
            safer_version(self.distribution.get_version()),
        )
        if self.build_number:
            components += (self.build_number,)
        return "-".join(components)

    def get_tag(self):
        """Return the (implementation, abi, platform) tag triple for the wheel name."""
        # bdist sets self.plat_name if unset, we should only use it for purepy
        # wheels if the user supplied it.
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = "any"
        else:
            # macosx contains system version in platform name so need special handle
            if self.plat_name and not self.plat_name.startswith("macosx"):
                plat_name = self.plat_name
            else:
                # on macosx always limit the platform name to comply with any
                # c-extension modules in bdist_dir, since the user can specify
                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake

                # on other platforms, and on macosx if there are no c-extension
                # modules, use the default platform name.
                plat_name = get_platform(self.bdist_dir)

            if (
                plat_name in ("linux-x86_64", "linux_x86_64")
                and sys.maxsize == 2147483647
            ):
                plat_name = "linux_i686"

        # Normalize to wheel tag syntax: lowercase, no dashes or dots.
        plat_name = plat_name.lower().replace("-", "_").replace(".", "_")

        if self.root_is_pure:
            if self.universal:
                impl = "py2.py3"
            else:
                impl = self.python_tag
            tag = (impl, "none", plat_name)
        else:
            impl_name = tags.interpreter_name()
            impl_ver = tags.interpreter_version()
            impl = impl_name + impl_ver
            # We don't work on CPython 3.1, 3.0.
            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
                impl = self.py_limited_api
                abi_tag = "abi3"
            else:
                abi_tag = str(get_abi_tag()).lower()
            tag = (impl, abi_tag, plat_name)
            # issue gh-374: allow overriding plat_name
            supported_tags = [
                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
            ]
            assert (
                tag in supported_tags
            ), f"would build wheel with unsupported tag {tag}"
        return tag

    def run(self):
        """Build the project, install it into a temp tree, and zip it into a wheel."""
        build_scripts = self.reinitialize_command("build_scripts")
        build_scripts.executable = "python"
        build_scripts.force = True

        build_ext = self.reinitialize_command("build_ext")
        build_ext.inplace = False

        if not self.skip_build:
            self.run_command("build")

        install = self.reinitialize_command("install", reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False

        # A wheel without setuptools scripts is more cross-platform.
        # Use the (undocumented) `no_ep` option to setuptools'
        # install_scripts command to avoid creating entry point scripts.
        install_scripts = self.reinitialize_command("install_scripts")
        install_scripts.no_ep = True

        # Use a custom scheme for the archive, because we have to decide
        # at installation time which scheme to use.
        for key in ("headers", "scripts", "data", "purelib", "platlib"):
            setattr(install, "install_" + key, os.path.join(self.data_dir, key))

        basedir_observed = ""

        if os.name == "nt":
            # win32 barfs if any of these are ''; could be '.'?
            # (distutils.command.install:change_roots bug)
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
            self.install_libbase = self.install_lib = basedir_observed

        setattr(
            install,
            "install_purelib" if self.root_is_pure else "install_platlib",
            basedir_observed,
        )

        log.info(f"installing to {self.bdist_dir}")

        self.run_command("install")

        impl_tag, abi_tag, plat_tag = self.get_tag()
        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(
                self.bdist_dir, self._ensure_relative(install.install_base)
            )

        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
        distinfo_dirname = "{}-{}.dist-info".format(
            safer_name(self.distribution.get_name()),
            safer_version(self.distribution.get_version()),
        )
        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
        # Convert the installed egg-info metadata into dist-info form.
        self.egg2dist(self.egginfo_dir, distinfo_dir)

        self.write_wheelfile(distinfo_dir)

        # Make the archive
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)

        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
        with WheelFile(wheel_path, "w", self.compression) as wf:
            wf.write_files(archive_root)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, "dist_files", []).append(
            (
                "bdist_wheel",
                "{}.{}".format(*sys.version_info[:2]),  # like 3.7
                wheel_path,
            )
        )

        if not self.keep_temp:
            log.info(f"removing {self.bdist_dir}")
            if not self.dry_run:
                rmtree(self.bdist_dir, onerror=remove_readonly)

    def write_wheelfile(
        self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
    ):
        """Write the WHEEL metadata file into *wheelfile_base*.

        One 'Tag' header is emitted per combination of the dot-separated
        components of the impl/abi/platform tags.
        """
        from email.message import Message

        msg = Message()
        msg["Wheel-Version"] = "1.0"  # of the spec
        msg["Generator"] = generator
        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
        if self.build_number is not None:
            msg["Build"] = self.build_number

        # Doesn't work for bdist_wininst
        impl_tag, abi_tag, plat_tag = self.get_tag()
        for impl in impl_tag.split("."):
            for abi in abi_tag.split("."):
                for plat in plat_tag.split("."):
                    msg["Tag"] = "-".join((impl, abi, plat))

        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
        log.info(f"creating {wheelfile_path}")
        buffer = BytesIO()
        BytesGenerator(buffer, maxheaderlen=0).flatten(msg)
        with open(wheelfile_path, "wb") as f:
            # NOTE(review): this collapses CRLF from BytesGenerator to CR —
            # confirm the intended line-ending convention for WHEEL files.
            f.write(buffer.getvalue().replace(b"\r\n", b"\r"))

    def _ensure_relative(self, path):
        """Strip a leading separator (keeping any drive) so *path* is relative."""
        # copied from dir_util, deleted
        drive, path = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path

    @property
    def license_paths(self):
        """Return the license file paths to copy into the dist-info directory.

        Behavior varies with the installed setuptools version: >= 57 the
        metadata already holds resolved file names; >= 42 it holds patterns
        this method globs; older versions are handled entirely here.
        """
        if setuptools_major_version >= 57:
            # Setuptools has resolved any patterns to actual file names
            return self.distribution.metadata.license_files or ()

        files = set()
        metadata = self.distribution.get_option_dict("metadata")
        if setuptools_major_version >= 42:
            # Setuptools recognizes the license_files option but does not do globbing
            patterns = self.distribution.metadata.license_files
        else:
            # Prior to those, wheel is entirely responsible for handling license files
            if "license_files" in metadata:
                patterns = metadata["license_files"][1].split()
            else:
                patterns = ()

        if "license_file" in metadata:
            warnings.warn(
                'The "license_file" option is deprecated. Use "license_files" instead.',
                DeprecationWarning,
            )
            files.add(metadata["license_file"][1])

        if not files and not patterns and not isinstance(patterns, list):
            # Nothing configured at all: fall back to the conventional names.
            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")

        for pattern in patterns:
            for path in iglob(pattern):
                if path.endswith("~"):
                    log.debug(
                        f'ignoring license file "{path}" as it looks like a backup'
                    )
                    continue

                if path not in files and os.path.isfile(path):
                    log.info(
                        f'adding license file "{path}" (matched pattern "{pattern}")'
                    )
                    files.add(path)

        return files

    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an .egg-info directory into a .dist-info directory"""

        def adios(p):
            """Appropriately delete directory, file or link."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)

        # Start from a clean dist-info target.
        adios(distinfo_path)

        if not os.path.exists(egginfo_path):
            # There is no egg-info. This is probably because the egg-info
            # file/directory is not named matching the distribution name used
            # to name the archive file. Check for this case and report
            # accordingly.
            import glob

            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
            possible = glob.glob(pat)
            err = f"Egg metadata expected at {egginfo_path} but not found"
            if possible:
                alt = os.path.basename(possible[0])
                err += f" ({alt} found - possible misnamed archive file?)"

            raise ValueError(err)

        if os.path.isfile(egginfo_path):
            # .egg-info is a single file
            pkginfo_path = egginfo_path
            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory
            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)

            # ignore common egg metadata that is useless to wheel
            shutil.copytree(
                egginfo_path,
                distinfo_path,
                ignore=lambda x, y: {
                    "PKG-INFO",
                    "requires.txt",
                    "SOURCES.txt",
                    "not-zip-safe",
                },
            )

            # delete dependency_links if it is only whitespace
            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
            with open(dependency_links_path) as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)

        pkg_info_path = os.path.join(distinfo_path, "METADATA")
        # utf8 + unlimited line length so the metadata round-trips unmodified.
        serialization_policy = EmailPolicy(
            utf8=True,
            mangle_from_=False,
            max_line_length=0,
        )
        with open(pkg_info_path, "w", encoding="utf-8") as out:
            Generator(out, policy=serialization_policy).flatten(pkg_info)

        for license_path in self.license_paths:
            filename = os.path.basename(license_path)
            shutil.copy(license_path, os.path.join(distinfo_path, filename))

        adios(egginfo_path)
			
		||||
							
								
								
									
										96
									
								
								teil20/lib/python3.11/site-packages/wheel/cli/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										96
									
								
								teil20/lib/python3.11/site-packages/wheel/cli/__init__.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,96 @@
 | 
			
		||||
"""
 | 
			
		||||
Wheel command-line utility.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import argparse
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class WheelError(Exception):
    """Root of the wheel command-line tool's exception hierarchy."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def unpack_f(args):
    # Dispatch target for the "unpack" subcommand; the import is deferred so
    # other subcommands don't pay for it.
    from .unpack import unpack

    unpack(args.wheelfile, args.dest)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def pack_f(args):
    """Handle the ``pack`` subcommand: repack an unpacked wheel directory."""
    from . import pack as pack_module

    pack_module.pack(args.directory, args.dest_dir, args.build_number)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def convert_f(args):
    """Handle the ``convert`` subcommand: turn eggs/wininst files into wheels."""
    from . import convert as convert_module

    convert_module.convert(args.files, args.dest_dir, args.verbose)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def version_f(args):
    """Handle the ``version`` subcommand: print the installed wheel version.

    :param args: Parsed argparse namespace (unused; required by the dispatch
        convention shared with the other subcommand handlers).
    """
    from .. import __version__

    # f-string instead of dated %-formatting; output is byte-identical.
    print(f"wheel {__version__}")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parser():
    """Build and return the argparse parser with every wheel subcommand.

    Each subcommand stores its handler under ``func`` via ``set_defaults``,
    so :func:`main` can dispatch with a single attribute lookup.
    """
    root = argparse.ArgumentParser()
    commands = root.add_subparsers(help="commands")

    unpack_cmd = commands.add_parser("unpack", help="Unpack wheel")
    unpack_cmd.add_argument("--dest", "-d", help="Destination directory", default=".")
    unpack_cmd.add_argument("wheelfile", help="Wheel file")
    unpack_cmd.set_defaults(func=unpack_f)

    pack_cmd = commands.add_parser("pack", help="Repack wheel")
    pack_cmd.add_argument("directory", help="Root directory of the unpacked wheel")
    pack_cmd.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store the wheel (default %(default)s)",
    )
    pack_cmd.add_argument("--build-number", help="Build tag to use in the wheel name")
    pack_cmd.set_defaults(func=pack_f)

    convert_cmd = commands.add_parser("convert", help="Convert egg or wininst to wheel")
    convert_cmd.add_argument("files", nargs="*", help="Files to convert")
    convert_cmd.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store wheels (default %(default)s)",
    )
    convert_cmd.add_argument("--verbose", "-v", action="store_true")
    convert_cmd.set_defaults(func=convert_f)

    version_cmd = commands.add_parser("version", help="Print version and exit")
    version_cmd.set_defaults(func=version_f)

    help_cmd = commands.add_parser("help", help="Show this help")
    help_cmd.set_defaults(func=lambda args: root.print_help())

    return root
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main():
    """Parse arguments and run the selected subcommand.

    :returns: 0 on success, 1 when no subcommand was given or a
        :class:`WheelError` was raised by the handler.
    """
    argument_parser = parser()
    parsed = argument_parser.parse_args()
    if hasattr(parsed, "func"):
        try:
            parsed.func(parsed)
            return 0
        except WheelError as exc:
            # Report command failures on stderr and fall through to exit 1.
            print(exc, file=sys.stderr)
    else:
        argument_parser.print_help()

    return 1
 | 
			
		||||
							
								
								
									
										273
									
								
								teil20/lib/python3.11/site-packages/wheel/cli/convert.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										273
									
								
								teil20/lib/python3.11/site-packages/wheel/cli/convert.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,273 @@
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import os.path
 | 
			
		||||
import re
 | 
			
		||||
import shutil
 | 
			
		||||
import tempfile
 | 
			
		||||
import zipfile
 | 
			
		||||
from glob import iglob
 | 
			
		||||
 | 
			
		||||
from ..bdist_wheel import bdist_wheel
 | 
			
		||||
from ..wheelfile import WheelFile
 | 
			
		||||
from . import WheelError
 | 
			
		||||
 | 
			
		||||
# Prefer setuptools' Distribution; fall back to distutils so conversion
# still works where setuptools is not installed.
try:
    from setuptools import Distribution
except ImportError:
    from distutils.dist import Distribution
 | 
			
		||||
 | 
			
		||||
# Parses egg file/directory names of the form
# "<name>-<ver>[-py<X.Y>[-<arch>]].egg" into named groups.
egg_info_re = re.compile(
    r"""
    (?P<name>.+?)-(?P<ver>.+?)
    (-(?P<pyver>py\d\.\d+)
     (-(?P<arch>.+?))?
    )?.egg$""",
    re.VERBOSE,
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _bdist_wheel_tag(bdist_wheel):
    """bdist_wheel variant that lets the caller override the generated tag.

    The stock bdist_wheel derives python/abi tags from the running
    interpreter, which is unsuitable for repackaging prebuilt binaries.
    """

    full_tag_supplied = False
    full_tag = None  # None or a (pytag, soabitag, plattag) triple

    def get_tag(self):
        # Use the caller-supplied tag when present; otherwise defer to base.
        if self.full_tag_supplied and self.full_tag is not None:
            return self.full_tag
        return bdist_wheel.get_tag(self)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def egg2wheel(egg_path: str, dest_dir: str):
    """Convert a .egg file or a buildout-style egg directory into a wheel.

    :param egg_path: Path to a bdist_egg zip file or an installed-egg directory
    :param dest_dir: Directory where the resulting .whl file is written
    :raises WheelError: If the egg file name does not match the egg pattern
    """
    filename = os.path.basename(egg_path)
    match = egg_info_re.match(filename)
    if not match:
        # Fix: interpolate the offending filename (the literal previously
        # contained the placeholder text "(unknown)").
        raise WheelError(f"Invalid egg file name: {filename}")

    egg_info = match.groupdict()
    dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        with zipfile.ZipFile(egg_path) as egg:
            egg.extractall(dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, dir)
            else:
                shutil.copytree(src, os.path.join(dir, pth))

    pyver = egg_info["pyver"]
    if pyver:
        # Normalize "py3.7" -> "py37" for use in the wheel tag.
        pyver = egg_info["pyver"] = pyver.replace(".", "")

    arch = (egg_info["arch"] or "any").replace(".", "_").replace("-", "_")

    # assume all binary eggs are for CPython
    abi = "cp" + pyver[2:] if arch != "any" else "none"

    root_is_purelib = egg_info["arch"] is None
    if root_is_purelib:
        bw = bdist_wheel(Distribution())
    else:
        bw = _bdist_wheel_tag(Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = egg_info["arch"] or "any"
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, "{name}-{ver}.dist-info".format(**egg_info))
    bw.egg2dist(os.path.join(dir, "EGG-INFO"), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator="egg2wheel")
    wheel_name = "{name}-{ver}-{pyver}-{}-{}.whl".format(abi, arch, **egg_info)
    with WheelFile(os.path.join(dest_dir, wheel_name), "w") as wf:
        wf.write_files(dir)

    shutil.rmtree(dir)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_wininst_info(wininfo_name, egginfo_name):
    """Extract metadata from filenames.

    Returns a dict with the 4 metadata items needed (name, ver, pyver, arch),
    taken from the installer filename and the name of the egg-info directory
    embedded in the zipfile (if any).

    Filename formats::

        name-ver(-pyver)(-arch).egg-info     (egg-info directory)
        name-ver.arch(-pyver).exe            (installer)

    Notes:

    1. The installer filename is not definitive (installers can be renamed),
       so egg-info data is preferred for name and version when present.
    2. The pyver from the egg-info data is ignored: it reflects the Python
       used to build the installer, while the installer filename carries the
       intended target version (absence implies any version).
    3. The architecture must come from the installer filename; the egg-info
       data does not include it.
    4. Architecture-neutral installers still carry an architecture (the
       installer format itself is executable and hence arch-specific), so
       callers ignore the architecture for pure-python content.
    """
    egginfo = egg_info_re.search(egginfo_name) if egginfo_name else None
    if egginfo_name and not egginfo:
        raise ValueError(f"Egg info filename {egginfo_name} is not valid")

    # 1. Distribution name: everything up to the first '-'.
    name, dash, remainder = wininfo_name.partition("-")
    if not dash:
        raise ValueError(f"Installer filename {wininfo_name} is not valid")

    # Drop the trailing '.exe'.
    remainder = remainder[:-4]
    # 2. Python version: after the last '-', and it must start with 'py'.
    prefix, dash, pyver = remainder.rpartition("-")
    if dash and pyver.startswith("py"):
        remainder = prefix
        pyver = pyver.replace(".", "")
    else:
        # Not version specific - use py2.py3. While it is possible that
        # pure-Python code is not compatible with both Python 2 and 3, there
        # is no way of knowing from the wininst format, so we assume the best
        # here (the user can always manually rename the wheel to be more
        # restrictive if needed).
        pyver = "py2.py3"
    # 3. Version and architecture, split on the last '.'.
    ver, dot, arch = remainder.rpartition(".")
    if not dot:
        raise ValueError(f"Installer filename {wininfo_name} is not valid")

    # Egg-info name/version win over the installer filename when available.
    if egginfo:
        name = egginfo.group("name")
        ver = egginfo.group("ver")

    return {"name": name, "ver": ver, "arch": arch, "pyver": pyver}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def wininst2wheel(path, dest_dir):
    """Convert a wininst .exe installer at *path* into a wheel in *dest_dir*.

    Rewrites the zip member paths inside the installer archive so they
    extract with wheel-style layout, then rebuilds the metadata and repacks
    the tree with WheelFile.
    """
    with zipfile.ZipFile(path) as bdw:
        # Search for egg-info in the archive
        egginfo_name = None
        for filename in bdw.namelist():
            if ".egg-info" in filename:
                egginfo_name = filename
                break

        info = parse_wininst_info(os.path.basename(path), egginfo_name)

        # Any member under PLATLIB marks the content as platform-specific.
        root_is_purelib = True
        for zipinfo in bdw.infolist():
            if zipinfo.filename.startswith("PLATLIB"):
                root_is_purelib = False
                break
        if root_is_purelib:
            paths = {"purelib": ""}
        else:
            paths = {"platlib": ""}

        dist_info = "%(name)s-%(ver)s" % info
        datadir = "%s.data/" % dist_info

        # rewrite paths to trick ZipFile into extracting an egg
        # XXX grab wininst .ini - between .exe, padding, and first zip file.
        members = []
        egginfo_name = ""
        for zipinfo in bdw.infolist():
            key, basename = zipinfo.filename.split("/", 1)
            key = key.lower()
            basepath = paths.get(key, None)
            if basepath is None:
                # Non-purelib/platlib categories (SCRIPTS, DATA, ...) go
                # under the wheel's .data directory.
                basepath = datadir + key.lower() + "/"
            oldname = zipinfo.filename
            newname = basepath + basename
            zipinfo.filename = newname
            # Keep the ZipFile's name index consistent with the rename.
            del bdw.NameToInfo[oldname]
            bdw.NameToInfo[newname] = zipinfo
            # Collect member names, but omit '' (from an entry like "PLATLIB/"
            if newname:
                members.append(newname)
            # Remember egg-info name for the egg2dist call below
            if not egginfo_name:
                if newname.endswith(".egg-info"):
                    egginfo_name = newname
                elif ".egg-info/" in newname:
                    egginfo_name, sep, _ = newname.rpartition("/")
        dir = tempfile.mkdtemp(suffix="_b2w")
        bdw.extractall(dir, members)

    # egg2wheel
    abi = "none"
    pyver = info["pyver"]
    arch = (info["arch"] or "any").replace(".", "_").replace("-", "_")
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = "any"
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != "any":
        pyver = pyver.replace("py", "cp")
    wheel_name = "-".join((dist_info, pyver, abi, arch))
    if root_is_purelib:
        bw = bdist_wheel(Distribution())
    else:
        bw = _bdist_wheel_tag(Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = info["arch"] or "any"

    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, "%s.dist-info" % dist_info)
    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator="wininst2wheel")

    wheel_path = os.path.join(dest_dir, wheel_name)
    with WheelFile(wheel_path, "w") as wf:
        wf.write_files(dir)

    shutil.rmtree(dir)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def convert(files, dest_dir, verbose):
    """Convert each egg/wininst file matching the given glob patterns.

    :param files: Glob patterns of .egg files or wininst .exe installers
    :param dest_dir: Directory where the resulting wheels are written
    :param verbose: When true, print progress per converted file
    """
    for pattern in files:
        for installer in iglob(pattern):
            # Dispatch on extension: eggs vs. wininst installers.
            if os.path.splitext(installer)[1] == ".egg":
                converter = egg2wheel
            else:
                converter = wininst2wheel

            if verbose:
                print(f"{installer}... ", flush=True)

            converter(installer, dest_dir)
            if verbose:
                print("OK")
 | 
			
		||||
							
								
								
									
										90
									
								
								teil20/lib/python3.11/site-packages/wheel/cli/pack.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										90
									
								
								teil20/lib/python3.11/site-packages/wheel/cli/pack.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,90 @@
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import os.path
 | 
			
		||||
import re
 | 
			
		||||
 | 
			
		||||
from wheel.cli import WheelError
 | 
			
		||||
from wheel.wheelfile import WheelFile
 | 
			
		||||
 | 
			
		||||
# Matches a ".dist-info" directory name; group "namever" is "<name>-<version>".
DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")
# Matches an existing "Build: N" line in a WHEEL file (bytes, end-anchored).
BUILD_NUM_RE = re.compile(rb"Build: (\d\w*)$")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def pack(directory: str, dest_dir: str, build_number: str | None):
    """Repack a previously unpacked wheel directory into a new wheel file.

    The .dist-info/WHEEL file must contain one or more tags so that the target
    wheel file name can be determined.

    :param directory: The unpacked wheel directory
    :param dest_dir: Destination directory (defaults to the current directory)
    :param build_number: Build tag to embed in the wheel name and WHEEL file;
        an empty string removes any existing Build tag, None keeps it as-is
    """
    # Find the .dist-info directory
    dist_info_dirs = [
        fn
        for fn in os.listdir(directory)
        if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)
    ]
    if len(dist_info_dirs) > 1:
        raise WheelError(f"Multiple .dist-info directories found in {directory}")
    elif not dist_info_dirs:
        raise WheelError(f"No .dist-info directories found in {directory}")

    # Determine the target wheel filename
    dist_info_dir = dist_info_dirs[0]
    name_version = DIST_INFO_RE.match(dist_info_dir).group("namever")

    # Read the tags and the existing build number from .dist-info/WHEEL
    existing_build_number = None
    wheel_file_path = os.path.join(directory, dist_info_dir, "WHEEL")
    with open(wheel_file_path) as f:
        tags = []
        for line in f:
            if line.startswith("Tag: "):
                tags.append(line.split(" ")[1].rstrip())
            elif line.startswith("Build: "):
                existing_build_number = line.split(" ")[1].rstrip()

        if not tags:
            raise WheelError(
                "No tags present in {}/WHEEL; cannot determine target wheel "
                "filename".format(dist_info_dir)
            )

    # Set the wheel file name and add/replace/remove the Build tag in .dist-info/WHEEL
    build_number = build_number if build_number is not None else existing_build_number
    if build_number is not None:
        if build_number:
            name_version += "-" + build_number

        if build_number != existing_build_number:
            # Empty build_number means "remove the Build line entirely".
            replacement = (
                ("Build: %s\r\n" % build_number).encode("ascii")
                if build_number
                else b""
            )
            # Rewrite the WHEEL file in place with the new Build line.
            with open(wheel_file_path, "rb+") as f:
                wheel_file_content = f.read()
                wheel_file_content, num_replaced = BUILD_NUM_RE.subn(
                    replacement, wheel_file_content
                )
                if not num_replaced:
                    # No existing Build line: append the new one.
                    wheel_file_content += replacement

                f.seek(0)
                f.truncate()
                f.write(wheel_file_content)

    # Reassemble the tags for the wheel file
    impls = sorted({tag.split("-")[0] for tag in tags})
    abivers = sorted({tag.split("-")[1] for tag in tags})
    platforms = sorted({tag.split("-")[2] for tag in tags})
    tagline = "-".join([".".join(impls), ".".join(abivers), ".".join(platforms)])

    # Repack the wheel
    wheel_path = os.path.join(dest_dir, f"{name_version}-{tagline}.whl")
    with WheelFile(wheel_path, "w") as wf:
        print(f"Repacking wheel as {wheel_path}...", end="", flush=True)
        wf.write_files(directory)

    print("OK")
 | 
			
		||||
							
								
								
									
										23
									
								
								teil20/lib/python3.11/site-packages/wheel/cli/unpack.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										23
									
								
								teil20/lib/python3.11/site-packages/wheel/cli/unpack.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,23 @@
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
 | 
			
		||||
from ..wheelfile import WheelFile
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def unpack(path: str, dest: str = ".") -> None:
    """Unpack a wheel.

    Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
    is the package name and {ver} its version.

    :param path: The path to the wheel.
    :param dest: Destination directory (default to current directory).
    """
    with WheelFile(path) as wf:
        name_version = wf.parsed_filename.group("namever")
        target = Path(dest) / name_version
        print(f"Unpacking to: {target}...", end="", flush=True)
        wf.extractall(target)

    print("OK")
 | 
			
		||||
							
								
								
									
										471
									
								
								teil20/lib/python3.11/site-packages/wheel/macosx_libfile.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										471
									
								
								teil20/lib/python3.11/site-packages/wheel/macosx_libfile.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,471 @@
 | 
			
		||||
"""
 | 
			
		||||
This module contains function to analyse dynamic library
 | 
			
		||||
headers to extract system information
 | 
			
		||||
 | 
			
		||||
Currently only for MacOSX
 | 
			
		||||
 | 
			
		||||
Library file on macosx system starts with Mach-O or Fat field.
 | 
			
		||||
These can be distinguished by the first 32 bits, which are called the magic number.
The native-order magic number carries the suffix _MAGIC; the suffix _CIGAM means
reversed byte order.
 | 
			
		||||
Both fields can occur in two types: 32 and 64 bytes.
 | 
			
		||||
 | 
			
		||||
FAT field inform that this library contains few version of library
 | 
			
		||||
(typically for different types version). It contains
 | 
			
		||||
information where Mach-O headers starts.
 | 
			
		||||
 | 
			
		||||
Each section started with Mach-O header contains one library
 | 
			
		||||
(So if file starts with this field it contains only one version).
 | 
			
		||||
 | 
			
		||||
After filed Mach-O there are section fields.
 | 
			
		||||
Each of them starts with two fields:
 | 
			
		||||
cmd - magic number for this command
 | 
			
		||||
cmdsize - total size occupied by this section information.
 | 
			
		||||
 | 
			
		||||
In this case only sections LC_VERSION_MIN_MACOSX (for macosx 10.13 and earlier)
 | 
			
		||||
and LC_BUILD_VERSION (for macosx 10.14 and newer) are interesting,
 | 
			
		||||
because they contain information about the minimal system version.
 | 
			
		||||
 | 
			
		||||
Important remarks:
 | 
			
		||||
- For fat files this implementation looks for the maximum version number.
  It does not check whether it is 32- or 64-bit and does not compare it with the
  currently built package, so it may falsely report a higher version than needed.
 | 
			
		||||
- All structures signatures are taken form macosx header files.
 | 
			
		||||
- I think that binary format will be more stable than `otool` output.
 | 
			
		||||
  and if apple introduce some changes both implementation will need to be updated.
 | 
			
		||||
- The system compile will set the deployment target no lower than
 | 
			
		||||
  11.0 for arm64 builds. For "Universal 2" builds use the x86_64 deployment
 | 
			
		||||
  target when the arm64 target is 11.0.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import ctypes
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
"""here the needed const and struct from mach-o header files"""
 | 
			
		||||
 | 
			
		||||
# Magic numbers from the mach-o headers.  *_MAGIC is the native byte order;
# *_CIGAM is the byte-swapped (reversed) form.
FAT_MAGIC = 0xCAFEBABE
FAT_CIGAM = 0xBEBAFECA
FAT_MAGIC_64 = 0xCAFEBABF
FAT_CIGAM_64 = 0xBFBAFECA
MH_MAGIC = 0xFEEDFACE
MH_CIGAM = 0xCEFAEDFE
MH_MAGIC_64 = 0xFEEDFACF
MH_CIGAM_64 = 0xCFFAEDFE

# Load-command identifiers that carry the minimum macOS version.
LC_VERSION_MIN_MACOSX = 0x24
LC_BUILD_VERSION = 0x32

CPU_TYPE_ARM64 = 0x0100000C
 | 
			
		||||
 | 
			
		||||
# ctypes field layout mirroring "struct mach_header" (32-bit Mach-O header).
mach_header_fields = [
    ("magic", ctypes.c_uint32),
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("filetype", ctypes.c_uint32),
    ("ncmds", ctypes.c_uint32),
    ("sizeofcmds", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
 | 
			
		||||
"""
 | 
			
		||||
struct mach_header {
 | 
			
		||||
    uint32_t	magic;		/* mach magic number identifier */
 | 
			
		||||
    cpu_type_t	cputype;	/* cpu specifier */
 | 
			
		||||
    cpu_subtype_t	cpusubtype;	/* machine specifier */
 | 
			
		||||
    uint32_t	filetype;	/* type of file */
 | 
			
		||||
    uint32_t	ncmds;		/* number of load commands */
 | 
			
		||||
    uint32_t	sizeofcmds;	/* the size of all the load commands */
 | 
			
		||||
    uint32_t	flags;		/* flags */
 | 
			
		||||
};
 | 
			
		||||
typedef integer_t cpu_type_t;
 | 
			
		||||
typedef integer_t cpu_subtype_t;
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# 64-bit Mach-O header: the 32-bit fields plus a trailing "reserved" uint32.
mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
 | 
			
		||||
"""
 | 
			
		||||
struct mach_header_64 {
 | 
			
		||||
    uint32_t	magic;		/* mach magic number identifier */
 | 
			
		||||
    cpu_type_t	cputype;	/* cpu specifier */
 | 
			
		||||
    cpu_subtype_t	cpusubtype;	/* machine specifier */
 | 
			
		||||
    uint32_t	filetype;	/* type of file */
 | 
			
		||||
    uint32_t	ncmds;		/* number of load commands */
 | 
			
		||||
    uint32_t	sizeofcmds;	/* the size of all the load commands */
 | 
			
		||||
    uint32_t	flags;		/* flags */
 | 
			
		||||
    uint32_t	reserved;	/* reserved */
 | 
			
		||||
};
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# ctypes field layout mirroring "struct fat_header".
fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
 | 
			
		||||
"""
 | 
			
		||||
struct fat_header {
 | 
			
		||||
    uint32_t	magic;		/* FAT_MAGIC or FAT_MAGIC_64 */
 | 
			
		||||
    uint32_t	nfat_arch;	/* number of structs that follow */
 | 
			
		||||
};
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# ctypes field layout mirroring "struct fat_arch" (32-bit offset/size).
fat_arch_fields = [
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("offset", ctypes.c_uint32),
    ("size", ctypes.c_uint32),
    ("align", ctypes.c_uint32),
]
 | 
			
		||||
"""
 | 
			
		||||
struct fat_arch {
 | 
			
		||||
    cpu_type_t	cputype;	/* cpu specifier (int) */
 | 
			
		||||
    cpu_subtype_t	cpusubtype;	/* machine specifier (int) */
 | 
			
		||||
    uint32_t	offset;		/* file offset to this object file */
 | 
			
		||||
    uint32_t	size;		/* size of this object file */
 | 
			
		||||
    uint32_t	align;		/* alignment as a power of 2 */
 | 
			
		||||
};
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# ctypes field layout mirroring "struct fat_arch_64" (64-bit offset/size,
# plus a trailing reserved word).
fat_arch_64_fields = [
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("offset", ctypes.c_uint64),
    ("size", ctypes.c_uint64),
    ("align", ctypes.c_uint32),
    ("reserved", ctypes.c_uint32),
]
 | 
			
		||||
"""
 | 
			
		||||
struct fat_arch_64 {
 | 
			
		||||
    cpu_type_t	cputype;	/* cpu specifier (int) */
 | 
			
		||||
    cpu_subtype_t	cpusubtype;	/* machine specifier (int) */
 | 
			
		||||
    uint64_t	offset;		/* file offset to this object file */
 | 
			
		||||
    uint64_t	size;		/* size of this object file */
 | 
			
		||||
    uint32_t	align;		/* alignment as a power of 2 */
 | 
			
		||||
    uint32_t	reserved;	/* reserved */
 | 
			
		||||
};
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# Common two-field prefix (cmd, cmdsize) shared by every load command.
segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
 | 
			
		||||
"""base for reading segment info"""
 | 
			
		||||
 | 
			
		||||
# ctypes field layout mirroring "struct segment_command" (32-bit architectures).
segment_command_fields = [
    ("cmd", ctypes.c_uint32),
    ("cmdsize", ctypes.c_uint32),
    ("segname", ctypes.c_char * 16),
    ("vmaddr", ctypes.c_uint32),
    ("vmsize", ctypes.c_uint32),
    ("fileoff", ctypes.c_uint32),
    ("filesize", ctypes.c_uint32),
    ("maxprot", ctypes.c_int),
    ("initprot", ctypes.c_int),
    ("nsects", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
 | 
			
		||||
"""
 | 
			
		||||
struct segment_command { /* for 32-bit architectures */
 | 
			
		||||
    uint32_t	cmd;		/* LC_SEGMENT */
 | 
			
		||||
    uint32_t	cmdsize;	/* includes sizeof section structs */
 | 
			
		||||
    char		segname[16];	/* segment name */
 | 
			
		||||
    uint32_t	vmaddr;		/* memory address of this segment */
 | 
			
		||||
    uint32_t	vmsize;		/* memory size of this segment */
 | 
			
		||||
    uint32_t	fileoff;	/* file offset of this segment */
 | 
			
		||||
    uint32_t	filesize;	/* amount to map from the file */
 | 
			
		||||
    vm_prot_t	maxprot;	/* maximum VM protection */
 | 
			
		||||
    vm_prot_t	initprot;	/* initial VM protection */
 | 
			
		||||
    uint32_t	nsects;		/* number of sections in segment */
 | 
			
		||||
    uint32_t	flags;		/* flags */
 | 
			
		||||
};
 | 
			
		||||
typedef int vm_prot_t;
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# ctypes field layout of `struct segment_command_64` (mach-o/loader.h),
# the 64-bit LC_SEGMENT_64 load command; address/size fields widen to 64 bits.
segment_command_fields_64 = [
    ("cmd", ctypes.c_uint32),
    ("cmdsize", ctypes.c_uint32),
    ("segname", ctypes.c_char * 16),
    ("vmaddr", ctypes.c_uint64),
    ("vmsize", ctypes.c_uint64),
    ("fileoff", ctypes.c_uint64),
    ("filesize", ctypes.c_uint64),
    ("maxprot", ctypes.c_int),
    ("initprot", ctypes.c_int),
    ("nsects", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
 | 
			
		||||
"""
 | 
			
		||||
struct segment_command_64 { /* for 64-bit architectures */
 | 
			
		||||
    uint32_t	cmd;		/* LC_SEGMENT_64 */
 | 
			
		||||
    uint32_t	cmdsize;	/* includes sizeof section_64 structs */
 | 
			
		||||
    char		segname[16];	/* segment name */
 | 
			
		||||
    uint64_t	vmaddr;		/* memory address of this segment */
 | 
			
		||||
    uint64_t	vmsize;		/* memory size of this segment */
 | 
			
		||||
    uint64_t	fileoff;	/* file offset of this segment */
 | 
			
		||||
    uint64_t	filesize;	/* amount to map from the file */
 | 
			
		||||
    vm_prot_t	maxprot;	/* maximum VM protection */
 | 
			
		||||
    vm_prot_t	initprot;	/* initial VM protection */
 | 
			
		||||
    uint32_t	nsects;		/* number of sections in segment */
 | 
			
		||||
    uint32_t	flags;		/* flags */
 | 
			
		||||
};
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# ctypes field layout of `struct version_min_command`
# (LC_VERSION_MIN_MACOSX and friends); versions are nibble-packed xxxx.yy.zz.
version_min_command_fields = segment_base_fields + [
    ("version", ctypes.c_uint32),
    ("sdk", ctypes.c_uint32),
]
 | 
			
		||||
"""
 | 
			
		||||
struct version_min_command {
 | 
			
		||||
    uint32_t	cmd;		/* LC_VERSION_MIN_MACOSX or
 | 
			
		||||
                               LC_VERSION_MIN_IPHONEOS or
 | 
			
		||||
                               LC_VERSION_MIN_WATCHOS or
 | 
			
		||||
                               LC_VERSION_MIN_TVOS */
 | 
			
		||||
    uint32_t	cmdsize;	/* sizeof(struct min_version_command) */
 | 
			
		||||
    uint32_t	version;	/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
 | 
			
		||||
    uint32_t	sdk;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
 | 
			
		||||
};
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# ctypes field layout of `struct build_version_command` (LC_BUILD_VERSION);
# `minos` carries the nibble-packed minimum OS version.
build_version_command_fields = segment_base_fields + [
    ("platform", ctypes.c_uint32),
    ("minos", ctypes.c_uint32),
    ("sdk", ctypes.c_uint32),
    ("ntools", ctypes.c_uint32),
]
 | 
			
		||||
"""
 | 
			
		||||
struct build_version_command {
 | 
			
		||||
    uint32_t	cmd;		/* LC_BUILD_VERSION */
 | 
			
		||||
    uint32_t	cmdsize;	/* sizeof(struct build_version_command) plus */
 | 
			
		||||
                                /* ntools * sizeof(struct build_tool_version) */
 | 
			
		||||
    uint32_t	platform;	/* platform */
 | 
			
		||||
    uint32_t	minos;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
 | 
			
		||||
    uint32_t	sdk;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
 | 
			
		||||
    uint32_t	ntools;		/* number of tool entries following this */
 | 
			
		||||
};
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def swap32(x):
    """Return the low 32 bits of *x* with their byte order reversed."""
    swapped = 0
    for _ in range(4):
        # Peel the lowest byte off x and push it onto the result.
        swapped = (swapped << 8) | (x & 0xFF)
        x >>= 8
    return swapped
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_base_class_and_magic_number(lib_file, seek=None):
    """Read the 32-bit magic at *seek* (default: current position) and choose
    the ctypes structure base class matching the file's byte order.

    Returns ``(BaseClass, magic_number)``; the file position is restored to
    *seek* afterwards.  Byte-swapped (CIGAM) magics are normalized to their
    native-order (MAGIC) values.
    """
    if seek is None:
        seek = lib_file.tell()
    else:
        lib_file.seek(seek)

    raw = lib_file.read(ctypes.sizeof(ctypes.c_uint32))
    magic_number = ctypes.c_uint32.from_buffer_copy(raw).value

    if magic_number in (FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64):
        # The file uses the opposite byte order to the host: parse with an
        # explicit other-endian structure base and normalize the magic value.
        if sys.byteorder == "little":
            BaseClass = ctypes.BigEndianStructure
        else:
            BaseClass = ctypes.LittleEndianStructure
        magic_number = swap32(magic_number)
    else:
        BaseClass = ctypes.Structure

    lib_file.seek(seek)
    return BaseClass, magic_number
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def read_data(struct_class, lib_file):
    """Read sizeof(*struct_class*) bytes from *lib_file* and parse them."""
    raw = lib_file.read(ctypes.sizeof(struct_class))
    return struct_class.from_buffer_copy(raw)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def extract_macosx_min_system_version(path_to_lib):
    """Return the minimal macOS version ``(x, y, z)`` required by a Mach-O file.

    For fat (universal) binaries the maximum over all architecture slices is
    returned, except that an arm64 slice pinned at exactly 11.0.0 is ignored
    in multi-arch binaries so the other slices can select a lower target.

    :param path_to_lib: path of the library file to inspect
    :return: version tuple, or None when the file is not Mach-O or no version
        information could be read
    """
    with open(path_to_lib, "rb") as lib_file:
        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
            return

        # BUG FIX: this previously tested FAT_CIGAM_64, which can never occur
        # here because get_base_class_and_magic_number() already byte-swaps
        # CIGAM magics into their MAGIC form; 64-bit fat binaries therefore
        # fell through to the thin-binary branch and were mis-parsed.
        if magic_number in [FAT_MAGIC, FAT_MAGIC_64]:

            class FatHeader(BaseClass):
                _fields_ = fat_header_fields

            fat_header = read_data(FatHeader, lib_file)
            if magic_number == FAT_MAGIC:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_fields

            else:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_64_fields

            fat_arch_list = [
                read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
            ]

            versions_list = []
            for el in fat_arch_list:
                try:
                    version = read_mach_header(lib_file, el.offset)
                    if version is not None:
                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
                            # Xcode will not set the deployment target below 11.0.0
                            # for the arm64 architecture. Ignore the arm64 deployment
                            # in fat binaries when the target is 11.0.0, that way
                            # the other architectures can select a lower deployment
                            # target.
                            # This is safe because there is no arm64 variant for
                            # macOS 10.15 or earlier.
                            if version == (11, 0, 0):
                                continue
                        versions_list.append(version)
                except ValueError:
                    # Unreadable/corrupt slice: skip it rather than fail the
                    # whole file.
                    pass

            if len(versions_list) > 0:
                return max(versions_list)
            else:
                return None

        else:
            try:
                return read_mach_header(lib_file, 0)
            except ValueError:
                # Error while reading the library file.
                return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def read_mach_header(lib_file, seek=None):
    """
    Parse a Mach-O header at *seek* (or the current position) and extract
    the minimal system version the binary was built for.

    Walks the load commands and returns the ``(x, y, z)`` tuple from the
    first LC_VERSION_MIN_MACOSX or LC_BUILD_VERSION command found; returns
    None implicitly when no such command exists.

    :param lib_file: reference to opened library file with pointer
    :param seek: optional absolute offset of the Mach-O header
    """
    if seek is not None:
        lib_file.seek(seek)
    base_class, magic_number = get_base_class_and_magic_number(lib_file)
    # Any non-MH_MAGIC value here is treated as 64-bit.
    arch = "32" if magic_number == MH_MAGIC else "64"

    class SegmentBase(base_class):
        _fields_ = segment_base_fields

    if arch == "32":

        class MachHeader(base_class):
            _fields_ = mach_header_fields

    else:

        class MachHeader(base_class):
            _fields_ = mach_header_fields_64

    mach_header = read_data(MachHeader, lib_file)
    for _i in range(mach_header.ncmds):
        pos = lib_file.tell()
        # Read only the (cmd, cmdsize) prefix, then rewind so the full
        # command can be re-read through the matching structure below.
        segment_base = read_data(SegmentBase, lib_file)
        lib_file.seek(pos)
        if segment_base.cmd == LC_VERSION_MIN_MACOSX:

            class VersionMinCommand(base_class):
                _fields_ = version_min_command_fields

            version_info = read_data(VersionMinCommand, lib_file)
            return parse_version(version_info.version)
        elif segment_base.cmd == LC_BUILD_VERSION:

            class VersionBuild(base_class):
                _fields_ = build_version_command_fields

            version_info = read_data(VersionBuild, lib_file)
            return parse_version(version_info.minos)
        else:
            # Not a version command: skip ahead by the command's size.
            lib_file.seek(pos + segment_base.cmdsize)
            continue
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_version(version):
    """Decode a Mach-O nibble-packed version (xxxx.yy.zz) into (x, y, z)."""
    major = (version >> 16) & 0xFFFF
    minor = (version >> 8) & 0xFF
    patch = version & 0xFF
    return major, minor, patch
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def calculate_macosx_platform_tag(archive_root, platform_tag):
    """
    Calculate the proper macosx platform tag based on the files included
    in the wheel (example platform tag: ``macosx-10.14-x86_64``).

    The tag's version is raised to the highest minimum system version found
    among the bundled ``.dylib``/``.so`` files, and a warning is written to
    stderr when that exceeds the starting version (interpreter build version
    or MACOSX_DEPLOYMENT_TARGET).

    :param archive_root: directory tree whose binaries are inspected
    :param platform_tag: the "prefix-version-suffix" tag to adjust
    :return: the adjusted tag with "_" separators, e.g. ``macosx_10_14_x86_64``
    """
    prefix, base_version, suffix = platform_tag.split("-")
    base_version = tuple(int(x) for x in base_version.split("."))
    base_version = base_version[:2]
    if base_version[0] > 10:
        # From macOS 11 on, only the major version is significant.
        base_version = (base_version[0], 0)
    assert len(base_version) == 2
    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
        deploy_target = tuple(
            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
        )
        deploy_target = deploy_target[:2]
        if deploy_target[0] > 10:
            deploy_target = (deploy_target[0], 0)
        if deploy_target < base_version:
            sys.stderr.write(
                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
                "the version on which the Python interpreter was compiled ({}), and "
                "will be ignored.\n".format(
                    ".".join(str(x) for x in deploy_target),
                    ".".join(str(x) for x in base_version),
                )
            )
        else:
            base_version = deploy_target

    assert len(base_version) == 2
    start_version = base_version
    versions_dict = {}
    for dirpath, _dirnames, filenames in os.walk(archive_root):
        for filename in filenames:
            if filename.endswith(".dylib") or filename.endswith(".so"):
                lib_path = os.path.join(dirpath, filename)
                min_ver = extract_macosx_min_system_version(lib_path)
                if min_ver is not None:
                    min_ver = min_ver[0:2]
                    if min_ver[0] > 10:
                        min_ver = (min_ver[0], 0)
                    versions_dict[lib_path] = min_ver

    if len(versions_dict) > 0:
        base_version = max(base_version, max(versions_dict.values()))

    # macosx platform tag do not support minor bugfix release
    fin_base_version = "_".join([str(x) for x in base_version])
    if start_version < base_version:
        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
        # BUG FIX: pick singular/plural while this is still a list — the
        # original tested len() after "\n".join(), counting characters
        # instead of files.
        if len(problematic_files) == 1:
            files_form = "this file"
        else:
            files_form = "these files"
        problematic_files = "\n".join(problematic_files)
        error_message = (
            "[WARNING] This wheel needs a higher macOS version than {}  "
            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
            + fin_base_version
            + " or recreate "
            + files_form
            + " with lower "
            "MACOSX_DEPLOYMENT_TARGET:  \n" + problematic_files
        )

        if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
            error_message = error_message.format(
                "is set in MACOSX_DEPLOYMENT_TARGET variable."
            )
        else:
            error_message = error_message.format(
                "the version your Python interpreter is compiled against."
            )

        sys.stderr.write(error_message)

    platform_tag = prefix + "_" + fin_base_version + "_" + suffix
    return platform_tag
 | 
			
		||||
							
								
								
									
										109
									
								
								teil20/lib/python3.11/site-packages/wheel/metadata.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										109
									
								
								teil20/lib/python3.11/site-packages/wheel/metadata.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,109 @@
 | 
			
		||||
"""
 | 
			
		||||
Tools for converting old- to new-style metadata.
 | 
			
		||||
"""
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import os.path
 | 
			
		||||
import textwrap
 | 
			
		||||
from email.message import Message
 | 
			
		||||
from email.parser import Parser
 | 
			
		||||
from typing import Iterator
 | 
			
		||||
 | 
			
		||||
from pkg_resources import Requirement, safe_extra, split_sections
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def requires_to_requires_dist(requirement: Requirement) -> str:
    """Return the version specifier for a requirement in PEP 345/566 fashion."""
    # A direct URL reference takes precedence over version specifiers.
    if getattr(requirement, "url", None):
        return " @ " + requirement.url

    specifiers = sorted(op + ver for op, ver in requirement.specs)
    if not specifiers:
        return ""
    return " (" + ",".join(specifiers) + ")"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def convert_requirements(requirements: list[str]) -> Iterator[str]:
    """Yield Requires-Dist: strings for parsed requirements strings."""
    for raw in requirements:
        requirement = Requirement.parse(raw)
        spec = requires_to_requires_dist(requirement)
        extras = ",".join(sorted(requirement.extras))
        if extras:
            extras = f"[{extras}]"
        # project name, optional [extras], then the version specifier/URL
        yield requirement.project_name + extras + spec
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def generate_requirements(
    extras_require: dict[str, list[str]]
) -> Iterator[tuple[str, str]]:
    """
    Convert requirements from a setup()-style dictionary to
    ('Requires-Dist', 'requirement') and ('Provides-Extra', 'extra') tuples.

    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
    using the empty extra {'': [requirements]} to hold install_requires.
    """
    for raw_extra, depends in extras_require.items():
        raw_extra = raw_extra or ""
        if ":" in raw_extra:  # setuptools extra:condition syntax
            raw_extra, condition = raw_extra.split(":", 1)
        else:
            condition = ""

        extra = safe_extra(raw_extra)
        if extra:
            yield "Provides-Extra", extra
            # Fold the extra marker into any existing environment condition.
            if condition:
                condition = "(" + condition + ") and "
            condition += "extra == '%s'" % extra

        if condition:
            condition = " ; " + condition

        for converted in convert_requirements(depends):
            yield "Requires-Dist", converted + condition
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def pkginfo_to_metadata(egg_info_path: str, pkginfo_path: str) -> Message:
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format

    :param egg_info_path: path of the ``.egg-info`` directory (its
        ``requires.txt``, when present, regenerates the dependency fields)
    :param pkginfo_path: path of the ``PKG-INFO`` file to convert
    :return: the converted metadata as an :class:`email.message.Message`
    """
    with open(pkginfo_path, encoding="utf-8") as headers:
        pkg_info = Parser().parse(headers)

    pkg_info.replace_header("Metadata-Version", "2.1")
    # Those will be regenerated from `requires.txt`.
    del pkg_info["Provides-Extra"]
    del pkg_info["Requires-Dist"]
    requires_path = os.path.join(egg_info_path, "requires.txt")
    if os.path.exists(requires_path):
        with open(requires_path) as requires_file:
            requires = requires_file.read()

        # Sort so the bare section (extra is None -> "") comes first.
        parsed_requirements = sorted(split_sections(requires), key=lambda x: x[0] or "")
        for extra, reqs in parsed_requirements:
            for key, value in generate_requirements({extra: reqs}):
                # Skip headers that PKG-INFO already carries verbatim.
                if (key, value) not in pkg_info.items():
                    pkg_info[key] = value

    description = pkg_info["Description"]
    if description:
        description_lines = pkg_info["Description"].splitlines()
        dedented_description = "\n".join(
            # if the first line of long_description is blank,
            # the first line here will be indented.
            (
                description_lines[0].lstrip(),
                textwrap.dedent("\n".join(description_lines[1:])),
                "\n",
            )
        )
        # Metadata 2.1 carries the long description in the message body,
        # not in a Description header.
        pkg_info.set_payload(dedented_description)
        del pkg_info["Description"]

    return pkg_info
 | 
			
		||||
							
								
								
									
										26
									
								
								teil20/lib/python3.11/site-packages/wheel/util.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										26
									
								
								teil20/lib/python3.11/site-packages/wheel/util.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,26 @@
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import base64
 | 
			
		||||
import logging
 | 
			
		||||
 | 
			
		||||
# Module-level logger shared by the wheel package.
log = logging.getLogger("wheel")

# ensure Python logging is configured
try:
    __import__("setuptools.logging")
except ImportError:
    # Older setuptools without the setuptools.logging module: fall back to
    # the vendored copy of its configuration logic.
    from . import _setuptools_logging

    _setuptools_logging.configure()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def urlsafe_b64encode(data: bytes) -> bytes:
    """urlsafe_b64encode without padding"""
    encoded = base64.urlsafe_b64encode(data)
    return encoded.rstrip(b"=")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def urlsafe_b64decode(data: bytes) -> bytes:
    """urlsafe_b64decode without padding"""
    # Restore the "=" padding stripped at encode time before decoding.
    missing = 4 - (len(data) & 3)
    return base64.urlsafe_b64decode(data + b"=" * missing)
 | 
			
		||||
@@ -0,0 +1,303 @@
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import collections
 | 
			
		||||
import functools
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import struct
 | 
			
		||||
import sys
 | 
			
		||||
import warnings
 | 
			
		||||
from typing import IO, Iterator, NamedTuple
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Python does not provide platform information at sufficient granularity to
 | 
			
		||||
# identify the architecture of the running executable in some cases, so we
 | 
			
		||||
# determine it dynamically by reading the information from the running
 | 
			
		||||
# process. This only applies on Linux, which uses the ELF format.
 | 
			
		||||
class _ELFFileHeader:
    """Parsed ELF file header of an executable (used to identify the
    architecture and ABI of the running interpreter)."""

    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
    class _InvalidELFFileHeader(ValueError):
        """
        An invalid ELF file header was found.
        """

    # Constants from the ELF specification.
    ELF_MAGIC_NUMBER = 0x7F454C46  # b"\x7fELF" read as a big-endian uint32
    ELFCLASS32 = 1
    ELFCLASS64 = 2
    ELFDATA2LSB = 1  # little-endian data encoding
    ELFDATA2MSB = 2  # big-endian data encoding
    EM_386 = 3
    EM_S390 = 22
    EM_ARM = 40
    EM_X86_64 = 62
    # ARM-specific e_flags masks (ARM AAELF32 ABI supplement).
    EF_ARM_ABIMASK = 0xFF000000
    EF_ARM_ABI_VER5 = 0x05000000
    EF_ARM_ABI_FLOAT_HARD = 0x00000400

    def __init__(self, file: IO[bytes]) -> None:
        # Validate the e_ident block first, then unpack the remaining fields
        # using the byte order and word size the header itself declares.
        def unpack(fmt: str) -> int:
            try:
                data = file.read(struct.calcsize(fmt))
                result: tuple[int, ...] = struct.unpack(fmt, data)
            except struct.error:
                # Short read or malformed data -> not a valid ELF header.
                raise _ELFFileHeader._InvalidELFFileHeader()
            return result[0]

        self.e_ident_magic = unpack(">I")
        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_class = unpack("B")
        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_data = unpack("B")
        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_version = unpack("B")
        self.e_ident_osabi = unpack("B")
        self.e_ident_abiversion = unpack("B")
        self.e_ident_pad = file.read(7)
        # Struct formats chosen from the declared endianness; pointer-sized
        # fields (format_p) depend on the 32/64-bit class.
        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
        self.e_type = unpack(format_h)
        self.e_machine = unpack(format_h)
        self.e_version = unpack(format_i)
        self.e_entry = unpack(format_p)
        self.e_phoff = unpack(format_p)
        self.e_shoff = unpack(format_p)
        self.e_flags = unpack(format_i)
        self.e_ehsize = unpack(format_h)
        self.e_phentsize = unpack(format_h)
        self.e_phnum = unpack(format_h)
        self.e_shentsize = unpack(format_h)
        self.e_shnum = unpack(format_h)
        self.e_shstrndx = unpack(format_h)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_elf_header() -> _ELFFileHeader | None:
    """Parse the ELF header of the running interpreter, or None on failure."""
    try:
        with open(sys.executable, "rb") as executable:
            return _ELFFileHeader(executable)
    except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
        # Unreadable, None (TypeError), or simply not an ELF executable.
        return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _is_linux_armhf() -> bool:
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    header = _get_elf_header()
    if header is None:
        return False
    abi_version = header.e_flags & header.EF_ARM_ABIMASK
    float_abi = header.e_flags & header.EF_ARM_ABI_FLOAT_HARD
    return (
        header.e_ident_class == header.ELFCLASS32
        and header.e_ident_data == header.ELFDATA2LSB
        and header.e_machine == header.EM_ARM
        and abi_version == header.EF_ARM_ABI_VER5
        and float_abi == header.EF_ARM_ABI_FLOAT_HARD
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _is_linux_i686() -> bool:
    """True when the running interpreter is a 32-bit little-endian x86 ELF."""
    header = _get_elf_header()
    if header is None:
        return False
    return (
        header.e_ident_class == header.ELFCLASS32
        and header.e_ident_data == header.ELFDATA2LSB
        and header.e_machine == header.EM_386
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _have_compatible_abi(arch: str) -> bool:
 | 
			
		||||
    if arch == "armv7l":
 | 
			
		||||
        return _is_linux_armhf()
 | 
			
		||||
    if arch == "i686":
 | 
			
		||||
        return _is_linux_i686()
 | 
			
		||||
    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# If glibc ever changes its major version, we need to know what the last
 | 
			
		||||
# minor version was, so we can build the complete list of all versions.
 | 
			
		||||
# For now, guess what the highest minor version might be, assume it will
 | 
			
		||||
# be 50 for testing. Once this actually happens, update the dictionary
 | 
			
		||||
# with the actual value.
 | 
			
		||||
_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _GLibCVersion(NamedTuple):
    """A (major, minor) glibc release; tuple ordering gives correct
    version comparisons."""

    major: int
    minor: int
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _glibc_version_string_confstr() -> str | None:
 | 
			
		||||
    """
 | 
			
		||||
    Primary implementation of glibc_version_string using os.confstr.
 | 
			
		||||
    """
 | 
			
		||||
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
 | 
			
		||||
    # to be broken or missing. This strategy is used in the standard library
 | 
			
		||||
    # platform module.
 | 
			
		||||
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
 | 
			
		||||
    try:
 | 
			
		||||
        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
 | 
			
		||||
        version_string = os.confstr("CS_GNU_LIBC_VERSION")
 | 
			
		||||
        assert version_string is not None
 | 
			
		||||
        _, version = version_string.split()
 | 
			
		||||
    except (AssertionError, AttributeError, OSError, ValueError):
 | 
			
		||||
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
 | 
			
		||||
        return None
 | 
			
		||||
    return version
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _glibc_version_string_ctypes() -> str | None:
 | 
			
		||||
    """
 | 
			
		||||
    Fallback implementation of glibc_version_string using ctypes.
 | 
			
		||||
    """
 | 
			
		||||
    try:
 | 
			
		||||
        import ctypes
 | 
			
		||||
    except ImportError:
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
 | 
			
		||||
    # manpage says, "If filename is NULL, then the returned handle is for the
 | 
			
		||||
    # main program". This way we can let the linker do the work to figure out
 | 
			
		||||
    # which libc our process is actually using.
 | 
			
		||||
    #
 | 
			
		||||
    # We must also handle the special case where the executable is not a
 | 
			
		||||
    # dynamically linked executable. This can occur when using musl libc,
 | 
			
		||||
    # for example. In this situation, dlopen() will error, leading to an
 | 
			
		||||
    # OSError. Interestingly, at least in the case of musl, there is no
 | 
			
		||||
    # errno set on the OSError. The single string argument used to construct
 | 
			
		||||
    # OSError comes from libc itself and is therefore not portable to
 | 
			
		||||
    # hard code here. In any case, failure to call dlopen() means we
 | 
			
		||||
    # can proceed, so we bail on our attempt.
 | 
			
		||||
    try:
 | 
			
		||||
        process_namespace = ctypes.CDLL(None)
 | 
			
		||||
    except OSError:
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
 | 
			
		||||
    except AttributeError:
 | 
			
		||||
        # Symbol doesn't exist -> therefore, we are not linked to
 | 
			
		||||
        # glibc.
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    # Call gnu_get_libc_version, which returns a string like "2.5"
 | 
			
		||||
    gnu_get_libc_version.restype = ctypes.c_char_p
 | 
			
		||||
    version_str: str = gnu_get_libc_version()
 | 
			
		||||
    # py2 / py3 compatibility:
 | 
			
		||||
    if not isinstance(version_str, str):
 | 
			
		||||
        version_str = version_str.decode("ascii")
 | 
			
		||||
 | 
			
		||||
    return version_str
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _glibc_version_string() -> str | None:
    """Return the glibc version string, or None if not running on glibc."""
    # Prefer the cheap confstr strategy; fall back to ctypes.
    version = _glibc_version_string_confstr()
    if version:
        return version
    return _glibc_version_string_ctypes()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _parse_glibc_version(version_str: str) -> tuple[int, int]:
 | 
			
		||||
    """Parse glibc version.
 | 
			
		||||
 | 
			
		||||
    We use a regexp instead of str.split because we want to discard any
 | 
			
		||||
    random junk that might come after the minor version -- this might happen
 | 
			
		||||
    in patched/forked versions of glibc (e.g. Linaro's version of glibc
 | 
			
		||||
    uses version strings like "2.20-2014.11"). See gh-3588.
 | 
			
		||||
    """
 | 
			
		||||
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
 | 
			
		||||
    if not m:
 | 
			
		||||
        warnings.warn(
 | 
			
		||||
            "Expected glibc version with 2 components major.minor,"
 | 
			
		||||
            " got: %s" % version_str,
 | 
			
		||||
            RuntimeWarning,
 | 
			
		||||
        )
 | 
			
		||||
        return -1, -1
 | 
			
		||||
    return int(m.group("major")), int(m.group("minor"))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.lru_cache()
def _get_glibc_version() -> tuple[int, int]:
    """Return the running glibc's (major, minor), or (-1, -1) off glibc.

    Cached, since the answer cannot change within one process.
    """
    raw = _glibc_version_string()
    return (-1, -1) if raw is None else _parse_glibc_version(raw)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# From PEP 513, PEP 600
def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
    """Decide whether a manylinux tag applies on the current system.

    ``name`` is kept for interface compatibility but is not consulted;
    the decision rests on the system glibc version and on the optional
    ``_manylinux`` override module described by PEP 513 / PEP 600.
    """
    if _get_glibc_version() < version:
        return False

    # Check for presence of the distro-provided _manylinux override module.
    try:
        import _manylinux  # noqa
    except ImportError:
        return True

    if hasattr(_manylinux, "manylinux_compatible"):
        # PEP 600 style hook; a None verdict means "no opinion".
        verdict = _manylinux.manylinux_compatible(version[0], version[1], arch)
        return True if verdict is None else bool(verdict)

    # Legacy per-tag boolean attributes (PEP 513 / 571 / 599).
    legacy_attrs = {
        _GLibCVersion(2, 5): "manylinux1_compatible",
        _GLibCVersion(2, 12): "manylinux2010_compatible",
        _GLibCVersion(2, 17): "manylinux2014_compatible",
    }
    attr = legacy_attrs.get(version)
    if attr is not None and hasattr(_manylinux, attr):
        return bool(getattr(_manylinux, attr))
    return True
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Maps (glibc major, glibc minor) to the legacy manylinux tag that pinned
# exactly that glibc version.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def platform_tags(linux: str, arch: str) -> Iterator[str]:
    """Yield manylinux platform tags for *arch*, newest glibc first.

    :param linux: the ``linux_<arch>`` platform string whose ``linux``
        prefix is rewritten into each manylinux tag.
    :param arch: CPU architecture part of the tag, e.g. ``x86_64``.
    """
    if not _have_compatible_abi(arch):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    # NOTE: the bound below is exclusive (used as the stop of a descending
    # range), so (2, 16) means "down to and including 2.17".
    too_old_glibc2 = _GLibCVersion(2, 16)
    if arch in {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for glibc_max in glibc_max_list:
        if glibc_max.major == too_old_glibc2.major:
            min_minor = too_old_glibc2.minor
        else:
            # For other glibc major versions oldest supported is (x, 0).
            min_minor = -1
        for glibc_minor in range(glibc_max.minor, min_minor, -1):
            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
            tag = "manylinux_{}_{}".format(*glibc_version)
            if _is_compatible(tag, arch, glibc_version):
                yield linux.replace("linux", tag)
            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
            if glibc_version in _LEGACY_MANYLINUX_MAP:
                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                if _is_compatible(legacy_tag, arch, glibc_version):
                    yield linux.replace("linux", legacy_tag)
 | 
			
		||||
@@ -0,0 +1,138 @@
 | 
			
		||||
"""PEP 656 support.
 | 
			
		||||
 | 
			
		||||
This module implements logic to detect if the currently running Python is
 | 
			
		||||
linked against musl, and what musl version is used.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import contextlib
 | 
			
		||||
import functools
 | 
			
		||||
import operator
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import struct
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
from typing import IO, Iterator, NamedTuple
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _read_unpacked(f: IO[bytes], fmt: str) -> tuple[int, ...]:
 | 
			
		||||
    return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _parse_ld_musl_from_elf(f: IO[bytes]) -> str | None:
    """Detect musl libc location by parsing the Python executable.

    Reads *f* (an ELF binary opened in binary mode) and returns the
    PT_INTERP interpreter path when it mentions musl, else None.

    Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
    ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
    """
    f.seek(0)
    try:
        ident = _read_unpacked(f, "16B")  # e_ident: magic, class, data, ...
    except struct.error:
        return None
    if ident[:4] != tuple(b"\x7fELF"):  # Invalid magic, not ELF.
        return None
    f.seek(struct.calcsize("HHI"), 1)  # Skip file type, machine, and version.

    try:
        # e_fmt: Format for program header.
        # p_fmt: Format for section header.
        # p_idx: Indexes to find p_type, p_offset, and p_filesz.
        e_fmt, p_fmt, p_idx = {
            1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)),  # 32-bit.
            2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)),  # 64-bit.
        }[ident[4]]  # Keyed on EI_CLASS; anything else is unparsable.
    except KeyError:
        return None
    else:
        p_get = operator.itemgetter(*p_idx)

    # Find the interpreter section and return its content.
    try:
        _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
    except struct.error:
        return None
    for i in range(e_phnum + 1):
        f.seek(e_phoff + e_phentsize * i)
        try:
            p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
        except struct.error:
            return None
        if p_type != 3:  # Not PT_INTERP.
            continue
        f.seek(p_offset)
        interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
        if "musl" not in interpreter:
            return None
        return interpreter
    return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _MuslVersion(NamedTuple):
 | 
			
		||||
    major: int
 | 
			
		||||
    minor: int
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _parse_musl_version(output: str) -> _MuslVersion | None:
 | 
			
		||||
    lines = [n for n in (n.strip() for n in output.splitlines()) if n]
 | 
			
		||||
    if len(lines) < 2 or lines[0][:4] != "musl":
 | 
			
		||||
        return None
 | 
			
		||||
    m = re.match(r"Version (\d+)\.(\d+)", lines[1])
 | 
			
		||||
    if not m:
 | 
			
		||||
        return None
 | 
			
		||||
    return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.lru_cache()
def _get_musl_version(executable: str) -> _MuslVersion | None:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader
    """
    try:
        f = open(executable, "rb")
    except OSError:
        return None
    with f:
        ld = _parse_ld_musl_from_elf(f)
    if not ld:
        return None
    # Invoking the musl loader with no arguments prints its banner
    # (including the version) to stderr.
    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
    return _parse_musl_version(proc.stderr)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def platform_tags(arch: str) -> Iterator[str]:
    """Generate musllinux tags compatible with the current platform.

    :param arch: The part of the platform tag after the ``linux_`` prefix
        (e.g. ``x86_64``); a ``linux_``-prefixed platform is assumed as a
        prerequisite for musllinux compatibility.
    :returns: An iterator of compatible musllinux tags, newest minor first.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:  # Python not dynamically linked against musl.
        return
    minor = sys_musl.minor
    while minor >= 0:
        yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
        minor -= 1
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == "__main__":  # pragma: no cover
    # Ad-hoc debugging aid: print the detected musl version and the tags
    # this module would generate for the current interpreter.
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    current_arch = re.sub(r"[.-]", "_", plat.split("-", 1)[-1])
    for tag in platform_tags(current_arch):
        print(tag, end="\n      ")
 | 
			
		||||
@@ -0,0 +1,478 @@
 | 
			
		||||
# This file is dual licensed under the terms of the Apache License, Version
 | 
			
		||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
 | 
			
		||||
# for complete details.
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import logging
 | 
			
		||||
import platform
 | 
			
		||||
import sys
 | 
			
		||||
import sysconfig
 | 
			
		||||
from importlib.machinery import EXTENSION_SUFFIXES
 | 
			
		||||
from typing import Iterable, Iterator, Sequence, Tuple, cast
 | 
			
		||||
 | 
			
		||||
from . import _manylinux, _musllinux
 | 
			
		||||
 | 
			
		||||
logger = logging.getLogger(__name__)

# Readability aliases used in the signatures below.
PythonVersion = Sequence[int]
MacVersion = Tuple[int, int]

# Maps long interpreter implementation names to the short forms used in tags.
INTERPRETER_SHORT_NAMES: dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# Whether this interpreter is a 32-bit build, judged from sys.maxsize.
_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        # Tags are case-insensitive; normalize once on construction.
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Sets of Tags are compared (e.g. via `.isdisjoint()`) very frequently
        # when scanning a page of package links, and every such comparison
        # hashes each element.  Computing the hash once up front produces
        # significant speedups for downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented
        # Cheap hash rejection before the string comparisons.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_tag(tag: str) -> frozenset[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    interpreters, abis, platforms = tag.split("-")
    # Each dotted component is a compressed alternative; expand the product.
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_config_var(name: str, warn: bool = False) -> int | str | None:
 | 
			
		||||
    value = sysconfig.get_config_var(name)
 | 
			
		||||
    if value is None and warn:
 | 
			
		||||
        logger.debug(
 | 
			
		||||
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
 | 
			
		||||
        )
 | 
			
		||||
    return value
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _normalize_string(string: str) -> str:
 | 
			
		||||
    return string.replace(".", "_").replace("-", "_")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _abi3_applies(python_version: PythonVersion) -> bool:
 | 
			
		||||
    """
 | 
			
		||||
    Determine if the Python version supports abi3.
 | 
			
		||||
 | 
			
		||||
    PEP 384 was first implemented in Python 3.2.
 | 
			
		||||
    """
 | 
			
		||||
    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]:
    """Return the ABI tags for a CPython release, most specific first.

    The primary ABI (e.g. ``cp38d``) combines the version digits with the
    build flags: ``d`` (debug), ``m`` (pymalloc, < 3.8 only), and ``u``
    (wide unicode, < 3.3 only).  ``warn`` is forwarded to the config-var
    lookups so missing variables can be logged.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def cpython_tags(
    python_version: PythonVersion | None = None,
    abis: Iterable[str] | None = None,
    platforms: Iterable[str] | None = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABItag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            # Major-only version: build-specific ABIs cannot be computed.
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if _abi3_applies(python_version):
        # abi3 wheels built for older CPython minors still load here, so
        # walk the minors down to 3.2 (the first abi3 release).
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _generic_abi() -> Iterator[str]:
    """Yield the SOABI-derived ABI tag, when the build defines one."""
    soabi = sysconfig.get_config_var("SOABI")
    if soabi:
        yield _normalize_string(soabi)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def generic_tags(
    interpreter: str | None = None,
    abis: Iterable[str] | None = None,
    platforms: Iterable[str] | None = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        # Default to "<short name><version>" for the running interpreter.
        interpreter = interpreter_name() + interpreter_version(warn=warn)
    platform_list = list(platforms or platform_tags())
    abi_list = list(_generic_abi() if abis is None else abis)
    if "none" not in abi_list:
        abi_list.append("none")
    for abi in abi_list:
        for platform_ in platform_list:
            yield Tag(interpreter, abi, platform_)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    major = py_version[0]
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{major}"
    if has_minor:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((major, minor))}"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def compatible_tags(
    python_version: PythonVersion | None = None,
    interpreter: str | None = None,
    platforms: Iterable[str] | None = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    version = python_version or sys.version_info[:2]
    platform_list = list(platforms or platform_tags())
    # Platform-specific pure-Python tags come first...
    for py in _py_interpreter_range(version):
        for platform_ in platform_list:
            yield Tag(py, "none", platform_)
    # ...then the interpreter-specific "any" tag, if requested...
    if interpreter:
        yield Tag(interpreter, "none", "any")
    # ...and finally the generic "any" tags.
    for py in _py_interpreter_range(version):
        yield Tag(py, "none", "any")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Map the reported macOS CPU arch to the effective one.

    A 32-bit interpreter reports the 32-bit architecture family
    ("ppc" or "i386") instead of what the OS says.
    """
    if is_32bit:
        return "ppc" if arch.startswith("ppc") else "i386"
    return arch
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> list[str]:
 | 
			
		||||
    formats = [cpu_arch]
 | 
			
		||||
    if cpu_arch == "x86_64":
 | 
			
		||||
        if version < (10, 4):
 | 
			
		||||
            return []
 | 
			
		||||
        formats.extend(["intel", "fat64", "fat32"])
 | 
			
		||||
 | 
			
		||||
    elif cpu_arch == "i386":
 | 
			
		||||
        if version < (10, 4):
 | 
			
		||||
            return []
 | 
			
		||||
        formats.extend(["intel", "fat32", "fat"])
 | 
			
		||||
 | 
			
		||||
    elif cpu_arch == "ppc64":
 | 
			
		||||
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
 | 
			
		||||
        if version > (10, 5) or version < (10, 4):
 | 
			
		||||
            return []
 | 
			
		||||
        formats.append("fat64")
 | 
			
		||||
 | 
			
		||||
    elif cpu_arch == "ppc":
 | 
			
		||||
        if version > (10, 6):
 | 
			
		||||
            return []
 | 
			
		||||
        formats.extend(["fat32", "fat"])
 | 
			
		||||
 | 
			
		||||
    if cpu_arch in {"arm64", "x86_64"}:
 | 
			
		||||
        formats.append("universal2")
 | 
			
		||||
 | 
			
		||||
    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
 | 
			
		||||
        formats.append("universal")
 | 
			
		||||
 | 
			
		||||
    return formats
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def mac_platforms(
    version: MacVersion | None = None, arch: str | None = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        # Truncate e.g. "10.15.7" to the (major, minor) pair.
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    if arch is None:
        arch = _mac_arch(cpu_arch)

    if (10, 0) <= version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number.  The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            for binary_format in _mac_binary_formats(compat_version, arch):
                yield f"macosx_10_{minor_version}_{binary_format}"

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number.  The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            for binary_format in _mac_binary_formats(compat_version, arch):
                yield f"macosx_{major_version}_0_{binary_format}"

        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary
        # supports that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                for binary_format in _mac_binary_formats(compat_version, arch):
                    yield f"macosx_10_{minor_version}_{binary_format}"
        else:
            for minor_version in range(16, 3, -1):
                yield f"macosx_10_{minor_version}_universal2"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield every platform tag compatible with this Linux system,
    manylinux/musllinux variants first, plain ``linux_<arch>`` last.
    """
    linux = _normalize_string(sysconfig.get_platform())
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel reports the 64-bit
        # platform; translate it back to the matching 32-bit tag.
        downgrade = {"linux_x86_64": "linux_i686", "linux_aarch64": "linux_armv7l"}
        linux = downgrade.get(linux, linux)
    _, arch = linux.split("_", 1)
    yield from _manylinux.platform_tags(linux, arch)
    yield from _musllinux.platform_tags(arch)
    yield linux
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _generic_platforms() -> Iterator[str]:
    """Yield the single normalized platform tag reported by sysconfig."""
    platform_name = sysconfig.get_platform()
    yield _normalize_string(platform_name)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.
    """
    implementation_name = sys.implementation.name
    # Use the short form ("cp", "pp", ...) when one is known, otherwise
    # fall back to the full implementation name.
    short_name = INTERPRETER_SHORT_NAMES.get(implementation_name)
    return short_name if short_name else implementation_name
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    # Prefer the interpreter-reported "py_version_nodot" config variable;
    # derive the value from sys.version_info when it is absent or empty.
    configured = _get_config_var("py_version_nodot", warn=warn)
    if not configured:
        return _version_nodot(sys.version_info[:2])
    return str(configured)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _version_nodot(version: PythonVersion) -> str:
 | 
			
		||||
    return "".join(map(str, version))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    name = interpreter_name()

    # Interpreter-specific tags come first (highest priority).
    if name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # Then the broadly compatible tags; PyPy pins the interpreter to "pp3".
    if name == "pp":
        yield from compatible_tags(interpreter="pp3")
    else:
        yield from compatible_tags()
 | 
			
		||||
							
								
								
									
										191
									
								
								teil20/lib/python3.11/site-packages/wheel/wheelfile.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										191
									
								
								teil20/lib/python3.11/site-packages/wheel/wheelfile.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,191 @@
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import csv
 | 
			
		||||
import hashlib
 | 
			
		||||
import os.path
 | 
			
		||||
import re
 | 
			
		||||
import stat
 | 
			
		||||
import time
 | 
			
		||||
from collections import OrderedDict
 | 
			
		||||
from io import StringIO, TextIOWrapper
 | 
			
		||||
from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
 | 
			
		||||
 | 
			
		||||
from wheel.cli import WheelError
 | 
			
		||||
from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode
 | 
			
		||||
 | 
			
		||||
# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]+?))(-(?P<build>\d[^\s-]*))?
     -(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>\S+)\.whl$""",
    re.VERBOSE,
)
MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC


def get_zipinfo_datetime(timestamp=None):
    """Return a ``ZipInfo``-style (year, month, day, hour, min, sec) tuple.

    ``SOURCE_DATE_EPOCH``, when set, overrides *timestamp* so that wheels can
    be built reproducibly (see issue #143). Values before 1980 are clamped to
    MINIMUM_TIMESTAMP because the ZIP format cannot represent earlier dates.
    """
    override = os.environ.get("SOURCE_DATE_EPOCH")
    if override is not None:
        stamp = int(override)
    else:
        stamp = int(timestamp or time.time())
    if stamp < MINIMUM_TIMESTAMP:
        stamp = MINIMUM_TIMESTAMP
    return time.gmtime(stamp)[0:6]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class WheelFile(ZipFile):
    """A ZipFile derivative class that also reads SHA-256 hashes from
    .dist-info/RECORD and checks any read files against those.
    """

    # Hash constructor used for entries written into the archive; its digests
    # become the RECORD entries emitted by close().
    _default_algorithm = hashlib.sha256

    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
        """Open *file* as a wheel archive.

        :param file: path to a ``.whl`` file; its basename must match
            ``WHEEL_INFO_RE``
        :param mode: ``"r"`` loads expected hashes from RECORD so reads are
            verified; ``"w"`` records hashes of written files for RECORD
        :param compression: default ``zipfile`` compression for written members
        :raises WheelError: on a bad wheel filename, a missing RECORD (read
            mode), or a RECORD entry using an unsupported or weak hash
        """
        basename = os.path.basename(file)
        self.parsed_filename = WHEEL_INFO_RE.match(basename)
        if not basename.endswith(".whl") or self.parsed_filename is None:
            raise WheelError(f"Bad wheel filename {basename!r}")

        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)

        # e.g. "pkg-1.0.dist-info" derived from the wheel filename.
        self.dist_info_path = "{}.dist-info".format(
            self.parsed_filename.group("namever")
        )
        self.record_path = self.dist_info_path + "/RECORD"
        # arcname -> (algorithm name, expected/recorded digest); insertion
        # order is preserved so RECORD is written in write order.
        self._file_hashes = OrderedDict()
        # arcname -> uncompressed size in bytes (write mode only).
        self._file_sizes = {}
        if mode == "r":
            # Ignore RECORD and any embedded wheel signatures
            self._file_hashes[self.record_path] = None, None
            self._file_hashes[self.record_path + ".jws"] = None, None
            self._file_hashes[self.record_path + ".p7s"] = None, None

            # Fill in the expected hashes by reading them from RECORD
            try:
                record = self.open(self.record_path)
            except KeyError:
                raise WheelError(f"Missing {self.record_path} file")

            with record:
                for line in csv.reader(
                    TextIOWrapper(record, newline="", encoding="utf-8")
                ):
                    # RECORD rows are "path,algorithm=urlsafe-b64-digest,size".
                    path, hash_sum, size = line
                    if not hash_sum:
                        continue

                    algorithm, hash_sum = hash_sum.split("=")
                    try:
                        # Probe that this Python's hashlib knows the algorithm.
                        hashlib.new(algorithm)
                    except ValueError:
                        raise WheelError(f"Unsupported hash algorithm: {algorithm}")

                    if algorithm.lower() in {"md5", "sha1"}:
                        raise WheelError(
                            "Weak hash algorithm ({}) is not permitted by PEP "
                            "427".format(algorithm)
                        )

                    self._file_hashes[path] = (
                        algorithm,
                        urlsafe_b64decode(hash_sum.encode("ascii")),
                    )

    def open(self, name_or_info, mode="r", pwd=None):
        """Open an archive member; in read mode, verify its RECORD hash.

        :raises WheelError: if a member being read has no RECORD entry, or
            its content does not match the recorded digest
        """
        def _update_crc(newdata):
            # Wraps ZipExtFile._update_crc: feed each decompressed chunk into
            # the running hash and compare against RECORD once EOF is reached.
            # NOTE(review): relies on the private ``_eof``/``_update_crc``
            # attributes of zipfile.ZipExtFile.
            eof = ef._eof
            update_crc_orig(newdata)
            running_hash.update(newdata)
            if eof and running_hash.digest() != expected_hash:
                raise WheelError(f"Hash mismatch for file '{ef_name}'")

        ef_name = (
            name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
        )
        # Directory entries (trailing "/") carry no hash; everything else read
        # from the archive must have been listed in RECORD.
        if (
            mode == "r"
            and not ef_name.endswith("/")
            and ef_name not in self._file_hashes
        ):
            raise WheelError(f"No hash found for file '{ef_name}'")

        ef = ZipFile.open(self, name_or_info, mode, pwd)
        if mode == "r" and not ef_name.endswith("/"):
            algorithm, expected_hash = self._file_hashes[ef_name]
            # expected_hash is None for RECORD itself and signature files.
            if expected_hash is not None:
                # Monkey patch the _update_crc method to also check for the hash from
                # RECORD
                running_hash = hashlib.new(algorithm)
                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc

        return ef

    def write_files(self, base_dir):
        """Add every file under *base_dir* to the archive.

        Regular files are written first in sorted order; .dist-info files are
        deferred to the end so the metadata directory comes last. RECORD
        itself is skipped here — close() generates it.
        """
        log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
        deferred = []
        for root, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(root, name))
                if os.path.isfile(path):
                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
                    if arcname == self.record_path:
                        # RECORD is regenerated by close(); never copy it in.
                        pass
                    elif root.endswith(".dist-info"):
                        deferred.append((path, arcname))
                    else:
                        self.write(path, arcname)

        deferred.sort()
        for path, arcname in deferred:
            self.write(path, arcname)

    def write(self, filename, arcname=None, compress_type=None):
        """Write the file *filename* into the archive as *arcname*,
        preserving its mtime and permission bits in the ZipInfo entry.
        """
        with open(filename, "rb") as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(
            arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
        )
        # Store the POSIX mode and file type bits in the high 16 bits of
        # external_attr, as the ZIP format expects for Unix hosts.
        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
        zinfo.compress_type = compress_type or self.compression
        self.writestr(zinfo, data, compress_type)

    def writestr(self, zinfo_or_arcname, data, compress_type=None):
        """Write *data* into the archive and record its hash and size for
        RECORD (unless the entry is RECORD itself). str data is UTF-8 encoded.
        """
        if isinstance(data, str):
            data = data.encode("utf-8")

        ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
        fname = (
            zinfo_or_arcname.filename
            if isinstance(zinfo_or_arcname, ZipInfo)
            else zinfo_or_arcname
        )
        log.info(f"adding '{fname}'")
        if fname != self.record_path:
            hash_ = self._default_algorithm(data)
            self._file_hashes[fname] = (
                hash_.name,
                urlsafe_b64encode(hash_.digest()).decode("ascii"),
            )
            self._file_sizes[fname] = len(data)

    def close(self):
        """Generate and write RECORD (write mode only), then close the ZIP."""
        # Write RECORD
        if self.fp is not None and self.mode == "w" and self._file_hashes:
            data = StringIO()
            writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
            writer.writerows(
                (
                    (fname, algorithm + "=" + hash_, self._file_sizes[fname])
                    for fname, (algorithm, hash_) in self._file_hashes.items()
                )
            )
            # RECORD lists itself with empty hash and size fields.
            writer.writerow((format(self.record_path), "", ""))
            zinfo = ZipInfo(self.record_path, date_time=get_zipinfo_datetime())
            zinfo.compress_type = self.compression
            zinfo.external_attr = 0o664 << 16
            self.writestr(zinfo, data.getvalue())

        ZipFile.close(self)
 | 
			
		||||
		Reference in New Issue
	
	Block a user