diff --git a/bootstrap.egg-info/entry_points.txt b/bootstrap.egg-info/entry_points.txt
index a21ca22709..be66b73af1 100644
--- a/bootstrap.egg-info/entry_points.txt
+++ b/bootstrap.egg-info/entry_points.txt
@@ -3,6 +3,7 @@ egg_info = setuptools.command.egg_info:egg_info
 build_py = setuptools.command.build_py:build_py
 sdist = setuptools.command.sdist:sdist
 editable_wheel = setuptools.command.editable_wheel:editable_wheel
+bdist_wheel = setuptools.command.bdist_wheel:bdist_wheel

 [distutils.setup_keywords]
 include_package_data = setuptools.dist:assert_bool
diff --git a/setup.cfg b/setup.cfg
index 37122e93d8..176a582d99 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -123,6 +123,7 @@ distutils.commands =
     build_clib = setuptools.command.build_clib:build_clib
     build_ext = setuptools.command.build_ext:build_ext
     build_py = setuptools.command.build_py:build_py
+    bdist_wheel = setuptools.command.bdist_wheel:bdist_wheel
    develop = setuptools.command.develop:develop
    dist_info = setuptools.command.dist_info:dist_info
    easy_install = setuptools.command.easy_install:easy_install
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 9267cf312f..c9d1cef741 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -352,7 +352,7 @@ def run_setup(self, setup_script='setup.py'):
         )

     def get_requires_for_build_wheel(self, config_settings=None):
-        return self._get_build_requires(config_settings, requirements=['wheel'])
+        return self._get_build_requires(config_settings, requirements=[])

     def get_requires_for_build_sdist(self, config_settings=None):
         return self._get_build_requires(config_settings, requirements=[])
@@ -430,9 +430,13 @@ def _build_with_temp_dir(
     def build_wheel(
         self, wheel_directory, config_settings=None, metadata_directory=None
     ):
+        info_dir = self._get_dist_info_dir(metadata_directory)
+        cmd = ["bdist_wheel"]
+        if info_dir:
+            cmd = ["dist_info", "--use-cached", "--dist-info-dir", info_dir] + cmd
         with suppress_known_deprecation():
             return self._build_with_temp_dir(
-                ['bdist_wheel'], '.whl', wheel_directory, config_settings
+                cmd, ".whl", wheel_directory, config_settings
             )

     def build_sdist(self, sdist_directory, config_settings=None):
diff --git a/setuptools/command/_macosx_libfile.py b/setuptools/command/_macosx_libfile.py
new file mode 100644
index 0000000000..0c060b3a42
--- /dev/null
+++ b/setuptools/command/_macosx_libfile.py
@@ -0,0 +1,474 @@
+# The original version of this file was adopted from pypa/wheel,
+# initially distributed under the MIT License:
+# Copyright (c) 2012 Daniel Holth and contributors
+"""
+This module contains functions that analyse dynamic library headers
+to extract system information.
+
+Currently macOS only.
+
+A library file on macOS starts with either a Mach-O or a Fat header.
+The two are distinguished by the first 32 bits, the magic number.
+Constants matching the value as stored carry the suffix _MAGIC; the
+suffix _CIGAM denotes the same value with reversed byte order.
+Both headers come in two variants: 32-bit and 64-bit.
+
+A Fat header indicates that the library contains several builds
+(typically one per architecture) and records where each Mach-O
+header starts.
+
+Each section introduced by a Mach-O header contains one build
+(so a file that starts with a Mach-O header contains only one build).
+
+The Mach-O header is followed by load-command sections.
+Each of them starts with two fields:
+cmd - magic number identifying this command
+cmdsize - total size occupied by this section's information.
+
+Only the sections LC_VERSION_MIN_MACOSX (used for macOS 10.13 and earlier)
+and LC_BUILD_VERSION (used for macOS 10.14 and newer) are of interest here,
+because they contain information about the minimal system version.
+
+Important remarks:
+- For fat files this implementation looks for the maximum version number.
+  It does not check whether a slice is 32- or 64-bit, nor compare it with the
+  package currently being built, so it may report a higher version than is
+  actually needed.
+- All structure signatures are taken from the macOS header files.
+- The binary format should be more stable than `otool` output; if Apple
+  introduces changes, both approaches would need to be updated anyway.
+- The system compiler will set the deployment target no lower than
+  11.0 for arm64 builds. For "Universal 2" builds use the x86_64 deployment
+  target when the arm64 target is 11.0.
+"""
+
+from __future__ import annotations
+
+import ctypes
+import os
+import sys
+
+"""Constants and structs needed from the Mach-O header files."""
+
+FAT_MAGIC = 0xCAFEBABE
+FAT_CIGAM = 0xBEBAFECA
+FAT_MAGIC_64 = 0xCAFEBABF
+FAT_CIGAM_64 = 0xBFBAFECA
+MH_MAGIC = 0xFEEDFACE
+MH_CIGAM = 0xCEFAEDFE
+MH_MAGIC_64 = 0xFEEDFACF
+MH_CIGAM_64 = 0xCFFAEDFE
+
+LC_VERSION_MIN_MACOSX = 0x24
+LC_BUILD_VERSION = 0x32
+
+CPU_TYPE_ARM64 = 0x0100000C
+
+mach_header_fields = [
+    ("magic", ctypes.c_uint32),
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("filetype", ctypes.c_uint32),
+    ("ncmds", ctypes.c_uint32),
+    ("sizeofcmds", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+]
+"""
+struct mach_header {
+    uint32_t magic;            /* mach magic number identifier */
+    cpu_type_t cputype;        /* cpu specifier */
+    cpu_subtype_t cpusubtype;  /* machine specifier */
+    uint32_t filetype;         /* type of file */
+    uint32_t ncmds;            /* number of load commands */
+    uint32_t sizeofcmds;       /* the size of all the load commands */
+    uint32_t flags;            /* flags */
+};
+typedef integer_t cpu_type_t;
+typedef integer_t cpu_subtype_t;
+"""
+
+mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
+"""
+struct mach_header_64 {
+    uint32_t magic;            /* mach magic number identifier */
+    cpu_type_t cputype;        /* cpu specifier */
+    cpu_subtype_t cpusubtype;  /* machine specifier */
+    uint32_t filetype;         /* type of file */
+    uint32_t ncmds;            /* number of load commands */
+    uint32_t sizeofcmds;       /* the size of all the load commands */
+    uint32_t flags;            /* flags */
+    uint32_t reserved;         /* reserved */
+};
+"""
+
+fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
+"""
+struct fat_header {
+    uint32_t magic;      /* FAT_MAGIC or FAT_MAGIC_64 */
+    uint32_t nfat_arch;  /* number of structs that follow */
+};
+"""
+
+fat_arch_fields = [
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint32),
+    ("size", ctypes.c_uint32),
+    ("align", ctypes.c_uint32),
+]
+"""
+struct fat_arch {
+    cpu_type_t cputype;        /* cpu specifier (int) */
+    cpu_subtype_t cpusubtype;  /* machine specifier (int) */
+    uint32_t offset;           /* file offset to this object file */
+    uint32_t size;             /* size of this object file */
+    uint32_t align;            /* alignment as a power of 2 */
+};
+"""
+
+fat_arch_64_fields = [
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint64),
+    ("size", ctypes.c_uint64),
+    ("align", ctypes.c_uint32),
+    ("reserved", ctypes.c_uint32),
+]
+"""
+struct fat_arch_64 {
+    cpu_type_t cputype;        /* cpu specifier (int) */
+    cpu_subtype_t cpusubtype;  /* machine specifier (int) */
+    uint64_t offset;           /* file offset to this object
file */ + uint64_t size; /* size of this object file */ + uint32_t align; /* alignment as a power of 2 */ + uint32_t reserved; /* reserved */ +}; +""" + +segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)] +"""base for reading segment info""" + +segment_command_fields = [ + ("cmd", ctypes.c_uint32), + ("cmdsize", ctypes.c_uint32), + ("segname", ctypes.c_char * 16), + ("vmaddr", ctypes.c_uint32), + ("vmsize", ctypes.c_uint32), + ("fileoff", ctypes.c_uint32), + ("filesize", ctypes.c_uint32), + ("maxprot", ctypes.c_int), + ("initprot", ctypes.c_int), + ("nsects", ctypes.c_uint32), + ("flags", ctypes.c_uint32), +] +""" +struct segment_command { /* for 32-bit architectures */ + uint32_t cmd; /* LC_SEGMENT */ + uint32_t cmdsize; /* includes sizeof section structs */ + char segname[16]; /* segment name */ + uint32_t vmaddr; /* memory address of this segment */ + uint32_t vmsize; /* memory size of this segment */ + uint32_t fileoff; /* file offset of this segment */ + uint32_t filesize; /* amount to map from the file */ + vm_prot_t maxprot; /* maximum VM protection */ + vm_prot_t initprot; /* initial VM protection */ + uint32_t nsects; /* number of sections in segment */ + uint32_t flags; /* flags */ +}; +typedef int vm_prot_t; +""" + +segment_command_fields_64 = [ + ("cmd", ctypes.c_uint32), + ("cmdsize", ctypes.c_uint32), + ("segname", ctypes.c_char * 16), + ("vmaddr", ctypes.c_uint64), + ("vmsize", ctypes.c_uint64), + ("fileoff", ctypes.c_uint64), + ("filesize", ctypes.c_uint64), + ("maxprot", ctypes.c_int), + ("initprot", ctypes.c_int), + ("nsects", ctypes.c_uint32), + ("flags", ctypes.c_uint32), +] +""" +struct segment_command_64 { /* for 64-bit architectures */ + uint32_t cmd; /* LC_SEGMENT_64 */ + uint32_t cmdsize; /* includes sizeof section_64 structs */ + char segname[16]; /* segment name */ + uint64_t vmaddr; /* memory address of this segment */ + uint64_t vmsize; /* memory size of this segment */ + uint64_t fileoff; /* file offset of this segment */ + uint64_t filesize; /* amount to map from the file */ + vm_prot_t maxprot; /* maximum VM protection */ + vm_prot_t initprot; /* initial VM protection */ + uint32_t nsects; /* number of sections in segment */ + uint32_t flags; /* flags */ +}; +""" + +version_min_command_fields = segment_base_fields + [ + ("version", ctypes.c_uint32), + ("sdk", ctypes.c_uint32), +] +""" +struct version_min_command { + uint32_t cmd; /* LC_VERSION_MIN_MACOSX or + LC_VERSION_MIN_IPHONEOS or + LC_VERSION_MIN_WATCHOS or + LC_VERSION_MIN_TVOS */ + uint32_t cmdsize; /* sizeof(struct min_version_command) */ + uint32_t version; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */ + uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */ +}; +""" + +build_version_command_fields = segment_base_fields + [ + ("platform", ctypes.c_uint32), + ("minos", ctypes.c_uint32), + ("sdk", ctypes.c_uint32), + ("ntools", ctypes.c_uint32), +] +""" +struct build_version_command { + uint32_t cmd; /* LC_BUILD_VERSION */ + uint32_t cmdsize; /* sizeof(struct build_version_command) plus */ + /* ntools * sizeof(struct build_tool_version) */ + uint32_t platform; /* platform */ + uint32_t minos; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */ + uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */ + uint32_t ntools; /* number of tool entries following this */ +}; +""" + + +def swap32(x): + return ( + ((x << 24) & 0xFF000000) + | ((x << 8) & 0x00FF0000) + | ((x >> 8) & 0x0000FF00) + | ((x >> 24) & 0x000000FF) + ) + + +def get_base_class_and_magic_number(lib_file, 
seek=None): + if seek is None: + seek = lib_file.tell() + else: + lib_file.seek(seek) + magic_number = ctypes.c_uint32.from_buffer_copy( + lib_file.read(ctypes.sizeof(ctypes.c_uint32)) + ).value + + # Handle wrong byte order + if magic_number in [FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64]: + if sys.byteorder == "little": + BaseClass = ctypes.BigEndianStructure + else: + BaseClass = ctypes.LittleEndianStructure + + magic_number = swap32(magic_number) + else: + BaseClass = ctypes.Structure + + lib_file.seek(seek) + return BaseClass, magic_number + + +def read_data(struct_class, lib_file): + return struct_class.from_buffer_copy(lib_file.read(ctypes.sizeof(struct_class))) + + +def extract_macosx_min_system_version(path_to_lib): # noqa: C901 + with open(path_to_lib, "rb") as lib_file: + BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0) + if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]: + return + + if magic_number in [FAT_MAGIC, FAT_CIGAM_64]: + + class FatHeader(BaseClass): + _fields_ = fat_header_fields + + fat_header = read_data(FatHeader, lib_file) + if magic_number == FAT_MAGIC: + + class FatArch(BaseClass): + _fields_ = fat_arch_fields + + else: + + class FatArch(BaseClass): + _fields_ = fat_arch_64_fields + + fat_arch_list = [ + read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch) + ] + + versions_list = [] + for el in fat_arch_list: + try: + version = read_mach_header(lib_file, el.offset) + if version is not None: + if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1: + # Xcode will not set the deployment target below 11.0.0 + # for the arm64 architecture. Ignore the arm64 deployment + # in fat binaries when the target is 11.0.0, that way + # the other architectures can select a lower deployment + # target. + # This is safe because there is no arm64 variant for + # macOS 10.15 or earlier. 
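+                            # Note: `version` here is already a decoded
+                            # (X, Y, Z) tuple produced by read_mach_header()
+                            # via parse_version(), e.g. 0x000A0F00 -> (10, 15, 0)
+                            # and 0x000B0000 -> (11, 0, 0), so the tuple
+                            # comparison below is exact.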
+                            if version == (11, 0, 0):
+                                continue
+                        versions_list.append(version)
+                except ValueError:
+                    pass
+
+            if len(versions_list) > 0:
+                return max(versions_list)
+            else:
+                return None
+
+        else:
+            try:
+                return read_mach_header(lib_file, 0)
+            except ValueError:
+                """Some error occurred while reading the library file."""
+                return None
+
+
+def read_mach_header(lib_file, seek=None):
+    """
+    Parse a Mach-O header and extract information
+    about the minimal system version.
+
+    :param lib_file: reference to the opened library file, with the read
+        pointer positioned at the header
+    """
+    if seek is not None:
+        lib_file.seek(seek)
+    base_class, magic_number = get_base_class_and_magic_number(lib_file)
+    arch = "32" if magic_number == MH_MAGIC else "64"
+
+    class SegmentBase(base_class):
+        _fields_ = segment_base_fields
+
+    if arch == "32":
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields
+
+    else:
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields_64
+
+    mach_header = read_data(MachHeader, lib_file)
+    for _i in range(mach_header.ncmds):
+        pos = lib_file.tell()
+        segment_base = read_data(SegmentBase, lib_file)
+        lib_file.seek(pos)
+        if segment_base.cmd == LC_VERSION_MIN_MACOSX:
+
+            class VersionMinCommand(base_class):
+                _fields_ = version_min_command_fields
+
+            version_info = read_data(VersionMinCommand, lib_file)
+            return parse_version(version_info.version)
+        elif segment_base.cmd == LC_BUILD_VERSION:
+
+            class VersionBuild(base_class):
+                _fields_ = build_version_command_fields
+
+            version_info = read_data(VersionBuild, lib_file)
+            return parse_version(version_info.minos)
+        else:
+            lib_file.seek(pos + segment_base.cmdsize)
+            continue
+
+
+def parse_version(version):
+    x = (version & 0xFFFF0000) >> 16
+    y = (version & 0x0000FF00) >> 8
+    z = version & 0x000000FF
+    return x, y, z
+
+
+def calculate_macosx_platform_tag(archive_root, platform_tag):  # noqa: C901
+    """
+    Calculate the proper macOS platform tag based on the files included in the wheel.
+
+    Example platform tag: `macosx-10.14-x86_64`
+    """
+    prefix, base_version, suffix = platform_tag.split("-")
+    base_version = tuple(int(x) for x in base_version.split("."))
+    base_version = base_version[:2]
+    if base_version[0] > 10:
+        base_version = (base_version[0], 0)
+    assert len(base_version) == 2
+    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
+        deploy_target = tuple(
+            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
+        )
+        deploy_target = deploy_target[:2]
+        if deploy_target[0] > 10:
+            deploy_target = (deploy_target[0], 0)
+        if deploy_target < base_version:
+            sys.stderr.write(
+                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
+                "the version on which the Python interpreter was compiled ({}), and "
+                "will be ignored.\n".format(
+                    ".".join(str(x) for x in deploy_target),
+                    ".".join(str(x) for x in base_version),
+                )
+            )
+        else:
+            base_version = deploy_target
+
+    assert len(base_version) == 2
+    start_version = base_version
+    versions_dict = {}
+    for dirpath, _dirnames, filenames in os.walk(archive_root):
+        for filename in filenames:
+            if filename.endswith(".dylib") or filename.endswith(".so"):
+                lib_path = os.path.join(dirpath, filename)
+                min_ver = extract_macosx_min_system_version(lib_path)
+                if min_ver is not None:
+                    min_ver = min_ver[0:2]
+                    if min_ver[0] > 10:
+                        min_ver = (min_ver[0], 0)
+                    versions_dict[lib_path] = min_ver
+
+    if len(versions_dict) > 0:
+        base_version = max(base_version, max(versions_dict.values()))
+
+    # macOS platform tags do not carry the minor (bugfix) release number
+    fin_base_version = "_".join([str(x) for x in
base_version]) + if start_version < base_version: + problematic_files = [k for k, v in versions_dict.items() if v > start_version] + problematic_files = "\n".join(problematic_files) + if len(problematic_files) == 1: + files_form = "this file" + else: + files_form = "these files" + error_message = ( + "[WARNING] This wheel needs a higher macOS version than {} " + "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least " + + fin_base_version + + " or recreate " + + files_form + + " with lower " + "MACOSX_DEPLOYMENT_TARGET: \n" + problematic_files + ) + + if "MACOSX_DEPLOYMENT_TARGET" in os.environ: + error_message = error_message.format( + "is set in MACOSX_DEPLOYMENT_TARGET variable." + ) + else: + error_message = error_message.format( + "the version your Python interpreter is compiled against." + ) + + sys.stderr.write(error_message) + + platform_tag = prefix + "_" + fin_base_version + "_" + suffix + return platform_tag diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py new file mode 100644 index 0000000000..a87a4f2717 --- /dev/null +++ b/setuptools/command/bdist_wheel.py @@ -0,0 +1,383 @@ +# The original version of this file was adopted from pypa/wheel, +# initially distributed under the MIT License: +# Copyright (c) 2012 Daniel Holth and contributors +""" +Create a wheel (.whl) distribution. + +A wheel is a built archive format. +""" +import logging +import os +import re +import shutil +import stat +import sys +import sysconfig +import warnings +from collections import OrderedDict +from pathlib import Path +from zipfile import ZIP_DEFLATED, ZIP_STORED + +from distutils import dir_util # prefer dir_util for log/cache consistency + +from .. import Command, _normalization +from .._wheelbuilder import WheelBuilder +from ..extern.packaging import tags +from ._macosx_libfile import calculate_macosx_platform_tag + +_logger = logging.getLogger(__name__) +_PYTHON_TAG = f"py{sys.version_info[0]}" +_PY_LIMITED_API_PATTERN = r"cp3\d" + + +def _get_platform(archive_root): + """Return our platform name 'win32', 'linux_x86_64'""" + result = sysconfig.get_platform() + if result.startswith("macosx") and archive_root is not None: + result = calculate_macosx_platform_tag(archive_root, result) + elif result == "linux-x86_64" and sys.maxsize == 2147483647: + # pip pull request #3497 + result = "linux-i686" + + return result.replace("-", "_") + + +def _get_flag(var, fallback, expected=True, warn=True): + """Use a fallback value for determining SOABI flags if the needed config + var is unset or unavailable.""" + val = sysconfig.get_config_var(var) + if val is None: + if warn: + warnings.warn( + "Config variable '{}' is unset, Python ABI tag may " + "be incorrect".format(var), + RuntimeWarning, + 2, + ) + return fallback + return val == expected + + +def _get_abi_tag(): + """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2).""" + soabi = sysconfig.get_config_var("SOABI") + impl = tags.interpreter_name() + if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"): + d = "" + m = "" + u = "" + if _get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")): + d = "d" + + if _get_flag( + "WITH_PYMALLOC", + impl == "cp", + warn=(impl == "cp" and sys.version_info < (3, 8)), + ) and sys.version_info < (3, 8): + m = "m" + + abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}" + elif soabi and impl == "cp": + abi = "cp" + soabi.split("-")[1] + elif soabi and impl == "pp": + # we want something like pypy36-pp73 + abi = 
"-".join(soabi.split("-")[:2]) + abi = abi.replace(".", "_").replace("-", "_") + elif soabi: + abi = soabi.replace(".", "_").replace("-", "_") + else: + abi = None + + return abi + + +class bdist_wheel(Command): + description = "create a wheel distribution" + + supported_compressions = OrderedDict( + [("stored", ZIP_STORED), ("deflated", ZIP_DEFLATED)] + ) + + user_options = [ + ("bdist-dir=", "b", "temporary directory for creating the distribution"), + ( + "plat-name=", + "p", + "platform name to embed in generated filenames " + "(default: %s)" % _get_platform(None), + ), + ( + "keep-temp", + "k", + "keep the pseudo-installation tree around after " + + "creating the distribution archive", + ), + ("dist-dir=", "d", "directory to put final built distributions in"), + ("skip-build", None, "skip rebuilding everything (for testing/debugging)"), + ( + "relative", + None, + "build the archive using relative paths " "(default: false)", + ), + ( + "owner=", + "u", + "Owner name used when creating a tar file" " [default: current user]", + ), + ( + "group=", + "g", + "Group name used when creating a tar file" " [default: current group]", + ), + ("universal", None, "make a universal wheel" " (default: false)"), + ( + "compression=", + None, + "zipfile compression (one of: {})" + " (default: 'deflated')".format(", ".join(supported_compressions)), + ), + ( + "python-tag=", + None, + "Python implementation compatibility tag" f" (default: {_PYTHON_TAG!r})", + ), + ( + "build-number=", + None, + "Build number for this particular version. " + "As specified in PEP-0427, this must start with a digit. " + "[default: None]", + ), + ( + "py-limited-api=", + None, + "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)", + ), + ("dist-info-dir=", "I", "*DO NOT USE/PRIVATE* pre-build .dist-info directory"), + ] + + boolean_options = ["keep-temp", "skip-build", "relative", "universal"] + + def initialize_options(self): + self._name = None + self.bdist_dir = None + self.data_dir = None + self.plat_name = None + self.plat_tag = None + self.format = "zip" + self.keep_temp = False + self.dist_dir = None + self.root_is_pure = None + self.skip_build = None + self.relative = False + self.owner = None + self.group = None + self.universal = False + self.compression = "deflated" + self.python_tag = _PYTHON_TAG + self.build_number = None + self.py_limited_api = False + self.plat_name_supplied = False + self.dist_info_dir = None + + def finalize_options(self): + if self.dist_info_dir is not None: + self.dist_info_dir = Path(self.dist_info_dir) + + if self.bdist_dir is None: + bdist_base = self.get_finalized_command("bdist").bdist_base + self.bdist_dir = os.path.join(bdist_base, "wheel") + + self._wheel_dist_name = self._get_dist_info_name() + if self.build_number: + build_tag = _normalization.filename_component(self.build_number) + self._wheel_dist_name += f"-{build_tag}" + + self.data_dir = self._wheel_dist_name + ".data" + self.plat_name_supplied = self.plat_name is not None + + try: + self.compression = self.supported_compressions[self.compression] + except KeyError: + raise ValueError(f"Unsupported compression: {self.compression}") + + need_options = ("dist_dir", "plat_name", "skip_build") + + self.set_undefined_options("bdist", *zip(need_options, need_options)) + + self.root_is_pure = not ( + self.distribution.has_ext_modules() or self.distribution.has_c_libraries() + ) + + if self.py_limited_api and not re.match( + _PY_LIMITED_API_PATTERN, self.py_limited_api + ): + raise ValueError("py-limited-api must 
match '%s'" % _PY_LIMITED_API_PATTERN) + + if self.build_number is not None and not self.build_number[:1].isdigit(): + raise ValueError("Build tag (build-number) must start with a digit.") + + def _get_dist_info_name(self): + if self.dist_info_dir is None: + dist_info = self.get_finalized_command("dist_info") + self.dist_info_dir = dist_info.dist_info_dir + return dist_info.name + + assert str(self.dist_info_dir).endswith(".dist-info") + assert (self.dist_info_dir / "METADATA").exists() + return self.dist_info_dir.name[: -len(".dist-info")] + + def _ensure_dist_info(self): + if not Path(self.dist_info_dir, "METADATA").exists(): + self.run_command("dist_info") + + def get_tag(self): + # bdist sets self.plat_name if unset, we should only use it for purepy + # wheels if the user supplied it. + + # cibuildwheel recommends users to override get_tag, so we cannot make it + # explicitly private, to avoid breaking people's script, see: + # https://cibuildwheel.readthedocs.io/en/stable/faq/#abi3 + # https://github.com/joerick/python-abi3-package-sample/blob/main/setup.py + + if self.plat_name_supplied: + plat_name = self.plat_name + elif self.root_is_pure: + plat_name = "any" + else: + # macosx contains system version in platform name so need special handle + if self.plat_name and not self.plat_name.startswith("macosx"): + plat_name = self.plat_name + else: + # on macosx always limit the platform name to comply with any + # c-extension modules in bdist_dir, since the user can specify + # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake + + # on other platforms, and on macosx if there are no c-extension + # modules, use the default platform name. + plat_name = _get_platform(self.bdist_dir) + + if ( + plat_name in ("linux-x86_64", "linux_x86_64") + and sys.maxsize == 2147483647 + ): + plat_name = "linux_i686" + + plat_name = ( + plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_") + ) + + if self.root_is_pure: + if self.universal: + impl = "py2.py3" + else: + impl = self.python_tag + tag = (impl, "none", plat_name) + else: + impl_name = tags.interpreter_name() + impl_ver = tags.interpreter_version() + impl = impl_name + impl_ver + # We don't work on CPython 3.1, 3.0. + if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"): + impl = self.py_limited_api + abi_tag = "abi3" + else: + abi_tag = str(_get_abi_tag()).lower() + tag = (impl, abi_tag, plat_name) + # issue gh-374: allow overriding plat_name + supported_tags = [ + (t.interpreter, t.abi, plat_name) for t in tags.sys_tags() + ] + assert ( + tag in supported_tags + ), f"would build wheel with unsupported tag {tag}" + return tag + + def run(self): + self.mkpath(self.bdist_dir) + build_scripts = self.reinitialize_command("build_scripts") + build_scripts.executable = "python" + build_scripts.force = True + + build_ext = self.reinitialize_command("build_ext") + build_ext.inplace = False + + if not self.skip_build: + self.run_command("build") + + install = self.reinitialize_command("install", reinit_subcommands=True) + install.root = self.bdist_dir + install.compile = False + install.skip_build = self.skip_build + install.warn_dir = False + # install.skip_egg_info = True + + # A wheel without setuptools scripts is more cross-platform. + # Use the (undocumented) `no_ep` option to setuptools' + # install_scripts command to avoid creating entry point scripts. 
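+        # Installers such as pip regenerate console scripts from the entry
+        # points recorded in the wheel metadata at install time, so nothing
+        # is lost by skipping script generation here.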
+ install_scripts = self.reinitialize_command("install_scripts") + install_scripts.no_ep = True + + # Use a custom scheme for the archive, because we have to decide + # at installation time which scheme to use. + for key in ("headers", "scripts", "data", "purelib", "platlib"): + setattr(install, "install_" + key, os.path.join(self.data_dir, key)) + + basedir_observed = "" + + if os.name == "nt": + # win32 barfs if any of these are ''; could be '.'? + # (distutils.command.install:change_roots bug) + basedir_observed = os.path.normpath(os.path.join(self.data_dir, "..")) + self.install_libbase = self.install_lib = basedir_observed + + setattr( + install, + "install_purelib" if self.root_is_pure else "install_platlib", + basedir_observed, + ) + + _logger.info(f"installing to {self.bdist_dir}") + + self.run_command("install") + + impl_tag, abi_tag, plat_tag = self.get_tag() + archive_basename = f"{self._wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}" + if not self.relative: + archive_root = self.bdist_dir + else: + archive_root = os.path.join( + self.bdist_dir, dir_util.ensure_relative(install.install_base) + ) + + # Make the archive + if not os.path.exists(self.dist_dir): + self.mkpath(self.dist_dir) + + wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl") + with WheelBuilder(wheel_path, compression=self.compression) as builder: + builder.add_tree(archive_root, exclude=["*.dist-info/*", "*.egg-info/*"]) + self._ensure_dist_info() + builder.add_tree(self.dist_info_dir, prefix=self.dist_info_dir.name) + + # Add to 'Distribution.dist_files' so that the "upload" command works + getattr(self.distribution, "dist_files", []).append( + ( + "bdist_wheel", + "{}.{}".format(*sys.version_info[:2]), # like 3.7 + wheel_path, + ) + ) + + if not self.keep_temp: + # use dir_util to keep its state caching consistent + dir_util.remove_tree(self.bdist_dir, self.verbose, self.dry_run) + if os.path.isdir(self.bdist_dir) and not self.dry_run: + # force removal in the case dir_util cannot handle it + shutil.rmtree(self.bdist_dir, onerror=_remove_readonly) + + +def _remove_readonly(func, path, excinfo): + print(str(excinfo[1])) + os.chmod(path, stat.S_IWRITE) + func(path) diff --git a/setuptools/command/install.py b/setuptools/command/install.py index 606cce9d89..44c485ab94 100644 --- a/setuptools/command/install.py +++ b/setuptools/command/install.py @@ -22,13 +22,15 @@ class install(orig.install): None, "used by system package builders to create 'flat' eggs", ), + ('skip-egg-info', None, "*TRANSITIONAL* will be removed in the future"), ] boolean_options = orig.install.boolean_options + [ 'old-and-unmanageable', 'single-version-externally-managed', + 'skip-egg-info', ] new_commands = [ - ('install_egg_info', lambda self: True), + ('install_egg_info', lambda self: not self.skip_egg_info), ('install_scripts', lambda self: True), ] _nc = dict(new_commands) @@ -50,6 +52,7 @@ def initialize_options(self): orig.install.initialize_options(self) self.old_and_unmanageable = None self.single_version_externally_managed = None + self.skip_egg_info = False def finalize_options(self): orig.install.finalize_options(self) diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py new file mode 100644 index 0000000000..bfb7b26418 --- /dev/null +++ b/setuptools/tests/test_bdist_wheel.py @@ -0,0 +1,440 @@ +# The original version of this file was adopted from pypa/wheel, +# initially distributed under the MIT License: +# Copyright (c) 2012 Daniel Holth and contributors +import os.path 
+import shutil +import stat +import sys +import sysconfig +from inspect import cleandoc +from zipfile import ZipFile + +import pytest +from jaraco.path import DirectoryStack +from jaraco.path import build as build_paths + +from distutils.core import run_setup +from setuptools.command.bdist_wheel import _get_abi_tag, bdist_wheel +from setuptools.dist import Distribution +from setuptools.extern.packaging import tags +from setuptools.warnings import SetuptoolsDeprecationWarning + +DEFAULT_FILES = { + "dummy_dist-1.0.dist-info/top_level.txt", + "dummy_dist-1.0.dist-info/METADATA", + "dummy_dist-1.0.dist-info/WHEEL", + "dummy_dist-1.0.dist-info/RECORD", +} +DEFAULT_LICENSE_FILES = { + "LICENSE", + "LICENSE.txt", + "LICENCE", + "LICENCE.txt", + "COPYING", + "COPYING.md", + "NOTICE", + "NOTICE.rst", + "AUTHORS", + "AUTHORS.txt", +} +OTHER_IGNORED_FILES = { + "LICENSE~", + "AUTHORS~", +} +SETUPPY_EXAMPLE = """\ +from setuptools import setup + +setup( + name='dummy_dist', + version='1.0', +) +""" + + +EXAMPLES = { + "dummy-dist": { + "setup.py": SETUPPY_EXAMPLE, + "licenses": {"DUMMYFILE": ""}, + **dict.fromkeys(DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES, ""), + }, + "simple-dist": { + "setup.py": cleandoc( + """ + from setuptools import setup + + setup( + name="simple.dist", + version="0.1", + description="A testing distribution \N{SNOWMAN}", + extras_require={"voting": ["beaglevote"]}, + ) + """ + ), + "simpledist": "", + }, + "complex-dist": { + "setup.py": cleandoc( + """ + from setuptools import setup + + setup( + name="complex-dist", + version="0.1", + description="Another testing distribution \N{SNOWMAN}", + long_description="Another testing distribution \N{SNOWMAN}", + author="Illustrious Author", + author_email="illustrious@example.org", + url="http://example.org/exemplary", + packages=["complexdist"], + setup_requires=["setuptools"], + install_requires=["quux", "splort"], + extras_require={"simple": ["simple.dist"]}, + tests_require=["foo", "bar>=10.0.0"], + entry_points={ + "console_scripts": [ + "complex-dist=complexdist:main", + "complex-dist2=complexdist:main", + ], + }, + ) + """ + ), + "complexdist": {"__init__.py": "def main(): return"}, + }, + "headers-dist": { + "setup.py": cleandoc( + """ + from setuptools import setup + + setup( + name="headers.dist", + version="0.1", + description="A distribution with headers", + headers=["header.h"], + ) + """ + ), + "setup.cfg": "[bdist_wheel]\nuniversal=1", + "headersdist.py": "", + "header.h": "", + }, + "commasinfilenames-dist": { + "setup.py": cleandoc( + """ + from setuptools import setup + + setup( + name="testrepo", + version="0.1", + packages=["mypackage"], + description="A test package with commas in file names", + include_package_data=True, + package_data={"mypackage.data": ["*"]}, + ) + """ + ), + "mypackage": { + "__init__.py": "", + "data": {"__init__.py": "", "1,2,3.txt": ""}, + }, + "testrepo-0.1.0": { + "mypackage": {"__init__.py": ""}, + }, + }, + "unicode-dist": { + "setup.py": cleandoc( + """ + from setuptools import setup + + setup( + name="unicode.dist", + version="0.1", + description="A testing distribution \N{SNOWMAN}", + packages=["unicodedist"], + ) + """ + ), + "unicodedist": {"__init__.py": "", "åäö_日本語.py": ""}, + }, + "utf8-metadata-dist": { + "setup.cfg": cleandoc( + """ + [metadata] + name = utf8-metadata-dist + version = 42 + author_email = "John X. 
Ãørçeč" , Γαμα קּ 東 + long_description = file: README.rst + """ + ), + "README.rst": "UTF-8 描述 説明", + }, +} + + +if sys.platform != "win32": + EXAMPLES["abi3extension-dist"] = { + "setup.py": cleandoc( + """ + from setuptools import Extension, setup + + setup( + name="extension.dist", + version="0.1", + description="A testing distribution \N{SNOWMAN}", + ext_modules=[ + Extension( + name="extension", sources=["extension.c"], py_limited_api=True + ) + ], + ) + """ + ), + "setup.cfg": "[bdist_wheel]\npy_limited_api=cp32", + "extension.c": "#define Py_LIMITED_API 0x03020000\n#include ", + } + + +def bdist_wheel_cmd(**kwargs): + dist_obj = ( + run_setup("setup.py", stop_after="init") + if os.path.exists("setup.py") + else Distribution({"script_name": "%%build_meta%%"}) + ) + dist_obj.parse_config_files() + cmd = bdist_wheel(dist_obj) + for attr, value in kwargs.items(): + setattr(cmd, attr, value) + cmd.finalize_options() + return cmd + + +def mkexample(tmp_path_factory, name): + """Make sure entry point scripts are not generated.""" + basedir = tmp_path_factory.mktemp(name) + build_paths(EXAMPLES[name], prefix=str(basedir)) + return basedir + + +def mkwheel(build_dir=".build", dist_dir=".dist"): + bdist_wheel_cmd(bdist_dir=str(build_dir), dist_dir=str(dist_dir)).run() + + +@pytest.fixture(scope="session") +def wheel_paths(tmp_path_factory): + build_dir = tmp_path_factory.mktemp("build") + dist_dir = tmp_path_factory.mktemp("dist") + for name in EXAMPLES: + basedir = mkexample(tmp_path_factory, name) + with DirectoryStack().context(basedir): + bdist_wheel_cmd(bdist_dir=str(build_dir), dist_dir=str(dist_dir)).run() + + return sorted(str(fname) for fname in dist_dir.glob("*.whl")) + + +@pytest.fixture +def dummy_dist(tmp_path_factory): + return mkexample(tmp_path_factory, "dummy-dist") + + +def test_no_scripts(wheel_paths): + """Make sure entry point scripts are not generated.""" + path = next(path for path in wheel_paths if "complex_dist" in path) + for entry in ZipFile(path).infolist(): + assert ".data/scripts/" not in entry.filename + + +def test_unicode_record(wheel_paths): + path = next(path for path in wheel_paths if "unicode.dist" in path) + with ZipFile(path) as zf: + record = zf.read("unicode.dist-0.1.dist-info/RECORD") + + assert "åäö_日本語.py".encode() in record + + +def test_unicode_metadata(wheel_paths): + path = next(path for path in wheel_paths if "utf8_metadata_dist" in path) + with ZipFile(path) as zf: + metadata = str(zf.read("utf8_metadata_dist-42.dist-info/METADATA"), "utf-8") + assert 'Author-email: "John X. 
Ãørçeč"' in metadata + assert "Γαμα קּ 東 " in metadata + assert "UTF-8 描述 説明" in metadata + + +def test_licenses_default(dummy_dist, monkeypatch, tmp_path): + monkeypatch.chdir(dummy_dist) + + bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run() + with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf: + license_files = { + "dummy_dist-1.0.dist-info/" + fname for fname in DEFAULT_LICENSE_FILES + } + assert set(wf.namelist()) == DEFAULT_FILES | license_files + + +@pytest.mark.filterwarnings("ignore:.*license_file parameter is deprecated") +def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path): + dummy_dist.joinpath("setup.cfg").write_text( + "[metadata]\nlicense_file=licenses/DUMMYFILE", encoding="utf-8" + ) + monkeypatch.chdir(dummy_dist) + + with pytest.warns(SetuptoolsDeprecationWarning, match="use license_files"): + bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run() + + with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf: + license_files = {"dummy_dist-1.0.dist-info/DUMMYFILE"} + assert set(wf.namelist()) == DEFAULT_FILES | license_files + + +@pytest.mark.parametrize( + "config_file, config", + [ + ("setup.cfg", "[metadata]\nlicense_files=licenses/*\n LICENSE"), + ("setup.cfg", "[metadata]\nlicense_files=licenses/*, LICENSE"), + ( + "setup.py", + SETUPPY_EXAMPLE.replace( + ")", " license_files=['licenses/DUMMYFILE', 'LICENSE'])" + ), + ), + ], +) +def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, config): + dummy_dist.joinpath(config_file).write_text(config, encoding="utf-8") + monkeypatch.chdir(dummy_dist) + bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run() + with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf: + license_files = { + "dummy_dist-1.0.dist-info/" + fname for fname in {"DUMMYFILE", "LICENSE"} + } + assert set(wf.namelist()) == DEFAULT_FILES | license_files + + +def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path): + dummy_dist.joinpath("setup.cfg").write_text( + "[metadata]\nlicense_files=\n", encoding="utf-8" + ) + monkeypatch.chdir(dummy_dist) + bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run() + with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf: + assert set(wf.namelist()) == DEFAULT_FILES + + +def test_build_number(dummy_dist, monkeypatch, tmp_path): + monkeypatch.chdir(dummy_dist) + bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2", universal=True).run() + with ZipFile("dist/dummy_dist-1.0-2-py2.py3-none-any.whl") as wf: + filenames = set(wf.namelist()) + assert "dummy_dist-1.0.dist-info/RECORD" in filenames + assert "dummy_dist-1.0.dist-info/METADATA" in filenames + + +EXTENSION_EXAMPLE = """\ +#include + +static PyMethodDef methods[] = { + { NULL, NULL, 0, NULL } +}; + +static struct PyModuleDef module_def = { + PyModuleDef_HEAD_INIT, + "extension", + "Dummy extension module", + -1, + methods +}; + +PyMODINIT_FUNC PyInit_extension(void) { + return PyModule_Create(&module_def); +} +""" +EXTENSION_SETUPPY = """\ +from __future__ import annotations + +from setuptools import Extension, setup + +setup( + name="extension.dist", + version="0.1", + description="A testing distribution \N{SNOWMAN}", + ext_modules=[Extension(name="extension", sources=["extension.c"])], +) +""" + + +@pytest.mark.filterwarnings( + "once:Config variable '.*' is unset.*, Python ABI tag may be incorrect" +) +def test_limited_abi(monkeypatch, tmp_path, tmp_path_factory): + """Test that building a binary wheel with the limited ABI works.""" + proj_dir = 
tmp_path_factory.mktemp("dummy_dist") + (proj_dir / "setup.py").write_text(EXTENSION_SETUPPY, encoding="utf-8") + (proj_dir / "extension.c").write_text(EXTENSION_EXAMPLE, encoding="utf-8") + build_dir = tmp_path.joinpath("build") + dist_dir = tmp_path.joinpath("dist") + monkeypatch.chdir(proj_dir) + bdist_wheel_cmd(bdist_dir=str(build_dir), dist_dir=str(dist_dir)).run() + + +def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path): + basedir = str(tmp_path.joinpath("dummy")) + shutil.copytree(str(dummy_dist), basedir) + monkeypatch.chdir(basedir) + + # Make the tree read-only + for root, _dirs, files in os.walk(basedir): + for fname in files: + os.chmod(os.path.join(root, fname), stat.S_IREAD) + + bdist_wheel_cmd().run() + + +@pytest.mark.parametrize( + "option, compress_type", + list(bdist_wheel.supported_compressions.items()), + ids=list(bdist_wheel.supported_compressions), +) +def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type): + monkeypatch.chdir(dummy_dist) + bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True, compression=option).run() + with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf: + filenames = set(wf.namelist()) + assert "dummy_dist-1.0.dist-info/RECORD" in filenames + assert "dummy_dist-1.0.dist-info/METADATA" in filenames + for zinfo in wf.filelist: + assert zinfo.compress_type == compress_type + + +def test_wheelfile_line_endings(wheel_paths): + for path in wheel_paths: + with ZipFile(path) as wf: + wheelfile = next(fn for fn in wf.filelist if fn.filename.endswith("WHEEL")) + wheelfile_contents = wf.read(wheelfile) + assert b"\r" not in wheelfile_contents + + +def test_unix_epoch_timestamps(dummy_dist, monkeypatch, tmp_path): + monkeypatch.setenv("SOURCE_DATE_EPOCH", "0") + monkeypatch.chdir(dummy_dist) + bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2a", universal=True).run() + with ZipFile("dist/dummy_dist-1.0-2a-py2.py3-none-any.whl") as wf: + for zinfo in wf.filelist: + assert zinfo.date_time >= (1980, 1, 1, 0, 0, 0) # min epoch is used + + +def test_get_abi_tag_old(monkeypatch): + monkeypatch.setattr(tags, "interpreter_name", lambda: "pp") + monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy36-pp73") + assert _get_abi_tag() == "pypy36_pp73" + + +def test_get_abi_tag_new(monkeypatch): + monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy37-pp73-darwin") + monkeypatch.setattr(tags, "interpreter_name", lambda: "pp") + assert _get_abi_tag() == "pypy37_pp73" + + +def test_platform_with_space(dummy_dist, monkeypatch): + """Ensure building on platforms with a space in the name succeed.""" + monkeypatch.chdir(dummy_dist) + bdist_wheel_cmd(plat_name="isilon onefs").run() diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index fd7cf168ce..5f58d7ca3b 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -1,19 +1,21 @@ +import contextlib +import importlib import os -import sys +import re import shutil import signal +import sys import tarfile -import importlib -import contextlib from concurrent import futures -import re -from zipfile import ZipFile +from email import message_from_string from pathlib import Path +from zipfile import ZipFile import pytest from jaraco import path from .textwrap import DALS +from setuptools import _reqs SETUP_SCRIPT_STUB = "__import__('setuptools').setup()" @@ -232,7 +234,7 @@ def build_backend(self, tmpdir, request): def test_get_requires_for_build_wheel(self, build_backend): actual 
= build_backend.get_requires_for_build_wheel() - expected = ['six', 'wheel'] + expected = ['six'] assert sorted(actual) == sorted(expected) def test_get_requires_for_build_sdist(self, build_backend): @@ -437,12 +439,17 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script): "Summary: This is a Python package", "License: MIT", "Classifier: Intended Audience :: Developers", - "Requires-Dist: appdirs", - "Requires-Dist: tomli >=1 ; extra == 'all'", - "Requires-Dist: importlib ; python_version == \"2.6\" and extra == 'all'", ): assert line in metadata + reqs = [ + "appdirs", + "tomli>=1; extra == 'all'", + "importlib; python_version == '2.6' and extra == 'all'", + "pyscaffold<5,>=4; extra == 'all'", + ] + _assert_dependencies(metadata, reqs) + assert metadata.strip().endswith("This is a ``README``") assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main" @@ -792,17 +799,15 @@ def run(): build_backend = self.get_build_backend() if use_wheel: - base_requirements = ['wheel'] get_requires = build_backend.get_requires_for_build_wheel else: - base_requirements = [] get_requires = build_backend.get_requires_for_build_sdist # Ensure that the build requirements are properly parsed - expected = sorted(base_requirements + requirements) - actual = get_requires() + expected = sorted(requirements) + actual = sorted(get_requires()) - assert expected == sorted(actual) + assert expected == actual def test_setup_requires_with_auto_discovery(self, tmpdir_cwd): # Make sure patches introduced to retrieve setup_requires don't accidentally @@ -830,7 +835,7 @@ def test_setup_requires_with_auto_discovery(self, tmpdir_cwd): path.build(files) build_backend = self.get_build_backend() setup_requires = build_backend.get_requires_for_build_wheel() - assert setup_requires == ["wheel", "foo"] + assert setup_requires == ["foo"] def test_dont_install_setup_requires(self, tmpdir_cwd): files = { @@ -971,7 +976,7 @@ def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path): """ (tmp_path / "setup.py").write_text(DALS(setuppy), encoding="utf-8") backend = BuildBackend(backend_name="setuptools.build_meta") - assert backend.get_requires_for_build_wheel() == ["wheel"] + assert backend.get_requires_for_build_wheel() == [] def test_system_exit_in_setuppy(monkeypatch, tmp_path): @@ -981,3 +986,10 @@ def test_system_exit_in_setuppy(monkeypatch, tmp_path): with pytest.raises(SystemExit, match="some error"): backend = BuildBackend(backend_name="setuptools.build_meta") backend.get_requires_for_build_wheel() + + +def _assert_dependencies(metadata, reqs): + expected = _reqs.parse(reqs) + message = message_from_string(metadata) + found = _reqs.parse(message.get_all("Requires-Dist")) + assert set(found) == set(expected)
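
Below is a short usage sketch (illustrative, not part of the patch) of the behavioural change in setuptools.build_meta that the tests above exercise: the backend no longer injects "wheel" into the build requirements, because bdist_wheel now ships with setuptools itself.

    # Run from a project's root directory; the backend inspects setup.py/setup.cfg.
    import setuptools.build_meta as backend

    reqs = backend.get_requires_for_build_wheel()
    # Only requirements declared by the project itself (e.g. setup_requires)
    # remain; "wheel" is no longer added automatically.
    assert "wheel" not in reqs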