diff --git a/docs/deprecated/distutils/apiref.rst b/docs/deprecated/distutils/apiref.rst index 27072f8183..de689edfa4 100644 --- a/docs/deprecated/distutils/apiref.rst +++ b/docs/deprecated/distutils/apiref.rst @@ -361,7 +361,7 @@ This module provides the following functions. are not given. -.. function:: new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0) +.. function:: new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False) Factory function to generate an instance of some CCompiler subclass for the supplied platform/compiler combination. *plat* defaults to ``os.name`` (eg. @@ -383,7 +383,7 @@ This module provides the following functions. to :command:`build`, :command:`build_ext`, :command:`build_clib`). -.. class:: CCompiler([verbose=0, dry_run=0, force=0]) +.. class:: CCompiler([verbose=False, dry_run=False, force=False]) The abstract base class :class:`CCompiler` defines the interface that must be implemented by real compiler classes. The class also has some utility methods @@ -517,7 +517,7 @@ This module provides the following functions. list) to do the job. - .. method:: CCompiler.find_library_file(dirs, lib[, debug=0]) + .. method:: CCompiler.find_library_file(dirs, lib[, debug=False]) Search the specified list of directories for a static or shared library file *lib* and return the full path to that file. If *debug* is true, look for a @@ -580,7 +580,7 @@ This module provides the following functions. The following methods invoke stages in the build process. - .. method:: CCompiler.compile(sources[, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None]) + .. method:: CCompiler.compile(sources[, output_dir=None, macros=None, include_dirs=None, debug=False, extra_preargs=None, extra_postargs=None, depends=None]) Compile one or more source files. Generates object files (e.g. transforms a :file:`.c` file to a :file:`.o` file.) @@ -624,7 +624,7 @@ This module provides the following functions. Raises :exc:`CompileError` on failure. - .. method:: CCompiler.create_static_lib(objects, output_libname[, output_dir=None, debug=0, target_lang=None]) + .. method:: CCompiler.create_static_lib(objects, output_libname[, output_dir=None, debug=False, target_lang=None]) Link a bunch of stuff together to create a static library file. The "bunch of stuff" consists of the list of object files supplied as *objects*, the extra @@ -648,7 +648,7 @@ This module provides the following functions. Raises :exc:`LibError` on failure. - .. method:: CCompiler.link(target_desc, objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None]) + .. method:: CCompiler.link(target_desc, objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None]) Link a bunch of stuff together to create an executable or shared library file. @@ -690,21 +690,21 @@ This module provides the following functions. Raises :exc:`LinkError` on failure. - .. method:: CCompiler.link_executable(objects, output_progname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, target_lang=None]) + .. 
method:: CCompiler.link_executable(objects, output_progname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, debug=False, extra_preargs=None, extra_postargs=None, target_lang=None]) Link an executable. *output_progname* is the name of the executable file, while *objects* is a list of object filenames to link in. Other arguments are as for the :meth:`link` method. - .. method:: CCompiler.link_shared_lib(objects, output_libname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None]) + .. method:: CCompiler.link_shared_lib(objects, output_libname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None]) Link a shared library. *output_libname* is the name of the output library, while *objects* is a list of object filenames to link in. Other arguments are as for the :meth:`link` method. - .. method:: CCompiler.link_shared_object(objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None]) + .. method:: CCompiler.link_shared_object(objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None]) Link a shared object. *output_filename* is the name of the shared object that will be created, while *objects* is a list of object filenames to link in. @@ -726,14 +726,14 @@ This module provides the following functions. use by the various concrete subclasses. - .. method:: CCompiler.executable_filename(basename[, strip_dir=0, output_dir='']) + .. method:: CCompiler.executable_filename(basename[, strip_dir=False, output_dir='']) Returns the filename of the executable for the given *basename*. Typically for non-Windows platforms this is the same as the basename, while Windows will get a :file:`.exe` added. - .. method:: CCompiler.library_filename(libname[, lib_type='static', strip_dir=0, output_dir='']) + .. method:: CCompiler.library_filename(libname[, lib_type='static', strip_dir=False, output_dir='']) Returns the filename for the given library name on the current platform. On Unix a library with *lib_type* of ``'static'`` will typically be of the form :file:`liblibname.a`, while a *lib_type* of ``'dynamic'`` will be of the form :file:`liblibname.so`. - .. method:: CCompiler.object_filenames(source_filenames[, strip_dir=0, output_dir='']) + .. method:: CCompiler.object_filenames(source_filenames[, strip_dir=False, output_dir='']) Returns the name of the object files for the given source files. *source_filenames* should be a list of filenames. - .. method:: CCompiler.shared_object_filename(basename[, strip_dir=0, output_dir='']) + .. method:: CCompiler.shared_object_filename(basename[, strip_dir=False, output_dir='']) Returns the name of a shared object file for the given file name *basename*. @@ -884,7 +884,7 @@ This module provides a few functions for creating archive files, such as tarballs or zipfiles. -.. function:: make_archive(base_name, format[, root_dir=None, base_dir=None, verbose=0, dry_run=0]) +..
function:: make_archive(base_name, format[, root_dir=None, base_dir=None, verbose=False, dry_run=False]) Create an archive file (eg. ``zip`` or ``tar``). *base_name* is the name of the file to create, minus any format-specific extension; *format* is the @@ -900,7 +900,7 @@ tarballs or zipfiles. Added support for the ``xztar`` format. -.. function:: make_tarball(base_name, base_dir[, compress='gzip', verbose=0, dry_run=0]) +.. function:: make_tarball(base_name, base_dir[, compress='gzip', verbose=False, dry_run=False]) Create an (optionally compressed) archive as a tar file from all files in and under *base_dir*. *compress* must be ``'gzip'`` (the default), @@ -915,7 +915,7 @@ tarballs or zipfiles. Added support for the ``xz`` compression. -.. function:: make_zipfile(base_name, base_dir[, verbose=0, dry_run=0]) +.. function:: make_zipfile(base_name, base_dir[, verbose=False, dry_run=False]) Create a zip file from all files in and under *base_dir*. The output zip file will be named *base_name* + :file:`.zip`. Uses either the :mod:`zipfile` Python @@ -978,7 +978,7 @@ This module provides functions for operating on directories and trees of directories. -.. function:: mkpath(name[, mode=0o777, verbose=0, dry_run=0]) +.. function:: mkpath(name[, mode=0o777, verbose=False, dry_run=False]) Create a directory and any missing ancestor directories. If the directory already exists (or if *name* is the empty string, which means the current @@ -989,7 +989,7 @@ directories. directories actually created. -.. function:: create_tree(base_dir, files[, mode=0o777, verbose=0, dry_run=0]) +.. function:: create_tree(base_dir, files[, mode=0o777, verbose=False, dry_run=False]) Create all the empty directories under *base_dir* needed to put *files* there. *base_dir* is just the name of a directory which doesn't necessarily exist @@ -999,7 +999,7 @@ directories. :func:`mkpath`. -.. function:: copy_tree(src, dst[, preserve_mode=1, preserve_times=1, preserve_symlinks=0, update=0, verbose=0, dry_run=0]) +.. function:: copy_tree(src, dst[, preserve_mode=True, preserve_times=True, preserve_symlinks=False, update=False, verbose=False, dry_run=False]) Copy an entire directory tree *src* to a new location *dst*. Both *src* and *dst* must be directory names. If *src* is not a directory, raise @@ -1026,7 +1026,7 @@ directories. .. versionchanged:: 3.3.1 NFS files are ignored. -.. function:: remove_tree(directory[, verbose=0, dry_run=0]) +.. function:: remove_tree(directory[, verbose=False, dry_run=False]) Recursively remove *directory* and all files and directories underneath it. Any errors are ignored (apart from being reported to ``sys.stdout`` if *verbose* is @@ -1043,7 +1043,7 @@ directories. This module contains some utility functions for operating on individual files. -.. function:: copy_file(src, dst[, preserve_mode=1, preserve_times=1, update=0, link=None, verbose=0, dry_run=0]) +.. function:: copy_file(src, dst[, preserve_mode=True, preserve_times=True, update=False, link=None, verbose=False, dry_run=False]) Copy file *src* to *dst*. If *dst* is a directory, then *src* is copied there with the same name; otherwise, it must be a filename. (If the file exists, it @@ -1216,7 +1216,7 @@ other utility module. .. % Should probably be moved into the standard library. -.. function:: execute(func, args[, msg=None, verbose=0, dry_run=0]) +.. function:: execute(func, args[, msg=None, verbose=False, dry_run=False]) Perform some action that affects the outside world (for instance, writing to the filesystem).
Such actions are special because they are disabled by the @@ -1234,7 +1234,7 @@ other utility module. :exc:`ValueError` if *val* is anything else. -.. function:: byte_compile(py_files[, optimize=0, force=0, prefix=None, base_dir=None, verbose=1, dry_run=0, direct=None]) +.. function:: byte_compile(py_files[, optimize=0, force=False, prefix=None, base_dir=None, verbose=True, dry_run=False, direct=None]) Byte-compile a collection of Python source files to :file:`.pyc` files in a :file:`__pycache__` subdirectory (see :pep:`3147` and :pep:`488`). diff --git a/docs/deprecated/distutils/configfile.rst b/docs/deprecated/distutils/configfile.rst index b104db789a..e012e5d233 100644 --- a/docs/deprecated/distutils/configfile.rst +++ b/docs/deprecated/distutils/configfile.rst @@ -96,7 +96,7 @@ configuration file for this distribution: .. code-block:: ini [build_ext] - inplace=1 + inplace=true This will affect all builds of this module distribution, whether or not you explicitly specify :command:`build_ext`. If you include :file:`setup.cfg` in diff --git a/docs/deprecated/distutils/setupscript.rst b/docs/deprecated/distutils/setupscript.rst index ceadaf5aab..de68a5c320 100644 --- a/docs/deprecated/distutils/setupscript.rst +++ b/docs/deprecated/distutils/setupscript.rst @@ -274,7 +274,7 @@ search path, though, you can find that directory using the Distutils :mod:`distutils.sysconfig` module:: from distutils.sysconfig import get_python_inc - incdir = os.path.join(get_python_inc(plat_specific=1), 'Numerical') + incdir = os.path.join(get_python_inc(plat_specific=True), 'Numerical') setup(..., Extension(..., include_dirs=[incdir]), ) diff --git a/newsfragments/4444.feature.rst b/newsfragments/4444.feature.rst new file mode 100644 index 0000000000..20b30f8ca4 --- /dev/null +++ b/newsfragments/4444.feature.rst @@ -0,0 +1 @@ +Updated distutils, including significant changes to support Cygwin and MinGW compilers. diff --git a/setuptools/_distutils/_modified.py b/setuptools/_distutils/_modified.py index 78485dc25e..6532aa1073 100644 --- a/setuptools/_distutils/_modified.py +++ b/setuptools/_distutils/_modified.py @@ -4,8 +4,8 @@ import os.path from ._functools import splat +from .compat.py39 import zip_strict from .errors import DistutilsFileError -from .py39compat import zip_strict def _newer(source, target): @@ -24,7 +24,7 @@ def newer(source, target): Raises DistutilsFileError if 'source' does not exist.
""" if not os.path.exists(source): - raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source)) + raise DistutilsFileError(f"file '{os.path.abspath(source)}' does not exist") return _newer(source, target) diff --git a/setuptools/_distutils/_msvccompiler.py b/setuptools/_distutils/_msvccompiler.py index a2159fef83..b0322410c5 100644 --- a/setuptools/_distutils/_msvccompiler.py +++ b/setuptools/_distutils/_msvccompiler.py @@ -218,7 +218,7 @@ class MSVCCompiler(CCompiler): static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' - def __init__(self, verbose=0, dry_run=0, force=0): + def __init__(self, verbose=False, dry_run=False, force=False): super().__init__(verbose, dry_run, force) # target platform (.plat_name is consistent with 'bdist') self.plat_name = None @@ -334,7 +334,7 @@ def compile( # noqa: C901 output_dir=None, macros=None, include_dirs=None, - debug=0, + debug=False, extra_preargs=None, extra_postargs=None, depends=None, @@ -423,7 +423,7 @@ def compile( # noqa: C901 return objects def create_static_lib( - self, objects, output_libname, output_dir=None, debug=0, target_lang=None + self, objects, output_libname, output_dir=None, debug=False, target_lang=None ): if not self.initialized: self.initialize() @@ -452,7 +452,7 @@ def link( library_dirs=None, runtime_library_dirs=None, export_symbols=None, - debug=0, + debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, @@ -551,7 +551,7 @@ def runtime_library_dir_option(self, dir): def library_option(self, lib): return self.library_filename(lib) - def find_library_file(self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=False): # Prefer a debugging library if found (and requested), but deal # with it if we don't have one. if debug: diff --git a/setuptools/_distutils/_vendor/__init__.py b/setuptools/_distutils/_vendor/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/INSTALLER b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/INSTALLER new file mode 100644 index 0000000000..a1b589e38a --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE new file mode 100644 index 0000000000..6f62d44e4e --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE new file mode 100644 index 0000000000..f433b1a53f --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD new file mode 100644 index 0000000000..42ce7b75c9 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/METADATA b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/METADATA new file mode 100644 index 0000000000..10ab4390a9 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.1 +Name: packaging +Version: 24.0 +Summary: Core utilities for Python packages +Author-email: Donald Stufft <donald@stufft.io> +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications <https://packaging.python.org/specifications/>`_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +The ``packaging`` library uses calendar-based versioning (``YY.N``). + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +..
_PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/RECORD b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/RECORD new file mode 100644 index 0000000000..bcf796c2f4 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/RECORD @@ -0,0 +1,37 @@ +packaging-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +packaging-24.0.dist-info/METADATA,sha256=0dESdhY_wHValuOrbgdebiEw04EbX4dkujlxPdEsFus,3203 +packaging-24.0.dist-info/RECORD,, +packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496 +packaging/__pycache__/__init__.cpython-312.pyc,, +packaging/__pycache__/_elffile.cpython-312.pyc,, +packaging/__pycache__/_manylinux.cpython-312.pyc,, +packaging/__pycache__/_musllinux.cpython-312.pyc,, +packaging/__pycache__/_parser.cpython-312.pyc,, +packaging/__pycache__/_structures.cpython-312.pyc,, +packaging/__pycache__/_tokenizer.cpython-312.pyc,, +packaging/__pycache__/markers.cpython-312.pyc,, +packaging/__pycache__/metadata.cpython-312.pyc,, +packaging/__pycache__/requirements.cpython-312.pyc,, +packaging/__pycache__/specifiers.cpython-312.pyc,, +packaging/__pycache__/tags.cpython-312.pyc,, +packaging/__pycache__/utils.cpython-312.pyc,, +packaging/__pycache__/version.cpython-312.pyc,, +packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266 +packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590 +packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676 +packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347 +packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292 +packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208 +packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036 +packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933 +packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784 +packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950 +packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268 
+packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236 diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/REQUESTED b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/REQUESTED new file mode 100644 index 0000000000..e69de29bb2 diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/WHEEL b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/WHEEL new file mode 100644 index 0000000000..3b5e64b5e6 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/setuptools/_distutils/_vendor/packaging/__init__.py b/setuptools/_distutils/_vendor/packaging/__init__.py new file mode 100644 index 0000000000..e7c0aa12ca --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/__init__.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "24.0" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = "2014 %s" % __author__ diff --git a/setuptools/_distutils/_vendor/packaging/_elffile.py b/setuptools/_distutils/_vendor/packaging/_elffile.py new file mode 100644 index 0000000000..6fb19b30bb --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/_elffile.py @@ -0,0 +1,108 @@ +""" +ELF file parser. + +This provides a class ``ELFFile`` that parses an ELF executable in a similar +interface to ``ZipFile``. Only the read interface is implemented. + +Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca +ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html +""" + +import enum +import os +import struct +from typing import IO, Optional, Tuple + + +class ELFInvalid(ValueError): + pass + + +class EIClass(enum.IntEnum): + C32 = 1 + C64 = 2 + + +class EIData(enum.IntEnum): + Lsb = 1 + Msb = 2 + + +class EMachine(enum.IntEnum): + I386 = 3 + S390 = 22 + Arm = 40 + X8664 = 62 + AArc64 = 183 + + +class ELFFile: + """ + Representation of an ELF executable. + """ + + def __init__(self, f: IO[bytes]) -> None: + self._f = f + + try: + ident = self._read("16B") + except struct.error: + raise ELFInvalid("unable to parse identification") + magic = bytes(ident[:4]) + if magic != b"\x7fELF": + raise ELFInvalid(f"invalid magic: {magic!r}") + + self.capacity = ident[4] # Format for program header (bitness). + self.encoding = ident[5] # Data structure encoding (endianness). + + try: + # e_fmt: Format for program header. + # p_fmt: Format for section header. + # p_idx: Indexes to find p_type, p_offset, and p_filesz. + e_fmt, self._p_fmt, self._p_idx = { + (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)), # 32-bit LSB. + (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB. + (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)), # 64-bit LSB. + (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB. + }[(self.capacity, self.encoding)] + except KeyError: + raise ELFInvalid( + f"unrecognized capacity ({self.capacity}) or " + f"encoding ({self.encoding})" + ) + + try: + ( + _, + self.machine, # Architecture type. + _, + _, + self._e_phoff, # Offset of program header. + _, + self.flags, # Processor-specific flags. + _, + self._e_phentsize, # Size of section. + self._e_phnum, # Number of sections.
+ ) = self._read(e_fmt) + except struct.error as e: + raise ELFInvalid("unable to parse machine and section information") from e + + def _read(self, fmt: str) -> Tuple[int, ...]: + return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) + + @property + def interpreter(self) -> Optional[str]: + """ + The path recorded in the ``PT_INTERP`` section header. + """ + for index in range(self._e_phnum): + self._f.seek(self._e_phoff + self._e_phentsize * index) + try: + data = self._read(self._p_fmt) + except struct.error: + continue + if data[self._p_idx[0]] != 3: # Not PT_INTERP. + continue + self._f.seek(data[self._p_idx[1]]) + return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") + return None diff --git a/setuptools/_distutils/_vendor/packaging/_manylinux.py b/setuptools/_distutils/_vendor/packaging/_manylinux.py new file mode 100644 index 0000000000..ad62505f3f --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/_manylinux.py @@ -0,0 +1,260 @@ +import collections +import contextlib +import functools +import os +import re +import sys +import warnings +from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple + +from ._elffile import EIClass, EIData, ELFFile, EMachine + +EF_ARM_ABIMASK = 0xFF000000 +EF_ARM_ABI_VER5 = 0x05000000 +EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + +# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` +# as the type for `path` until then. +@contextlib.contextmanager +def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: + try: + with open(path, "rb") as f: + yield ELFFile(f) + except (OSError, TypeError, ValueError): + yield None + + +def _is_linux_armhf(executable: str) -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.Arm + and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 + and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD + ) + + +def _is_linux_i686(executable: str) -> bool: + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.I386 + ) + + +def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: + if "armv7l" in archs: + return _is_linux_armhf(executable) + if "i686" in archs: + return _is_linux_i686(executable) + allowed_archs = { + "x86_64", + "aarch64", + "ppc64", + "ppc64le", + "s390x", + "loongarch64", + "riscv64", + } + return any(arch in allowed_archs for arch in archs) + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> Optional[str]: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. 
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 + try: + # Should be a string like "glibc 2.17". + version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.rsplit() + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes() -> Optional[str]: + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can proceed, so we bail on our attempt. + try: + process_namespace = ctypes.CDLL(None) + except OSError: + return None + + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str: str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +def _glibc_version_string() -> Optional[str]: + """Returns glibc version string, or None if not using glibc.""" + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _parse_glibc_version(version_str: str) -> Tuple[int, int]: + """Parse glibc version. + + We use a regexp instead of str.split because we want to discard any + random junk that might come after the minor version -- this might happen + in patched/forked versions of glibc (e.g. Linaro's version of glibc + uses version strings like "2.20-2014.11"). See gh-3588. + """ + m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) + if not m: + warnings.warn( + f"Expected glibc version with 2 components major.minor," + f" got: {version_str}", + RuntimeWarning, + ) + return -1, -1 + return int(m.group("major")), int(m.group("minor")) + + +@functools.lru_cache() +def _get_glibc_version() -> Tuple[int, int]: + version_str = _glibc_version_string() + if version_str is None: + return (-1, -1) + return _parse_glibc_version(version_str) + + +# From PEP 513, PEP 600 +def _is_compatible(arch: str, version: _GLibCVersion) -> bool: + sys_glibc = _get_glibc_version() + if sys_glibc < version: + return False + # Check for presence of _manylinux module.
+ try: + import _manylinux + except ImportError: + return True + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(version[0], version[1], arch) + if result is not None: + return bool(result) + return True + if version == _GLibCVersion(2, 5): + if hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if version == _GLibCVersion(2, 12): + if hasattr(_manylinux, "manylinux2010_compatible"): + return bool(_manylinux.manylinux2010_compatible) + if version == _GLibCVersion(2, 17): + if hasattr(_manylinux, "manylinux2014_compatible"): + return bool(_manylinux.manylinux2014_compatible) + return True + + +_LEGACY_MANYLINUX_MAP = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + (2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + (2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + (2, 5): "manylinux1", +} + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate manylinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be manylinux-compatible. + + :returns: An iterator of compatible manylinux tags. + """ + if not _have_compatible_abi(sys.executable, archs): + return + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = _GLibCVersion(2, 16) + if set(archs) & {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5). + too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for arch in archs: + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + tag = "manylinux_{}_{}".format(*glibc_version) + if _is_compatible(arch, glibc_version): + yield f"{tag}_{arch}" + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if glibc_version in _LEGACY_MANYLINUX_MAP: + legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] + if _is_compatible(arch, glibc_version): + yield f"{legacy_tag}_{arch}" diff --git a/setuptools/_distutils/_vendor/packaging/_musllinux.py b/setuptools/_distutils/_vendor/packaging/_musllinux.py new file mode 100644 index 0000000000..86419df9d7 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/_musllinux.py @@ -0,0 +1,83 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. 
+""" + +import functools +import re +import subprocess +import sys +from typing import Iterator, NamedTuple, Optional, Sequence + +from ._elffile import ELFFile + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> Optional[_MuslVersion]: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache() +def _get_musl_version(executable: str) -> Optional[_MuslVersion]: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: + return None + proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be musllinux-compatible. + + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. + return + for arch in archs: + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/setuptools/_distutils/_vendor/packaging/_parser.py b/setuptools/_distutils/_vendor/packaging/_parser.py new file mode 100644 index 0000000000..684df75457 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/_parser.py @@ -0,0 +1,356 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains ENBF-inspired grammar representing +the implementation. 
+""" + +import ast +from typing import Any, List, NamedTuple, Optional, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] +# MarkerList = List[Union["MarkerList", MarkerAtom, str]] +# mypy does not support recursive type definition +# https://github.com/python/mypy/issues/731 +MarkerAtom = Any +MarkerList = List[Any] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: List[str] + specifier: str + marker: Optional[MarkerList] + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> Tuple[str, str, Optional[MarkerList]]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, span_start=url_start, after="URL and whitespace" + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + after=( + "version specifier" + if specifier + else "name and no valid version specifier" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, after: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? 
+ """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected end or semicolon (after {after})", + span_start=span_start, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> List[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens( + "LEFT_BRACKET", + "RIGHT_BRACKET", + around="extras", + ): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: List[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? + """ + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="version specifier", + ): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? + """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + span_start = tokenizer.position + parsed_specifiers += tokenizer.read().text + if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): + tokenizer.raise_syntax_error( + ".* suffix can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position + 1, + ) + if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): + tokenizer.raise_syntax_error( + "Local version label can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position, + ) + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: + retval = _parse_marker(tokenizer) + tokenizer.expect("END", expected="end of marker expression") + return retval + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? 
LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="marker expression", + ): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? + """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if env_var in ("platform_python_implementation", "python_implementation"): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of " + "<=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/setuptools/_distutils/_vendor/packaging/_structures.py b/setuptools/_distutils/_vendor/packaging/_structures.py new file mode 100644 index 0000000000..90a6465f96 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/_structures.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +class InfinityType: + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/setuptools/_distutils/_vendor/packaging/_tokenizer.py b/setuptools/_distutils/_vendor/packaging/_tokenizer.py new file mode 100644 index 0000000000..dd0d648d49 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/_tokenizer.py @@ -0,0 +1,192 @@ +import contextlib +import re +from dataclasses import dataclass +from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: Tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return "\n ".join([self.message, self.source, marker]) + + +DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { + "LEFT_PARENTHESIS": r"\(", + "RIGHT_PARENTHESIS": r"\)", + "LEFT_BRACKET": r"\[", + "RIGHT_BRACKET": r"\]", + "SEMICOLON": r";", + "COMMA": r",", + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": r"(===|==|~=|!=|<=|>=|<|>)", + "BOOLOP": r"\b(or|and)\b", + "IN": r"\bin\b", + "NOT": r"\bnot\b", + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extra + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": r"\@", + "URL": r"[^ \t]+", + "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", + "VERSION_PREFIX_TRAIL": r"\.\*", + "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", + "WS": r"[ \t]+", + "END": r"$", +} + + +class Tokenizer: + """Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. 
+ """ + + def __init__( + self, + source: str, + *, + rules: "Dict[str, Union[str, re.Pattern[str]]]", + ) -> None: + self.source = source + self.rules: Dict[str, re.Pattern[str]] = { + name: re.compile(pattern) for name, pattern in rules.items() + } + self.next_token: Optional[Token] = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert ( + self.next_token is None + ), f"Cannot check for {name!r}, already have {self.next_token!r}" + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. + """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: Optional[int] = None, + span_end: Optional[int] = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens( + self, open_token: str, close_token: str, *, around: str + ) -> Iterator[None]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected matching {close_token} for {open_token}, after {around}", + span_start=open_position, + ) + + self.read() diff --git a/setuptools/_distutils/_vendor/packaging/markers.py b/setuptools/_distutils/_vendor/packaging/markers.py new file mode 100644 index 0000000000..8b98fca723 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/markers.py @@ -0,0 +1,252 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+
+import operator
+import os
+import platform
+import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from ._parser import (
+    MarkerAtom,
+    MarkerList,
+    Op,
+    Value,
+    Variable,
+    parse_marker as _parse_marker,
+)
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name
+
+__all__ = [
+    "InvalidMarker",
+    "UndefinedComparison",
+    "UndefinedEnvironmentName",
+    "Marker",
+    "default_environment",
+]
+
+Operator = Callable[[str, str], bool]
+
+
+class InvalidMarker(ValueError):
+    """
+    An invalid marker was found, users should refer to PEP 508.
+    """
+
+
+class UndefinedComparison(ValueError):
+    """
+    An invalid operation was attempted on a value that doesn't support it.
+    """
+
+
+class UndefinedEnvironmentName(ValueError):
+    """
+    A name was used that does not exist inside of the environment.
+    """
+
+
+def _normalize_extra_values(results: Any) -> Any:
+    """
+    Normalize extra values.
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results
+
+
+def _format_marker(
+    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
+) -> str:
+
+    assert isinstance(marker, (list, tuple, str))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself its own list. In that case we want to
+    # skip the rest of this function so that we don't get extraneous () on
+    # the outside.
+ if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators: Dict[str, Operator] = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs: str, op: Op, rhs: str) -> bool: + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs, prereleases=True) + + oper: Optional[Operator] = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") + + return oper(lhs, rhs) + + +def _normalize(*values: str, key: str) -> Tuple[str, ...]: + # PEP 685 – Comparison of extra names for optional distribution dependencies + # https://peps.python.org/pep-0685/ + # > When comparing extra names, tools MUST normalize the names being + # > compared using the semantics outlined in PEP 503 for names + if key == "extra": + return tuple(canonicalize_name(v) for v in values) + + # other environment markers don't have such standards + return values + + +def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: + groups: List[List[bool]] = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, str)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + environment_key = lhs.value + lhs_value = environment[environment_key] + rhs_value = rhs.value + else: + lhs_value = lhs.value + environment_key = rhs.value + rhs_value = environment[environment_key] + + lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info: "sys._version_info") -> str: + version = "{0.major}.{0.minor}.{0.micro}".format(info) + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment() -> Dict[str, str]: + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker: + def __init__(self, marker: str) -> None: + # Note: We create a Marker object without calling this constructor in + # packaging.requirements.Requirement. If any additional logic is + # added here, make sure to mirror/adapt Requirement. 
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = default_environment()
+        current_environment["extra"] = ""
+        if environment is not None:
+            current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
+
+        return _evaluate_markers(self._markers, current_environment)
diff --git a/setuptools/_distutils/_vendor/packaging/metadata.py b/setuptools/_distutils/_vendor/packaging/metadata.py
new file mode 100644
index 0000000000..fb27493079
--- /dev/null
+++ b/setuptools/_distutils/_vendor/packaging/metadata.py
@@ -0,0 +1,825 @@
+import email.feedparser
+import email.header
+import email.message
+import email.parser
+import email.policy
+import sys
+import typing
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from . import requirements, specifiers, utils, version as version_module
+
+T = typing.TypeVar("T")
+if sys.version_info[:2] >= (3, 8):  # pragma: no cover
+    from typing import Literal, TypedDict
+else:  # pragma: no cover
+    if typing.TYPE_CHECKING:
+        from typing_extensions import Literal, TypedDict
+    else:
+        try:
+            from typing_extensions import Literal, TypedDict
+        except ImportError:
+
+            class Literal:
+                def __init_subclass__(*_args, **_kwargs):
+                    pass
+
+            class TypedDict:
+                def __init_subclass__(*_args, **_kwargs):
+                    pass
+
+
+try:
+    ExceptionGroup
+except NameError:  # pragma: no cover
+
+    class ExceptionGroup(Exception):  # noqa: N818
+        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+        If :external:exc:`ExceptionGroup` is already defined by Python itself,
+        that version is used instead.
+ """ + + message: str + exceptions: List[Exception] + + def __init__(self, message: str, exceptions: List[Exception]) -> None: + self.message = message + self.exceptions = exceptions + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" + +else: # pragma: no cover + ExceptionGroup = ExceptionGroup + + +class InvalidMetadata(ValueError): + """A metadata field contains invalid data.""" + + field: str + """The name of the field that contains invalid data.""" + + def __init__(self, field: str, message: str) -> None: + self.field = field + super().__init__(message) + + +# The RawMetadata class attempts to make as few assumptions about the underlying +# serialization formats as possible. The idea is that as long as a serialization +# formats offer some very basic primitives in *some* way then we can support +# serializing to and from that format. +class RawMetadata(TypedDict, total=False): + """A dictionary of raw core metadata. + + Each field in core metadata maps to a key of this dictionary (when data is + provided). The key is lower-case and underscores are used instead of dashes + compared to the equivalent core metadata field. Any core metadata field that + can be specified multiple times or can hold multiple values in a single + field have a key with a plural name. See :class:`Metadata` whose attributes + match the keys of this dictionary. + + Core metadata fields that can be specified multiple times are stored as a + list or dict depending on which is appropriate for the field. Any fields + which hold multiple values in a single field are stored as a list. + + """ + + # Metadata 1.0 - PEP 241 + metadata_version: str + name: str + version: str + platforms: List[str] + summary: str + description: str + keywords: List[str] + home_page: str + author: str + author_email: str + license: str + + # Metadata 1.1 - PEP 314 + supported_platforms: List[str] + download_url: str + classifiers: List[str] + requires: List[str] + provides: List[str] + obsoletes: List[str] + + # Metadata 1.2 - PEP 345 + maintainer: str + maintainer_email: str + requires_dist: List[str] + provides_dist: List[str] + obsoletes_dist: List[str] + requires_python: str + requires_external: List[str] + project_urls: Dict[str, str] + + # Metadata 2.0 + # PEP 426 attempted to completely revamp the metadata format + # but got stuck without ever being able to build consensus on + # it and ultimately ended up withdrawn. + # + # However, a number of tools had started emitting METADATA with + # `2.0` Metadata-Version, so for historical reasons, this version + # was skipped. + + # Metadata 2.1 - PEP 566 + description_content_type: str + provides_extra: List[str] + + # Metadata 2.2 - PEP 643 + dynamic: List[str] + + # Metadata 2.3 - PEP 685 + # No new fields were added in PEP 685, just some edge case were + # tightened up to provide better interoptability. 
+
+
+_STRING_FIELDS = {
+    "author",
+    "author_email",
+    "description",
+    "description_content_type",
+    "download_url",
+    "home_page",
+    "license",
+    "maintainer",
+    "maintainer_email",
+    "metadata_version",
+    "name",
+    "requires_python",
+    "summary",
+    "version",
+}
+
+_LIST_FIELDS = {
+    "classifiers",
+    "dynamic",
+    "obsoletes",
+    "obsoletes_dist",
+    "platforms",
+    "provides",
+    "provides_dist",
+    "provides_extra",
+    "requires",
+    "requires_dist",
+    "requires_external",
+    "supported_platforms",
+}
+
+_DICT_FIELDS = {
+    "project_urls",
+}
+
+
+def _parse_keywords(data: str) -> List[str]:
+    """Split a string of comma-separated keywords into a list of keywords."""
+    return [k.strip() for k in data.split(",")]
+
+
+def _parse_project_urls(data: List[str]) -> Dict[str, str]:
+    """Parse a list of label/URL string pairings separated by a comma."""
+    urls = {}
+    for pair in data:
+        # Our logic is slightly tricky here as we want to try and do
+        # *something* reasonable with malformed data.
+        #
+        # The main thing that we have to worry about, is data that does
+        # not have a ',' at all to split the label from the value. There
+        # isn't a singular right answer here, and we will fail validation
+        # later on (if the caller is validating) so it doesn't *really*
+        # matter, but since the missing value has to be an empty str
+        # and our return value is dict[str, str], if we let the key
+        # be the missing value, then they'd have multiple '' values that
+        # overwrite each other in an accumulating dict.
+        #
+        # The other potential issue is that it's possible to have the
+        # same label multiple times in the metadata, with no solid "right"
+        # answer with what to do in that case. As such, we'll do the only
+        # thing we can, which is treat the field as unparseable and add it
+        # to our list of unparsed fields.
+        parts = [p.strip() for p in pair.split(",", 1)]
+        parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items
+
+        # TODO: The spec doesn't say anything about if the keys should be
+        #       considered case sensitive or not... logically they should
+        #       be case-preserving and case-insensitive, but doing that
+        #       would open up more cases where we might have duplicate
+        #       entries.
+        label, url = parts
+        if label in urls:
+            # The label already exists in our set of urls, so this field
+            # is unparseable, and we can just add the whole thing to our
+            # unparseable data and stop processing it.
+            raise KeyError("duplicate labels in project urls")
+        urls[label] = url
+
+    return urls
+
+
+def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
+    """Get the body of the message."""
+    # If our source is a str, then our caller has managed encodings for us,
+    # and we don't need to deal with it.
+    if isinstance(source, str):
+        payload: str = msg.get_payload()
+        return payload
+    # If our source is a bytes, then we're managing the encoding and we need
+    # to deal with it.
+    else:
+        bpayload: bytes = msg.get_payload(decode=True)
+        try:
+            return bpayload.decode("utf8", "strict")
+        except UnicodeDecodeError:
+            raise ValueError("payload in an invalid encoding")
+
+
+# The various parse_FORMAT functions here are intended to be as lenient as
+# possible in their parsing, while still returning a correctly typed
+# RawMetadata.
+#
+# To aid in this, we also generally want to do as little touching of the
+# data as possible, except where there are possibly some historic holdovers
+# that make valid data awkward to work with.
+# +# While this is a lower level, intermediate format than our ``Metadata`` +# class, some light touch ups can make a massive difference in usability. + +# Map METADATA fields to RawMetadata. +_EMAIL_TO_RAW_MAPPING = { + "author": "author", + "author-email": "author_email", + "classifier": "classifiers", + "description": "description", + "description-content-type": "description_content_type", + "download-url": "download_url", + "dynamic": "dynamic", + "home-page": "home_page", + "keywords": "keywords", + "license": "license", + "maintainer": "maintainer", + "maintainer-email": "maintainer_email", + "metadata-version": "metadata_version", + "name": "name", + "obsoletes": "obsoletes", + "obsoletes-dist": "obsoletes_dist", + "platform": "platforms", + "project-url": "project_urls", + "provides": "provides", + "provides-dist": "provides_dist", + "provides-extra": "provides_extra", + "requires": "requires", + "requires-dist": "requires_dist", + "requires-external": "requires_external", + "requires-python": "requires_python", + "summary": "summary", + "supported-platform": "supported_platforms", + "version": "version", +} +_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} + + +def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]: + """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). + + This function returns a two-item tuple of dicts. The first dict is of + recognized fields from the core metadata specification. Fields that can be + parsed and translated into Python's built-in types are converted + appropriately. All other fields are left as-is. Fields that are allowed to + appear multiple times are stored as lists. + + The second dict contains all other fields from the metadata. This includes + any unrecognized fields. It also includes any fields which are expected to + be parsed into a built-in type but were not formatted appropriately. Finally, + any fields that are expected to appear only once but are repeated are + included in this dict. + + """ + raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {} + unparsed: Dict[str, List[str]] = {} + + if isinstance(data, str): + parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) + else: + parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) + + # We have to wrap parsed.keys() in a set, because in the case of multiple + # values for a key (a list), the key will appear multiple times in the + # list of keys, but we're avoiding that by using get_all(). + for name in frozenset(parsed.keys()): + # Header names in RFC are case insensitive, so we'll normalize to all + # lower case to make comparisons easier. + name = name.lower() + + # We use get_all() here, even for fields that aren't multiple use, + # because otherwise someone could have e.g. two Name fields, and we + # would just silently ignore it rather than doing something about it. + headers = parsed.get_all(name) or [] + + # The way the email module works when parsing bytes is that it + # unconditionally decodes the bytes as ascii using the surrogateescape + # handler. When you pull that data back out (such as with get_all() ), + # it looks to see if the str has any surrogate escapes, and if it does + # it wraps it in a Header object instead of returning the string. + # + # As such, we'll look for those Header objects, and fix up the encoding. 
+        value = []
+        # Flag if we have run into any issues processing the headers, thus
+        # signalling that the data belongs in 'unparsed'.
+        valid_encoding = True
+        for h in headers:
+            # It's unclear if this can return more types than just a Header or
+            # a str, so we'll just assert here to make sure.
+            assert isinstance(h, (email.header.Header, str))
+
+            # If it's a header object, we need to do our little dance to get
+            # the real data out of it. In cases where there is invalid data
+            # we're going to end up with mojibake, but there's no obvious, good
+            # way around that without reimplementing parts of the Header object
+            # ourselves.
+            #
+            # That should be fine since, if mojibake happens, this key is
+            # going into the unparsed dict anyways.
+            if isinstance(h, email.header.Header):
+                # The Header object stores its data as chunks, and each chunk
+                # can be independently encoded, so we'll need to check each
+                # of them.
+                chunks: List[Tuple[bytes, Optional[str]]] = []
+                for bin, encoding in email.header.decode_header(h):
+                    try:
+                        bin.decode("utf8", "strict")
+                    except UnicodeDecodeError:
+                        # Enable mojibake.
+                        encoding = "latin1"
+                        valid_encoding = False
+                    else:
+                        encoding = "utf8"
+                    chunks.append((bin, encoding))
+
+                # Turn our chunks back into a Header object, then let that
+                # Header object do the right thing to turn them into a
+                # string for us.
+                value.append(str(email.header.make_header(chunks)))
+            # This is already a string, so just add it.
+            else:
+                value.append(h)
+
+        # We've processed all of our values to get them into a list of str,
+        # but we may have mojibake data, in which case this is an unparsed
+        # field.
+        if not valid_encoding:
+            unparsed[name] = value
+            continue
+
+        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
+        if raw_name is None:
+            # This is a bit of a weird situation: we've encountered a key that
+            # we don't know what it means, so we don't know whether it's meant
+            # to be a list or not.
+            #
+            # Since we can't really tell one way or another, we'll just leave it
+            # as a list, even though it may be a single item list, because that's
+            # what makes the most sense for email headers.
+            unparsed[name] = value
+            continue
+
+        # If this is one of our string fields, then we'll check to see if our
+        # value is a list of a single item. If it is then we'll assume that
+        # it was emitted as a single string, and unwrap the str from inside
+        # the list.
+        #
+        # If it's any other kind of data, then we haven't the faintest clue
+        # what we should parse it as, and we have to just add it to our list
+        # of unparsed stuff.
+        if raw_name in _STRING_FIELDS and len(value) == 1:
+            raw[raw_name] = value[0]
+        # If this is one of our list of string fields, then we can just assign
+        # the value, since email *only* has strings, and our get_all() call
+        # above ensures that this is a list.
+        elif raw_name in _LIST_FIELDS:
+            raw[raw_name] = value
+        # Special Case: Keywords
+        # The keywords field is implemented in the metadata spec as a str,
+        # but it conceptually is a list of strings, and is serialized using
+        # ", ".join(keywords), so we'll do some light data massaging to turn
+        # this into what it logically is.
+ elif raw_name == "keywords" and len(value) == 1: + raw[raw_name] = _parse_keywords(value[0]) + # Special Case: Project-URL + # The project urls is implemented in the metadata spec as a list of + # specially-formatted strings that represent a key and a value, which + # is fundamentally a mapping, however the email format doesn't support + # mappings in a sane way, so it was crammed into a list of strings + # instead. + # + # We will do a little light data massaging to turn this into a map as + # it logically should be. + elif raw_name == "project_urls": + try: + raw[raw_name] = _parse_project_urls(value) + except KeyError: + unparsed[name] = value + # Nothing that we've done has managed to parse this, so it'll just + # throw it in our unparseable data and move on. + else: + unparsed[name] = value + + # We need to support getting the Description from the message payload in + # addition to getting it from the the headers. This does mean, though, there + # is the possibility of it being set both ways, in which case we put both + # in 'unparsed' since we don't know which is right. + try: + payload = _get_payload(parsed, data) + except ValueError: + unparsed.setdefault("description", []).append( + parsed.get_payload(decode=isinstance(data, bytes)) + ) + else: + if payload: + # Check to see if we've already got a description, if so then both + # it, and this body move to unparseable. + if "description" in raw: + description_header = cast(str, raw.pop("description")) + unparsed.setdefault("description", []).extend( + [description_header, payload] + ) + elif "description" in unparsed: + unparsed["description"].append(payload) + else: + raw["description"] = payload + + # We need to cast our `raw` to a metadata, because a TypedDict only support + # literal key names, but we're computing our key names on purpose, but the + # way this function is implemented, our `TypedDict` can only have valid key + # names. + return cast(RawMetadata, raw), unparsed + + +_NOT_FOUND = object() + + +# Keep the two values in sync. +_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] +_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] + +_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"]) + + +class _Validator(Generic[T]): + """Validate a metadata field. + + All _process_*() methods correspond to a core metadata field. The method is + called with the field's raw value. If the raw value is valid it is returned + in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field). + If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause + as appropriate). + """ + + name: str + raw_name: str + added: _MetadataVersion + + def __init__( + self, + *, + added: _MetadataVersion = "1.0", + ) -> None: + self.added = added + + def __set_name__(self, _owner: "Metadata", name: str) -> None: + self.name = name + self.raw_name = _RAW_TO_EMAIL_MAPPING[name] + + def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: + # With Python 3.8, the caching can be replaced with functools.cached_property(). + # No need to check the cache as attribute lookup will resolve into the + # instance's __dict__ before __get__ is called. + cache = instance.__dict__ + value = instance._raw.get(self.name) + + # To make the _process_* methods easier, we'll check if the value is None + # and if this field is NOT a required attribute, and if both of those + # things are true, we'll skip the the converter. 
This will mean that the + # converters never have to deal with the None union. + if self.name in _REQUIRED_ATTRS or value is not None: + try: + converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}") + except AttributeError: + pass + else: + value = converter(value) + + cache[self.name] = value + try: + del instance._raw[self.name] # type: ignore[misc] + except KeyError: + pass + + return cast(T, value) + + def _invalid_metadata( + self, msg: str, cause: Optional[Exception] = None + ) -> InvalidMetadata: + exc = InvalidMetadata( + self.raw_name, msg.format_map({"field": repr(self.raw_name)}) + ) + exc.__cause__ = cause + return exc + + def _process_metadata_version(self, value: str) -> _MetadataVersion: + # Implicitly makes Metadata-Version required. + if value not in _VALID_METADATA_VERSIONS: + raise self._invalid_metadata(f"{value!r} is not a valid metadata version") + return cast(_MetadataVersion, value) + + def _process_name(self, value: str) -> str: + if not value: + raise self._invalid_metadata("{field} is a required field") + # Validate the name as a side-effect. + try: + utils.canonicalize_name(value, validate=True) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) + else: + return value + + def _process_version(self, value: str) -> version_module.Version: + if not value: + raise self._invalid_metadata("{field} is a required field") + try: + return version_module.parse(value) + except version_module.InvalidVersion as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) + + def _process_summary(self, value: str) -> str: + """Check the field contains no newlines.""" + if "\n" in value: + raise self._invalid_metadata("{field} must be a single line") + return value + + def _process_description_content_type(self, value: str) -> str: + content_types = {"text/plain", "text/x-rst", "text/markdown"} + message = email.message.EmailMessage() + message["content-type"] = value + + content_type, parameters = ( + # Defaults to `text/plain` if parsing failed. + message.get_content_type().lower(), + message["content-type"].params, + ) + # Check if content-type is valid or defaulted to `text/plain` and thus was + # not parseable. + if content_type not in content_types or content_type not in value.lower(): + raise self._invalid_metadata( + f"{{field}} must be one of {list(content_types)}, not {value!r}" + ) + + charset = parameters.get("charset", "UTF-8") + if charset != "UTF-8": + raise self._invalid_metadata( + f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" + ) + + markdown_variants = {"GFM", "CommonMark"} + variant = parameters.get("variant", "GFM") # Use an acceptable default. 
+ if content_type == "text/markdown" and variant not in markdown_variants: + raise self._invalid_metadata( + f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " + f"not {variant!r}", + ) + return value + + def _process_dynamic(self, value: List[str]) -> List[str]: + for dynamic_field in map(str.lower, value): + if dynamic_field in {"name", "version", "metadata-version"}: + raise self._invalid_metadata( + f"{value!r} is not allowed as a dynamic field" + ) + elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: + raise self._invalid_metadata(f"{value!r} is not a valid dynamic field") + return list(map(str.lower, value)) + + def _process_provides_extra( + self, + value: List[str], + ) -> List[utils.NormalizedName]: + normalized_names = [] + try: + for name in value: + normalized_names.append(utils.canonicalize_name(name, validate=True)) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}", cause=exc + ) + else: + return normalized_names + + def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: + try: + return specifiers.SpecifierSet(value) + except specifiers.InvalidSpecifier as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) + + def _process_requires_dist( + self, + value: List[str], + ) -> List[requirements.Requirement]: + reqs = [] + try: + for req in value: + reqs.append(requirements.Requirement(req)) + except requirements.InvalidRequirement as exc: + raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc) + else: + return reqs + + +class Metadata: + """Representation of distribution metadata. + + Compared to :class:`RawMetadata`, this class provides objects representing + metadata fields instead of only using built-in types. Any invalid metadata + will cause :exc:`InvalidMetadata` to be raised (with a + :py:attr:`~BaseException.__cause__` attribute as appropriate). + """ + + _raw: RawMetadata + + @classmethod + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": + """Create an instance from :class:`RawMetadata`. + + If *validate* is true, all metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + ins = cls() + ins._raw = data.copy() # Mutations occur due to caching enriched values. + + if validate: + exceptions: List[Exception] = [] + try: + metadata_version = ins.metadata_version + metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) + except InvalidMetadata as metadata_version_exc: + exceptions.append(metadata_version_exc) + metadata_version = None + + # Make sure to check for the fields that are present, the required + # fields (so their absence can be reported). + fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS + # Remove fields that have already been checked. + fields_to_check -= {"metadata_version"} + + for key in fields_to_check: + try: + if metadata_version: + # Can't use getattr() as that triggers descriptor protocol which + # will fail due to no value for the instance argument. 
+ try: + field_metadata_version = cls.__dict__[key].added + except KeyError: + exc = InvalidMetadata(key, f"unrecognized field: {key!r}") + exceptions.append(exc) + continue + field_age = _VALID_METADATA_VERSIONS.index( + field_metadata_version + ) + if field_age > metadata_age: + field = _RAW_TO_EMAIL_MAPPING[key] + exc = InvalidMetadata( + field, + "{field} introduced in metadata version " + "{field_metadata_version}, not {metadata_version}", + ) + exceptions.append(exc) + continue + getattr(ins, key) + except InvalidMetadata as exc: + exceptions.append(exc) + + if exceptions: + raise ExceptionGroup("invalid metadata", exceptions) + + return ins + + @classmethod + def from_email( + cls, data: Union[bytes, str], *, validate: bool = True + ) -> "Metadata": + """Parse metadata from email headers. + + If *validate* is true, the metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + raw, unparsed = parse_email(data) + + if validate: + exceptions: list[Exception] = [] + for unparsed_key in unparsed: + if unparsed_key in _EMAIL_TO_RAW_MAPPING: + message = f"{unparsed_key!r} has invalid data" + else: + message = f"unrecognized field: {unparsed_key!r}" + exceptions.append(InvalidMetadata(unparsed_key, message)) + + if exceptions: + raise ExceptionGroup("unparsed", exceptions) + + try: + return cls.from_raw(raw, validate=validate) + except ExceptionGroup as exc_group: + raise ExceptionGroup( + "invalid or unparsed metadata", exc_group.exceptions + ) from None + + metadata_version: _Validator[_MetadataVersion] = _Validator() + """:external:ref:`core-metadata-metadata-version` + (required; validated to be a valid metadata version)""" + name: _Validator[str] = _Validator() + """:external:ref:`core-metadata-name` + (required; validated using :func:`~packaging.utils.canonicalize_name` and its + *validate* parameter)""" + version: _Validator[version_module.Version] = _Validator() + """:external:ref:`core-metadata-version` (required)""" + dynamic: _Validator[Optional[List[str]]] = _Validator( + added="2.2", + ) + """:external:ref:`core-metadata-dynamic` + (validated against core metadata field names and lowercased)""" + platforms: _Validator[Optional[List[str]]] = _Validator() + """:external:ref:`core-metadata-platform`""" + supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """:external:ref:`core-metadata-supported-platform`""" + summary: _Validator[Optional[str]] = _Validator() + """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" + description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body + """:external:ref:`core-metadata-description`""" + description_content_type: _Validator[Optional[str]] = _Validator(added="2.1") + """:external:ref:`core-metadata-description-content-type` (validated)""" + keywords: _Validator[Optional[List[str]]] = _Validator() + """:external:ref:`core-metadata-keywords`""" + home_page: _Validator[Optional[str]] = _Validator() + """:external:ref:`core-metadata-home-page`""" + download_url: _Validator[Optional[str]] = _Validator(added="1.1") + """:external:ref:`core-metadata-download-url`""" + author: _Validator[Optional[str]] = _Validator() + """:external:ref:`core-metadata-author`""" + author_email: _Validator[Optional[str]] = _Validator() + """:external:ref:`core-metadata-author-email`""" + maintainer: _Validator[Optional[str]] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer`""" + maintainer_email: 
_Validator[Optional[str]] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer-email`""" + license: _Validator[Optional[str]] = _Validator() + """:external:ref:`core-metadata-license`""" + classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """:external:ref:`core-metadata-classifier`""" + requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-dist`""" + requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-python`""" + # Because `Requires-External` allows for non-PEP 440 version specifiers, we + # don't do any processing on the values. + requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2") + """:external:ref:`core-metadata-requires-external`""" + project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2") + """:external:ref:`core-metadata-project-url`""" + # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation + # regardless of metadata version. + provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator( + added="2.1", + ) + """:external:ref:`core-metadata-provides-extra`""" + provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") + """:external:ref:`core-metadata-provides-dist`""" + obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") + """:external:ref:`core-metadata-obsoletes-dist`""" + requires: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """``Requires`` (deprecated)""" + provides: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """``Provides`` (deprecated)""" + obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """``Obsoletes`` (deprecated)""" diff --git a/setuptools/_distutils/_vendor/packaging/py.typed b/setuptools/_distutils/_vendor/packaging/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/setuptools/_distutils/_vendor/packaging/requirements.py b/setuptools/_distutils/_vendor/packaging/requirements.py new file mode 100644 index 0000000000..bdc43a7e98 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/requirements.py @@ -0,0 +1,90 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from typing import Any, Iterator, Optional, Set + +from ._parser import parse_requirement as _parse_requirement +from ._tokenizer import ParserSyntaxError +from .markers import Marker, _normalize_extra_values +from .specifiers import SpecifierSet +from .utils import canonicalize_name + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +class Requirement: + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
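Before the implementation that follows, a sketch of the parsed attributes a `Requirement` instance exposes (import path assumed from this diff; not part of the patch):

```python
# Sketch only: the parts a parsed Requirement instance exposes.
from setuptools._distutils._vendor.packaging.requirements import Requirement

req = Requirement('example[tests]>=1.0; python_version >= "3.8"')
assert req.name == "example"
assert req.extras == {"tests"}
assert str(req.specifier) == ">=1.0"
assert req.marker is not None  # PEP 508 environment marker, if present
```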
+
+    def __init__(self, requirement_string: str) -> None:
+        try:
+            parsed = _parse_requirement(requirement_string)
+        except ParserSyntaxError as e:
+            raise InvalidRequirement(str(e)) from e
+
+        self.name: str = parsed.name
+        self.url: Optional[str] = parsed.url or None
+        self.extras: Set[str] = set(parsed.extras or [])
+        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
+        self.marker: Optional[Marker] = None
+        if parsed.marker is not None:
+            self.marker = Marker.__new__(Marker)
+            self.marker._markers = _normalize_extra_values(parsed.marker)
+
+    def _iter_parts(self, name: str) -> Iterator[str]:
+        yield name
+
+        if self.extras:
+            formatted_extras = ",".join(sorted(self.extras))
+            yield f"[{formatted_extras}]"
+
+        if self.specifier:
+            yield str(self.specifier)
+
+        if self.url:
+            yield f"@ {self.url}"
+            if self.marker:
+                yield " "
+
+        if self.marker:
+            yield f"; {self.marker}"
+
+    def __str__(self) -> str:
+        return "".join(self._iter_parts(self.name))
+
+    def __repr__(self) -> str:
+        return f"<Requirement('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.__class__.__name__,
+                *self._iter_parts(canonicalize_name(self.name)),
+            )
+        )
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Requirement):
+            return NotImplemented
+
+        return (
+            canonicalize_name(self.name) == canonicalize_name(other.name)
+            and self.extras == other.extras
+            and self.specifier == other.specifier
+            and self.url == other.url
+            and self.marker == other.marker
+        )
diff --git a/setuptools/_distutils/_vendor/packaging/specifiers.py b/setuptools/_distutils/_vendor/packaging/specifiers.py
new file mode 100644
index 0000000000..2d015bab59
--- /dev/null
+++ b/setuptools/_distutils/_vendor/packaging/specifiers.py
@@ -0,0 +1,1017 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
+    from packaging.version import Version
+"""
+
+import abc
+import itertools
+import re
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
+
+from .utils import canonicalize_version
+from .version import Version
+
+UnparsedVersion = Union[Version, str]
+UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
+CallableOperator = Callable[[Version, str], bool]
+
+
+def _coerce_version(version: UnparsedVersion) -> Version:
+    if not isinstance(version, Version):
+        version = Version(version)
+    return version
+
+
+class InvalidSpecifier(ValueError):
+    """
+    Raised when attempting to create a :class:`Specifier` with a specifier
+    string that is invalid.
+
+    >>> Specifier("lolwat")
+    Traceback (most recent call last):
+        ...
+    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
+    """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        """
+        Returns the str representation of this Specifier-like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        """
+        Returns a hash value for this Specifier-like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other: object) -> bool:
+        """
+        Returns a boolean representing whether or not the two Specifier-like
+        objects are equal.
+
+        :param other: The other object to check against.
+ """ + + @property + @abc.abstractmethod + def prereleases(self) -> Optional[bool]: + """Whether or not pre-releases as a whole are allowed. + + This can be set to either ``True`` or ``False`` to explicitly enable or disable + prereleases or it can be set to ``None`` (the default) to use default semantics. + """ + + @prereleases.setter + def prereleases(self, value: bool) -> None: + """Setter for :attr:`prereleases`. + + :param value: The value to set. + """ + + @abc.abstractmethod + def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + ) -> Iterator[UnparsedVersionVar]: + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class Specifier(BaseSpecifier): + """This class abstracts handling of version specifiers. + + .. tip:: + + It is generally not required to instantiate this manually. You should instead + prefer to work with :class:`SpecifierSet` instead, which can parse + comma-separated version specifiers (which is what package metadata contains). + """ + + _operator_regex_str = r""" + (?P(~=|==|!=|<=|>=|<|>|===)) + """ + _version_regex_str = r""" + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s;)]* # The arbitrary version can be just about anything, + # we match everything except for whitespace, a + # semi-colon for marker support, and a closing paren + # since versions can be enclosed in them. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + + # You cannot use a wild card and a pre-release, post-release, a dev or + # local version together so group them with a | and make them optional. + (?: + \.\* # Wild card syntax of .* + | + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. 
+ (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: + """Initialize a Specifier instance. + + :param spec: + The string representation of a specifier which will be parsed and + normalized before use. + :param prereleases: + This tells the specifier if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + :raises InvalidSpecifier: + If the given specifier is invalid (i.e. bad syntax). + """ + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier(f"Invalid specifier: '{spec}'") + + self._spec: Tuple[str, str] = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515 + @property # type: ignore[override] + def prereleases(self) -> bool: + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. + if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if Version(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + @property + def operator(self) -> str: + """The operator of this specifier. + + >>> Specifier("==1.2.3").operator + '==' + """ + return self._spec[0] + + @property + def version(self) -> str: + """The version of this specifier. + + >>> Specifier("==1.2.3").version + '1.2.3' + """ + return self._spec[1] + + def __repr__(self) -> str: + """A representation of the Specifier that shows all internal state. + + >>> Specifier('>=1.0.0') + =1.0.0')> + >>> Specifier('>=1.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> Specifier('>=1.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the Specifier that can be round-tripped. + + >>> str(Specifier('>=1.0.0')) + '>=1.0.0' + >>> str(Specifier('>=1.0.0', prereleases=False)) + '>=1.0.0' + """ + return "{}{}".format(*self._spec) + + @property + def _canonical_spec(self) -> Tuple[str, str]: + canonical_version = canonicalize_version( + self._spec[1], + strip_trailing_zero=(self._spec[0] != "~="), + ) + return self._spec[0], canonical_version + + def __hash__(self) -> int: + return hash(self._canonical_spec) + + def __eq__(self, other: object) -> bool: + """Whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") + True + >>> (Specifier("==1.2.3", prereleases=False) == + ... 
Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: + + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore suffix segments. + prefix = _version_join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + def _compare_equal(self, prospective: Version, spec: str) -> bool: + + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + normalized_prospective = canonicalize_version( + prospective.public, strip_trailing_zero=False + ) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) + # Split the spec out by bangs and dots, and pretend that there is + # an implicit dot in between a release segment and a pre-release segment. + split_spec = _version_split(normalized_spec) + + # Split the prospective version out by bangs and dots, and pretend + # that there is an implicit dot in between a release segment and + # a pre-release segment. + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = padded_prospective[: len(split_spec)] + + return shortened_prospective == split_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = Version(prospective.public) + + return prospective == spec_version + + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: + return not self._compare_equal(prospective, spec) + + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. 
+ return Version(prospective.public) <= Version(spec) + + def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) + + def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: + return str(prospective).lower() == str(spec).lower() + + def __contains__(self, item: Union[str, Version]) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. 
+ + >>> "1.2.3" in Specifier(">=1.2.3") + True + >>> Version("1.2.3") in Specifier(">=1.2.3") + True + >>> "1.0.0" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) + True + """ + return self.contains(item) + + def contains( + self, item: UnparsedVersion, prereleases: Optional[bool] = None + ) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this Specifier. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. + + >>> Specifier(">=1.2.3").contains("1.2.3") + True + >>> Specifier(">=1.2.3").contains(Version("1.2.3")) + True + >>> Specifier(">=1.2.3").contains("1.0.0") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + False + >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") + True + >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) + True + """ + + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version, this allows us to have a shortcut for + # "2.0" in Specifier(">=2") + normalized_item = _coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if normalized_item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + operator_callable: CallableOperator = self._get_operator(self.operator) + return operator_callable(normalized_item, self.version) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable, that match the specifier. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will be intelligently decide whether to allow + prereleases or not (based on the :attr:`prereleases` attribute, and + whether the only versions matching are prereleases). + + This method is smarter than just ``filter(Specifier().contains, [...])`` + because it implements the rule from :pep:`440` that a prerelease item + SHOULD be accepted if no other versions match the given specifier. + + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) + ['1.2.3', '1.3', ] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) + ['1.5a1'] + >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + """ + + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. 
+ for version in iterable: + parsed_version = _coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later in case nothing + # else matches this specifier. + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. + else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version: str) -> List[str]: + """Split version into components. + + The split components are intended for version comparison. The logic does + not attempt to retain the original version string, so joining the + components back with :func:`_version_join` may not produce the original + version string. + """ + result: List[str] = [] + + epoch, _, rest = version.rpartition("!") + result.append(epoch or "0") + + for item in rest.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _version_join(components: List[str]) -> str: + """Join split version components into a version string. + + This function assumes the input came from :func:`_version_split`, where the + first component must be the epoch (either empty or numeric), and all other + components numeric. + """ + epoch, *rest = components + return f"{epoch}!{'.'.join(rest)}" + + +def _is_not_suffix(segment: str) -> bool: + return not any( + segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") + ) + + +def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return ( + list(itertools.chain.from_iterable(left_split)), + list(itertools.chain.from_iterable(right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + """This class abstracts handling of a set of version specifiers. + + It can be passed a single specifier (``>=3.0``), a comma-separated list of + specifiers (``>=3.0,!=3.1``), or no specifier at all. + """ + + def __init__( + self, specifiers: str = "", prereleases: Optional[bool] = None + ) -> None: + """Initialize a SpecifierSet instance. + + :param specifiers: + The string representation of a specifier or a comma-separated list of + specifiers which will be parsed and normalized before use. + :param prereleases: + This tells the SpecifierSet if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. 
+ + :raises InvalidSpecifier: + If the given ``specifiers`` are not parseable than this exception will be + raised. + """ + + # Split on `,` to break each individual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Make each individual specifier a Specifier and save in a frozen set for later. + self._specs = frozenset(map(Specifier, split_specifiers)) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + @property + def prereleases(self) -> Optional[bool]: + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + def __repr__(self) -> str: + """A representation of the specifier set that shows all internal state. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> SpecifierSet('>=1.0.0,!=2.0.0') + =1.0.0')> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"" + + def __str__(self) -> str: + """A string representation of the specifier set that can be round-tripped. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) + '!=1.0.1,>=1.0.0' + >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) + '!=1.0.1,>=1.0.0' + """ + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self) -> int: + return hash(self._specs) + + def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": + """Return a SpecifierSet which is a combination of the two sets. + + :param other: The other object to combine with. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' + =1.0.0')> + >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') + =1.0.0')> + """ + if isinstance(other, str): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." + ) + + return specifier + + def __eq__(self, other: object) -> bool: + """Whether or not the two SpecifierSet-like objects are equal. + + :param other: The other object to check against. 
+ + The value of :attr:`prereleases` is ignored. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == + ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") + False + """ + if isinstance(other, (str, Specifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __len__(self) -> int: + """Returns the number of specifiers in this specifier set.""" + return len(self._specs) + + def __iter__(self) -> Iterator[Specifier]: + """ + Returns an iterator over all the underlying :class:`Specifier` instances + in this specifier set. + + >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) + [, =1.0.0')>] + """ + return iter(self._specs) + + def __contains__(self, item: UnparsedVersion) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. + + >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) + True + """ + return self.contains(item) + + def contains( + self, + item: UnparsedVersion, + prereleases: Optional[bool] = None, + installed: Optional[bool] = None, + ) -> bool: + """Return whether or not the item is contained in this SpecifierSet. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this SpecifierSet. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. + + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) + True + """ + # Ensure that our item is a Version instance. + if not isinstance(item, Version): + item = Version(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. 
+ # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + if installed and item.is_prerelease: + item = Version(item.base_version) + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all(s.contains(item, prereleases=prereleases) for s in self._specs) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable, that match the specifiers in this set. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will be intelligently decide whether to allow + prereleases or not (based on the :attr:`prereleases` attribute, and + whether the only versions matching are prereleases). + + This method is smarter than just ``filter(SpecifierSet(...).contains, [...])`` + because it implements the rule from :pep:`440` that a prerelease item + SHOULD be accepted if no other versions match the given specifier. + + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) + ['1.3', ] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) + [] + >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + + An "empty" SpecifierSet will filter items based on the presence of prerelease + versions in the set. + + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet("").filter(["1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + """ + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iter(iterable) + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases. 
+ else: + filtered: List[UnparsedVersionVar] = [] + found_prereleases: List[UnparsedVersionVar] = [] + + for item in iterable: + parsed_version = _coerce_version(item) + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return iter(found_prereleases) + + return iter(filtered) diff --git a/setuptools/_distutils/_vendor/packaging/tags.py b/setuptools/_distutils/_vendor/packaging/tags.py new file mode 100644 index 0000000000..89f1926137 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/tags.py @@ -0,0 +1,571 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import logging +import platform +import re +import struct +import subprocess +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +MacVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: Dict[str, str] = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = struct.calcsize("P") == 4 + + +class Tag: + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. + """ + + __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] + + def __init__(self, interpreter: str, abi: str, platform: str) -> None: + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self) -> str: + return self._interpreter + + @property + def abi(self) -> str: + return self._abi + + @property + def platform(self) -> str: + return self._platform + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) + ) + + def __hash__(self) -> int: + return self._hash + + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" + + def __repr__(self) -> str: + return f"<{self} @ {id(self)}>" + + +def parse_tag(tag: str) -> FrozenSet[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. 
+ """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: + value: Union[int, str, None] = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string: str) -> str: + return string.replace(".", "_").replace("-", "_").replace(" ", "_") + + +def _is_threaded_cpython(abis: List[str]) -> bool: + """ + Determine if the ABI corresponds to a threaded (`--disable-gil`) build. + + The threaded builds are indicated by a "t" in the abiflags. + """ + if len(abis) == 0: + return False + # expect e.g., cp313 + m = re.match(r"cp\d+(.*)", abis[0]) + if not m: + return False + abiflags = m.group(1) + return "t" in abiflags + + +def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) + builds do not support abi3. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading + + +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + threading = debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. + # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn): + threading = "t" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append(f"cp{version}{threading}") + abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}") + return abis + + +def cpython_tags( + python_version: Optional[PythonVersion] = None, + abis: Optional[Iterable[str]] = None, + platforms: Optional[Iterable[str]] = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. 
+ """ + if not python_version: + python_version = sys.version_info[:2] + + interpreter = f"cp{_version_nodot(python_version[:2])}" + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. + for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + threading = _is_threaded_cpython(abis) + use_abi3 = _abi3_applies(python_version, threading) + if use_abi3: + yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) + yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) + + if use_abi3: + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{version}".format( + version=_version_nodot((python_version[0], minor_version)) + ) + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi() -> List[str]: + """ + Return the ABI tag based on EXT_SUFFIX. + """ + # The following are examples of `EXT_SUFFIX`. + # We want to keep the parts which are related to the ABI and remove the + # parts which are related to the platform: + # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 + # - mac: '.cpython-310-darwin.so' => cp310 + # - win: '.cp310-win_amd64.pyd' => cp310 + # - win: '.pyd' => cp37 (uses _cpython_abis()) + # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 + # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' + # => graalpy_38_native + + ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) + if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": + raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") + parts = ext_suffix.split(".") + if len(parts) < 3: + # CPython3.7 and earlier uses ".pyd" on Windows. + return _cpython_abis(sys.version_info[:2]) + soabi = parts[1] + if soabi.startswith("cpython"): + # non-windows + abi = "cp" + soabi.split("-")[1] + elif soabi.startswith("cp"): + # windows + abi = soabi.split("-")[0] + elif soabi.startswith("pypy"): + abi = "-".join(soabi.split("-")[:2]) + elif soabi.startswith("graalpy"): + abi = "-".join(soabi.split("-")[:3]) + elif soabi: + # pyston, ironpython, others? + abi = soabi + else: + return [] + return [_normalize_string(abi)] + + +def generic_tags( + interpreter: Optional[str] = None, + abis: Optional[Iterable[str]] = None, + platforms: Optional[Iterable[str]] = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. + """ + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + else: + abis = list(abis) + platforms = list(platforms or platform_tags()) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. 
+ """ + if len(py_version) > 1: + yield f"py{_version_nodot(py_version[:2])}" + yield f"py{py_version[0]}" + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield f"py{_version_nodot((py_version[0], minor))}" + + +def compatible_tags( + python_version: Optional[PythonVersion] = None, + interpreter: Optional[str] = None, + platforms: Optional[Iterable[str]] = None, +) -> Iterator[Tag]: + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. + - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + if cpu_arch in {"arm64", "x86_64"}: + formats.append("universal2") + + if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: + formats.append("universal") + + return formats + + +def mac_platforms( + version: Optional[MacVersion] = None, arch: Optional[str] = None +) -> Iterator[str]: + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. + """ + version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + if version == (10, 16): + # When built against an older macOS SDK, Python will report macOS 10.16 + # instead of the real version. + version_str = subprocess.run( + [ + sys.executable, + "-sS", + "-c", + "import platform; print(platform.mac_ver()[0])", + ], + check=True, + env={"SYSTEM_VERSION_COMPAT": "0"}, + stdout=subprocess.PIPE, + text=True, + ).stdout + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + + if (10, 0) <= version and version < (11, 0): + # Prior to Mac OS 11, each yearly release of Mac OS bumped the + # "minor" version number. The major version was always 10. 
+ for minor_version in range(version[1], -1, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=10, minor=minor_version, binary_format=binary_format + ) + + if version >= (11, 0): + # Starting with Mac OS 11, each yearly release bumps the major version + # number. The minor versions are now the midyear updates. + for major_version in range(version[0], 10, -1): + compat_version = major_version, 0 + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=major_version, minor=0, binary_format=binary_format + ) + + if version >= (11, 0): + # Mac OS 11 on x86_64 is compatible with binaries from previous releases. + # Arm64 support was introduced in 11.0, so no Arm binaries from previous + # releases exist. + # + # However, the "universal2" binary format can have a + # macOS version earlier than 11.0 when the x86_64 part of the binary supports + # that version of macOS. + if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + else: + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_format = "universal2" + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + + +def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: + linux = _normalize_string(sysconfig.get_platform()) + if not linux.startswith("linux_"): + # we should never be here, just yield the sysconfig one and return + yield linux + return + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv8l" + _, arch = linux.split("_", 1) + archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) + yield from _manylinux.platform_tags(archs) + yield from _musllinux.platform_tags(archs) + for arch in archs: + yield f"linux_{arch}" + + +def _generic_platforms() -> Iterator[str]: + yield _normalize_string(sysconfig.get_platform()) + + +def platform_tags() -> Iterator[str]: + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name() -> str: + """ + Returns the name of the running interpreter. + + Some implementations have a reserved, two-letter abbreviation which will + be returned when appropriate. + """ + name = sys.implementation.name + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(*, warn: bool = False) -> str: + """ + Returns the version of the running interpreter. + """ + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[:2]) + return version + + +def _version_nodot(version: PythonVersion) -> str: + return "".join(map(str, version)) + + +def sys_tags(*, warn: bool = False) -> Iterator[Tag]: + """ + Returns the sequence of tag triples for the running interpreter. 
+ + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + + interp_name = interpreter_name() + if interp_name == "cp": + yield from cpython_tags(warn=warn) + else: + yield from generic_tags() + + if interp_name == "pp": + interp = "pp3" + elif interp_name == "cp": + interp = "cp" + interpreter_version(warn=warn) + else: + interp = None + yield from compatible_tags(interpreter=interp) diff --git a/setuptools/_distutils/_vendor/packaging/utils.py b/setuptools/_distutils/_vendor/packaging/utils.py new file mode 100644 index 0000000000..c2c2f75aa8 --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/utils.py @@ -0,0 +1,172 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import re +from typing import FrozenSet, NewType, Tuple, Union, cast + +from .tags import Tag, parse_tag +from .version import InvalidVersion, Version + +BuildTag = Union[Tuple[()], Tuple[int, str]] +NormalizedName = NewType("NormalizedName", str) + + +class InvalidName(ValueError): + """ + An invalid distribution name; users should refer to the packaging user guide. + """ + + +class InvalidWheelFilename(ValueError): + """ + An invalid wheel filename was found, users should refer to PEP 427. + """ + + +class InvalidSdistFilename(ValueError): + """ + An invalid sdist filename was found, users should refer to the packaging user guide. + """ + + +# Core metadata spec for `Name` +_validate_regex = re.compile( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE +) +_canonicalize_regex = re.compile(r"[-_.]+") +_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$") +# PEP 427: The build number must start with a digit. +_build_tag_regex = re.compile(r"(\d+)(.*)") + + +def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName: + if validate and not _validate_regex.match(name): + raise InvalidName(f"name is invalid: {name!r}") + # This is taken from PEP 503. + value = _canonicalize_regex.sub("-", name).lower() + return cast(NormalizedName, value) + + +def is_normalized_name(name: str) -> bool: + return _normalized_regex.match(name) is not None + + +def canonicalize_version( + version: Union[Version, str], *, strip_trailing_zero: bool = True +) -> str: + """ + This is very similar to Version.__str__, but has one subtle difference + with the way it handles the release segment. 
+ """ + if isinstance(version, str): + try: + parsed = Version(version) + except InvalidVersion: + # Legacy versions cannot be normalized + return version + else: + parsed = version + + parts = [] + + # Epoch + if parsed.epoch != 0: + parts.append(f"{parsed.epoch}!") + + # Release segment + release_segment = ".".join(str(x) for x in parsed.release) + if strip_trailing_zero: + # NB: This strips trailing '.0's to normalize + release_segment = re.sub(r"(\.0)+$", "", release_segment) + parts.append(release_segment) + + # Pre-release + if parsed.pre is not None: + parts.append("".join(str(x) for x in parsed.pre)) + + # Post-release + if parsed.post is not None: + parts.append(f".post{parsed.post}") + + # Development release + if parsed.dev is not None: + parts.append(f".dev{parsed.dev}") + + # Local version segment + if parsed.local is not None: + parts.append(f"+{parsed.local}") + + return "".join(parts) + + +def parse_wheel_filename( + filename: str, +) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: + if not filename.endswith(".whl"): + raise InvalidWheelFilename( + f"Invalid wheel filename (extension must be '.whl'): {filename}" + ) + + filename = filename[:-4] + dashes = filename.count("-") + if dashes not in (4, 5): + raise InvalidWheelFilename( + f"Invalid wheel filename (wrong number of parts): {filename}" + ) + + parts = filename.split("-", dashes - 2) + name_part = parts[0] + # See PEP 427 for the rules on escaping the project name. + if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: + raise InvalidWheelFilename(f"Invalid project name: {filename}") + name = canonicalize_name(name_part) + + try: + version = Version(parts[1]) + except InvalidVersion as e: + raise InvalidWheelFilename( + f"Invalid wheel filename (invalid version): {filename}" + ) from e + + if dashes == 5: + build_part = parts[2] + build_match = _build_tag_regex.match(build_part) + if build_match is None: + raise InvalidWheelFilename( + f"Invalid build number: {build_part} in '{filename}'" + ) + build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) + else: + build = () + tags = parse_tag(parts[-1]) + return (name, version, build, tags) + + +def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: + if filename.endswith(".tar.gz"): + file_stem = filename[: -len(".tar.gz")] + elif filename.endswith(".zip"): + file_stem = filename[: -len(".zip")] + else: + raise InvalidSdistFilename( + f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" + f" {filename}" + ) + + # We are requiring a PEP 440 version, which cannot contain dashes, + # so we split on the last dash. + name_part, sep, version_part = file_stem.rpartition("-") + if not sep: + raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") + + name = canonicalize_name(name_part) + + try: + version = Version(version_part) + except InvalidVersion as e: + raise InvalidSdistFilename( + f"Invalid sdist filename (invalid version): {filename}" + ) from e + + return (name, version) diff --git a/setuptools/_distutils/_vendor/packaging/version.py b/setuptools/_distutils/_vendor/packaging/version.py new file mode 100644 index 0000000000..5faab9bd0d --- /dev/null +++ b/setuptools/_distutils/_vendor/packaging/version.py @@ -0,0 +1,563 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. 
testsetup::
+
+    from packaging.version import parse, Version
+"""
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
+
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
+    NegativeInfinityType,
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
+]
+CmpKey = Tuple[
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
+class _Version(NamedTuple):
+    epoch: int
+    release: Tuple[int, ...]
+    dev: Optional[Tuple[str, int]]
+    pre: Optional[Tuple[str, int]]
+    post: Optional[Tuple[str, int]]
+    local: Optional[LocalType]
+
+
+def parse(version: str) -> "Version":
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
+    """
+    return Version(version)
+
+
+class InvalidVersion(ValueError):
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+    ...
+    packaging.version.InvalidVersion: Invalid version: 'invalid'
+    """
+
+
+class _BaseVersion:
+    _key: Tuple[Any, ...]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+_VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
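+
+# An illustrative sketch (not part of the vendored file) of how the pattern is
+# meant to be consumed, mirroring the compile in the Version class below:
+#
+#     _check = re.compile(
+#         r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE
+#     )
+#     assert _check.match("1!2.0rc1.post0.dev3+local.7") is not None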
+
+
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _key: CmpKey
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: '{version}'")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f"<Version('{self}')>"
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be round-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._version.epoch
+
+    @property
+    def release(self) -> Tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._version.release
+
+    @property
+    def pre(self) -> Optional[Tuple[str, int]]:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._version.pre
+
+    @property
+    def post(self) -> Optional[int]:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> Optional[int]:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> Optional[str]:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1.2.3+abc.dev1").public
+        '1.2.3'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3+abc.dev1").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any
+        pre-release or post-release markers.
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
+) -> Optional[Tuple[str, int]]:
+
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume that if we are given a number but not a letter, then this
+        # is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
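+
+# Illustrative examples of the normalization above (derived from the logic,
+# not exhaustive):
+#     _parse_letter_version("alpha", None) == ("a", 0)
+#     _parse_letter_version(None, "1") == ("post", 1)   # implicit post: "1.0-1"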
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: Tuple[int, ...],
+    pre: Optional[Tuple[str, int]],
+    post: Optional[Tuple[str, int]],
+    dev: Optional[Tuple[str, int]],
+    local: Optional[LocalType],
+) -> CmpKey:
+
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all the now-leading
+    # zeros until we come to something non-zero, then re-reverse the rest back
+    # into the correct order, make it a tuple, and use that as our sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP 440.
+        # - Alphanumeric segments sort before numeric segments
+        # - Alphanumeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
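
Reviewer note: a quick way to sanity-check the ordering rules that `_cmpkey` encodes is to sort a handful of versions through the upstream `packaging.version` API, which this vendored module mirrors (an assumption; the import path inside setuptools differs):

```python
# Sanity check for the _cmpkey ordering rules above, using the upstream
# packaging.version API (assumed equivalent to this vendored copy).
from packaging.version import Version

versions = [
    Version("1.0.post1"),  # post-releases sort after the final release
    Version("1.0+abc.5"),  # local segments sort after the bare version
    Version("1.0"),
    Version("1.0a0"),      # pre-releases sort before the final release
    Version("1.0.dev0"),   # dev releases sort before pre-releases
]

print(sorted(versions))
# 1.0.dev0 < 1.0a0 < 1.0 < 1.0+abc.5 < 1.0.post1
print(Version("1.0") == Version("1.0.0"))  # True: trailing zeros are dropped
```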
diff --git a/setuptools/_distutils/_vendor/ruff.toml b/setuptools/_distutils/_vendor/ruff.toml
new file mode 100644
index 0000000000..00fee625a5
--- /dev/null
+++ b/setuptools/_distutils/_vendor/ruff.toml
@@ -0,0 +1 @@
+exclude = ["*"]
diff --git a/setuptools/_distutils/archive_util.py b/setuptools/_distutils/archive_util.py
index 052f6e4646..07cd97f4d0 100644
--- a/setuptools/_distutils/archive_util.py
+++ b/setuptools/_distutils/archive_util.py
@@ -56,7 +56,13 @@ def _get_uid(name):
 
 
 def make_tarball(
-    base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None
+    base_name,
+    base_dir,
+    compress="gzip",
+    verbose=False,
+    dry_run=False,
+    owner=None,
+    group=None,
 ):
     """Create a (possibly compressed) tar file from all the files under
     'base_dir'.
@@ -113,7 +119,7 @@ def _set_uid_gid(tarinfo):
         return tarinfo
 
     if not dry_run:
-        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
+        tar = tarfile.open(archive_name, f'w|{tar_compression[compress]}')
         try:
             tar.add(base_dir, filter=_set_uid_gid)
         finally:
@@ -134,7 +140,7 @@ def _set_uid_gid(tarinfo):
     return archive_name
 
 
-def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901
+def make_zipfile(base_name, base_dir, verbose=False, dry_run=False):  # noqa: C901
     """Create a zip file from all the files under 'base_dir'.
 
     The output zip file will be named 'base_name' + ".zip".  Uses either the
@@ -160,12 +166,9 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901
             # XXX really should distinguish between "couldn't find
             # external 'zip' command" and "zip failed".
             raise DistutilsExecError(
-                (
-                    "unable to create zip file '%s': "
-                    "could neither import the 'zipfile' module nor "
-                    "find a standalone zip utility"
-                )
-                % zip_filename
+                f"unable to create zip file '{zip_filename}': "
+                "could neither import the 'zipfile' module nor "
+                "find a standalone zip utility"
             )
 
     else:
@@ -224,8 +227,8 @@ def make_archive(
     format,
     root_dir=None,
     base_dir=None,
-    verbose=0,
-    dry_run=0,
+    verbose=False,
+    dry_run=False,
     owner=None,
     group=None,
 ):
@@ -260,7 +263,7 @@ def make_archive(
     try:
         format_info = ARCHIVE_FORMATS[format]
     except KeyError:
-        raise ValueError("unknown archive format '%s'" % format)
+        raise ValueError(f"unknown archive format '{format}'")
 
     func = format_info[0]
     for arg, val in format_info[1]:
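
Reviewer note: with the flag parameters now real booleans, a minimal `make_archive` call looks like the sketch below (the project name and directories are hypothetical):

```python
# Minimal sketch of the updated archive_util API; names are illustrative.
from setuptools._distutils.archive_util import make_archive

archive = make_archive(
    "dist/myproj-1.0",       # base name; the format's extension is appended
    "gztar",
    root_dir="build/stage",  # chdir here before archiving
    verbose=True,            # booleans now, instead of the old 0/1 ints
    dry_run=False,
)
print(archive)               # e.g. 'dist/myproj-1.0.tar.gz'
```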
diff --git a/setuptools/_distutils/bcppcompiler.py b/setuptools/_distutils/bcppcompiler.py
index c1341e43cb..e47dca5d09 100644
--- a/setuptools/_distutils/bcppcompiler.py
+++ b/setuptools/_distutils/bcppcompiler.py
@@ -61,7 +61,7 @@ class BCPPCompiler(CCompiler):
     static_lib_format = shared_lib_format = '%s%s'
     exe_extension = '.exe'
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
 
         # These executables are assumed to all be in the path.
@@ -84,13 +84,13 @@ def __init__(self, verbose=0, dry_run=0, force=0):
 
     # -- Worker methods ------------------------------------------------
 
-    def compile(  # noqa: C901
+    def compile(
         self,
         sources,
         output_dir=None,
         macros=None,
         include_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         depends=None,
@@ -161,7 +161,7 @@ def compile(  # noqa: C901
     # compile ()
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         (objects, output_dir) = self._fix_object_args(objects, output_dir)
         output_filename = self.library_filename(output_libname, output_dir=output_dir)
@@ -189,7 +189,7 @@ def link(  # noqa: C901
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -234,11 +234,11 @@ def link(  # noqa: C901
                 head, tail = os.path.split(output_filename)
                 modname, ext = os.path.splitext(tail)
                 temp_dir = os.path.dirname(objects[0])  # preserve tree structure
-                def_file = os.path.join(temp_dir, '%s.def' % modname)
+                def_file = os.path.join(temp_dir, f'{modname}.def')
                 contents = ['EXPORTS']
                 for sym in export_symbols or []:
                     contents.append(f'  {sym}=_{sym}')
-                self.execute(write_file, (def_file, contents), "writing %s" % def_file)
+                self.execute(write_file, (def_file, contents), f"writing {def_file}")
 
             # Borland C++ has problems with '/' in paths
             objects2 = map(os.path.normpath, objects)
@@ -254,7 +254,7 @@ def link(  # noqa: C901
                     objects.append(file)
 
             for ell in library_dirs:
-                ld_args.append("/L%s" % os.path.normpath(ell))
+                ld_args.append(f"/L{os.path.normpath(ell)}")
             ld_args.append("/L.")  # we sometimes use relative paths
 
             # list of object files
@@ -313,7 +313,7 @@ def link(  # noqa: C901
 
     # -- Miscellaneous methods -----------------------------------------
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         # List of effective library names to try, in order of preference:
         # xxx_bcpp.lib is better than xxx.lib
         # and xxx_d.lib is better than xxx.lib if debug is set
@@ -339,7 +339,7 @@ def find_library_file(self, dirs, lib, debug=0):
             return None
 
     # overwrite the one from CCompiler to support rc and res-files
-    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
         if output_dir is None:
             output_dir = ''
         obj_names = []
diff --git a/setuptools/_distutils/ccompiler.py b/setuptools/_distutils/ccompiler.py
index 8876d73098..9d5297b944 100644
--- a/setuptools/_distutils/ccompiler.py
+++ b/setuptools/_distutils/ccompiler.py
@@ -6,6 +6,7 @@
 import os
 import re
 import sys
+import types
 import warnings
 
 from ._itertools import always_iterable
@@ -21,7 +22,7 @@
 )
 from .file_util import move_file
 from .spawn import spawn
-from .util import execute, split_quoted
+from .util import execute, split_quoted, is_mingw
 
 
 class CCompiler:
@@ -104,7 +105,7 @@ class CCompiler:
     library dirs specific to this compiler class
     """
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         self.dry_run = dry_run
         self.force = force
         self.verbose = verbose
@@ -188,24 +189,28 @@ def _find_macro(self, name):
         return None
 
     def _check_macro_definitions(self, definitions):
-        """Ensures that every element of 'definitions' is a valid macro
-        definition, ie. either (name,value) 2-tuple or a (name,) tuple.  Do
-        nothing if all definitions are OK, raise TypeError otherwise.
-        """
+        """Ensure that every element of 'definitions' is valid."""
         for defn in definitions:
-            if not (
-                isinstance(defn, tuple)
-                and (
-                    len(defn) in (1, 2)
-                    and (isinstance(defn[1], str) or defn[1] is None)
-                )
-                and isinstance(defn[0], str)
-            ):
-                raise TypeError(
-                    ("invalid macro definition '%s': " % defn)
-                    + "must be tuple (string,), (string, string), or "
-                    + "(string, None)"
-                )
+            self._check_macro_definition(defn)
+
+    def _check_macro_definition(self, defn):
+        """
+        Raise a TypeError if defn is not valid.
+
+        A valid definition is either a (name, value) 2-tuple or a (name,) tuple.
+        """
+        if not isinstance(defn, tuple) or not self._is_valid_macro(*defn):
+            raise TypeError(
+                f"invalid macro definition '{defn}': "
+                "must be tuple (string,), (string, string), or (string, None)"
+            )
+
+    @staticmethod
+    def _is_valid_macro(name, value=None):
+        """
+        A valid macro is a ``name : str`` and a ``value : str | None``.
+        """
+        return isinstance(name, str) and isinstance(value, (str, types.NoneType))
 
     # -- Bookkeeping methods -------------------------------------------
 
@@ -342,7 +347,7 @@ def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra):
             extra = []
 
         # Get the list of expected output (object) files
-        objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir)
+        objects = self.object_filenames(sources, strip_dir=False, output_dir=outdir)
         assert len(objects) == len(sources)
 
         pp_opts = gen_preprocess_options(macros, incdirs)
@@ -532,7 +537,7 @@ def compile(
         output_dir=None,
         macros=None,
         include_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         depends=None,
@@ -609,7 +614,7 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
         pass
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         """Link a bunch of stuff together to create a static library file.
         The "bunch of stuff" consists of the list of object files supplied
@@ -650,7 +655,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -712,7 +717,7 @@ def link_shared_lib(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -743,7 +748,7 @@ def link_shared_object(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -773,7 +778,7 @@ def link_executable(
         libraries=None,
         library_dirs=None,
         runtime_library_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         target_lang=None,
@@ -859,7 +864,7 @@ def has_function(  # noqa: C901
         fd, fname = tempfile.mkstemp(".c", funcname, text=True)
         with os.fdopen(fd, "w", encoding='utf-8') as f:
             for incl in includes:
-                f.write("""#include "%s"\n""" % incl)
+                f.write(f"""#include "{incl}"\n""")
             if not includes:
                 # Use "char func(void);" as the prototype to follow
                 # what autoconf does.  This prototype does not match
@@ -869,22 +874,20 @@ def has_function(  # noqa: C901
                 # know the exact argument types, and the has_function
                 # interface does not provide that level of information.
                 f.write(
-                    """\
+                    f"""\
 #ifdef __cplusplus
 extern "C"
 #endif
-char %s(void);
+char {funcname}(void);
 """
-                    % funcname
                 )
             f.write(
-                """\
-int main (int argc, char **argv) {
-    %s();
+                f"""\
+int main (int argc, char **argv) {{
+    {funcname}();
     return 0;
-}
+}}
 """
-                % funcname
             )
 
         try:
@@ -909,7 +912,7 @@ def has_function(  # noqa: C901
                 os.remove(fn)
         return True
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         """Search the specified list of directories for a static or shared
         library file 'lib' and return the full path to that file.  If
         'debug' true, look for a debugging version (if that makes sense on
@@ -952,7 +955,7 @@ def find_library_file(self, dirs, lib, debug=0):
     #   * exe_extension -
     #     extension for executable files, eg. '' or '.exe'
 
-    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
         if output_dir is None:
             output_dir = ''
         return list(
@@ -987,13 +990,13 @@ def _make_relative(base):
         # If abs, chop off leading /
         return no_drive[os.path.isabs(no_drive) :]
 
-    def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
+    def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
         assert output_dir is not None
         if strip_dir:
             basename = os.path.basename(basename)
         return os.path.join(output_dir, basename + self.shared_lib_extension)
 
-    def executable_filename(self, basename, strip_dir=0, output_dir=''):
+    def executable_filename(self, basename, strip_dir=False, output_dir=''):
         assert output_dir is not None
         if strip_dir:
             basename = os.path.basename(basename)
@@ -1003,7 +1006,7 @@ def library_filename(
         self,
         libname,
         lib_type='static',
-        strip_dir=0,
+        strip_dir=False,
         output_dir='',  # or 'shared'
     ):
         assert output_dir is not None
@@ -1032,7 +1035,7 @@ def debug_print(self, msg):
             print(msg)
 
     def warn(self, msg):
-        sys.stderr.write("warning: %s\n" % msg)
+        sys.stderr.write(f"warning: {msg}\n")
 
     def execute(self, func, args, msg=None, level=1):
         execute(func, args, msg, self.dry_run)
@@ -1077,6 +1080,10 @@ def get_default_compiler(osname=None, platform=None):
         osname = os.name
     if platform is None:
         platform = sys.platform
+    # Mingw is a special case where sys.platform is 'win32' but we
+    # want to use the 'mingw32' compiler, so check it first
+    if is_mingw():
+        return 'mingw32'
     for pattern, compiler in _default_compilers:
         if (
             re.match(pattern, platform) is not None
@@ -1125,7 +1132,7 @@ def show_compilers():
     pretty_printer.print_help("List of available compilers:")
 
 
-def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
+def new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False):
     """Generate an instance of some CCompiler subclass for the supplied
     platform/compiler combination.  'plat' defaults to 'os.name'
     (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
@@ -1145,9 +1152,9 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
 
         (module_name, class_name, long_description) = compiler_class[compiler]
     except KeyError:
-        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
+        msg = f"don't know how to compile C/C++ code on platform '{plat}'"
         if compiler is not None:
-            msg = msg + " with '%s' compiler" % compiler
+            msg = msg + f" with '{compiler}' compiler"
         raise DistutilsPlatformError(msg)
 
     try:
@@ -1157,7 +1164,7 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
         klass = vars(module)[class_name]
     except ImportError:
         raise DistutilsModuleError(
-            "can't compile C/C++ code: unable to load module '%s'" % module_name
+            f"can't compile C/C++ code: unable to load module '{module_name}'"
         )
     except KeyError:
         raise DistutilsModuleError(
@@ -1196,15 +1203,15 @@ def gen_preprocess_options(macros, include_dirs):
     for macro in macros:
         if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
             raise TypeError(
-                "bad macro definition '%s': "
-                "each element of 'macros' list must be a 1- or 2-tuple" % macro
+                f"bad macro definition '{macro}': "
+                "each element of 'macros' list must be a 1- or 2-tuple"
             )
 
         if len(macro) == 1:  # undefine this macro
-            pp_opts.append("-U%s" % macro[0])
+            pp_opts.append(f"-U{macro[0]}")
         elif len(macro) == 2:
             if macro[1] is None:  # define with no explicit value
-                pp_opts.append("-D%s" % macro[0])
+                pp_opts.append(f"-D{macro[0]}")
             else:
                 # XXX *don't* need to be clever about quoting the
                 # macro value here, because we're going to avoid the
@@ -1212,7 +1219,7 @@ def gen_preprocess_options(macros, include_dirs):
                 pp_opts.append("-D{}={}".format(*macro))
 
     for dir in include_dirs:
-        pp_opts.append("-I%s" % dir)
+        pp_opts.append(f"-I{dir}")
     return pp_opts
 
 
@@ -1245,7 +1252,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
                 lib_opts.append(lib_file)
             else:
                 compiler.warn(
-                    "no library file corresponding to '%s' found (skipping)" % lib
+                    f"no library file corresponding to '{lib}' found (skipping)"
                 )
         else:
             lib_opts.append(compiler.library_option(lib))
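
Reviewer note: the macro tuples validated by `_check_macro_definitions` are ultimately consumed by `gen_preprocess_options`, which is easy to exercise directly:

```python
# Exercising gen_preprocess_options as defined above: 2-tuples become -D
# flags, 1-tuples become -U flags, and include dirs become -I flags.
from setuptools._distutils.ccompiler import gen_preprocess_options

pp_opts = gen_preprocess_options(
    [("NDEBUG", None), ("VERSION", '"1.0"'), ("DEBUG",)],
    ["include", "/opt/local/include"],
)
print(pp_opts)
# ['-DNDEBUG', '-DVERSION="1.0"', '-UDEBUG', '-Iinclude', '-I/opt/local/include']
```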
diff --git a/setuptools/_distutils/cmd.py b/setuptools/_distutils/cmd.py
index 02dbf165f5..2bb97956ab 100644
--- a/setuptools/_distutils/cmd.py
+++ b/setuptools/_distutils/cmd.py
@@ -87,13 +87,13 @@ def __init__(self, dist):
 
         # The 'help' flag is just used for command-line parsing, so
         # none of that complicated bureaucracy is needed.
-        self.help = 0
+        self.help = False
 
         # 'finalized' records whether or not 'finalize_options()' has been
         # called.  'finalize_options()' itself should not pay attention to
         # this flag: it is the business of 'ensure_finalized()', which
         # always calls 'finalize_options()', to respect/update it.
-        self.finalized = 0
+        self.finalized = False
 
     # XXX A more explicit way to customize dry_run would be better.
     def __getattr__(self, attr):
@@ -109,7 +109,7 @@ def __getattr__(self, attr):
     def ensure_finalized(self):
         if not self.finalized:
             self.finalize_options()
-        self.finalized = 1
+        self.finalized = True
 
     # Subclasses must define:
     #   initialize_options()
@@ -135,7 +135,7 @@ def initialize_options(self):
         This method must be implemented by all command classes.
         """
         raise RuntimeError(
-            "abstract method -- subclass %s must override" % self.__class__
+            f"abstract method -- subclass {self.__class__} must override"
         )
 
     def finalize_options(self):
@@ -150,14 +150,14 @@ def finalize_options(self):
         This method must be implemented by all command classes.
         """
         raise RuntimeError(
-            "abstract method -- subclass %s must override" % self.__class__
+            f"abstract method -- subclass {self.__class__} must override"
         )
 
     def dump_options(self, header=None, indent=""):
         from distutils.fancy_getopt import longopt_xlate
 
         if header is None:
-            header = "command options for '%s':" % self.get_command_name()
+            header = f"command options for '{self.get_command_name()}':"
         self.announce(indent + header, level=logging.INFO)
         indent = indent + "  "
         for option, _, _ in self.user_options:
@@ -178,7 +178,7 @@ def run(self):
         This method must be implemented by all command classes.
         """
         raise RuntimeError(
-            "abstract method -- subclass %s must override" % self.__class__
+            f"abstract method -- subclass {self.__class__} must override"
         )
 
     def announce(self, msg, level=logging.DEBUG):
@@ -293,7 +293,7 @@ def set_undefined_options(self, src_cmd, *option_pairs):
             if getattr(self, dst_option) is None:
                 setattr(self, dst_option, getattr(src_cmd_obj, src_option))
 
-    def get_finalized_command(self, command, create=1):
+    def get_finalized_command(self, command, create=True):
         """Wrapper around Distribution's 'get_command_obj()' method: find
         (create if necessary and 'create' is true) the command object for
         'command', call its 'ensure_finalized()' method, and return the
@@ -305,7 +305,7 @@ def get_finalized_command(self, command, create=1):
 
     # XXX rename to 'get_reinitialized_command()'? (should do the
     # same in dist.py, if so)
-    def reinitialize_command(self, command, reinit_subcommands=0):
+    def reinitialize_command(self, command, reinit_subcommands=False):
         return self.distribution.reinitialize_command(command, reinit_subcommands)
 
     def run_command(self, command):
@@ -340,7 +340,13 @@ def mkpath(self, name, mode=0o777):
         dir_util.mkpath(name, mode, dry_run=self.dry_run)
 
     def copy_file(
-        self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
+        self,
+        infile,
+        outfile,
+        preserve_mode=True,
+        preserve_times=True,
+        link=None,
+        level=1,
     ):
         """Copy a file respecting verbose, dry-run and force flags.  (The
         former two default to whatever is in the Distribution object, and
@@ -359,9 +365,9 @@ def copy_tree(
         self,
         infile,
         outfile,
-        preserve_mode=1,
-        preserve_times=1,
-        preserve_symlinks=0,
+        preserve_mode=True,
+        preserve_times=True,
+        preserve_symlinks=False,
         level=1,
     ):
         """Copy an entire directory tree respecting verbose, dry-run,
@@ -381,7 +387,7 @@ def move_file(self, src, dst, level=1):
         """Move a file respecting dry-run flag."""
         return file_util.move_file(src, dst, dry_run=self.dry_run)
 
-    def spawn(self, cmd, search_path=1, level=1):
+    def spawn(self, cmd, search_path=True, level=1):
         """Spawn an external command respecting dry-run flag."""
         from distutils.spawn import spawn
 
@@ -412,7 +418,7 @@ def make_file(
         timestamp checks.
         """
         if skip_msg is None:
-            skip_msg = "skipping %s (inputs unchanged)" % outfile
+            skip_msg = f"skipping {outfile} (inputs unchanged)"
 
         # Allow 'infiles' to be a single string
         if isinstance(infiles, str):
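
Reviewer note: for context on the abstract hooks above, a minimal concrete command under the new boolean conventions might look like this (the command name and option are illustrative, not part of distutils):

```python
# Hypothetical minimal Command subclass showing the three required
# overrides and the new boolean-flag style.
import logging

from setuptools._distutils.cmd import Command

class frobnicate(Command):
    description = "illustrative no-op command"
    user_options = [('loud', None, "print what would be done")]
    boolean_options = ['loud']

    def initialize_options(self):
        self.loud = False            # booleans, not 0/1

    def finalize_options(self):
        pass                         # nothing to derive for this toy command

    def run(self):
        if self.loud:
            self.announce("frobnicating", level=logging.INFO)
```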
diff --git a/setuptools/_distutils/command/__init__.py b/setuptools/_distutils/command/__init__.py
index 028dcfa0fc..1e8fbe60c2 100644
--- a/setuptools/_distutils/command/__init__.py
+++ b/setuptools/_distutils/command/__init__.py
@@ -3,7 +3,7 @@
 Package containing implementation of all the standard Distutils
 commands."""
 
-__all__ = [  # noqa: F822
+__all__ = [
     'build',
     'build_py',
     'build_ext',
diff --git a/setuptools/_distutils/command/bdist.py b/setuptools/_distutils/command/bdist.py
index ade98445ba..1738f4e56b 100644
--- a/setuptools/_distutils/command/bdist.py
+++ b/setuptools/_distutils/command/bdist.py
@@ -41,7 +41,7 @@ class bdist(Command):
             'plat-name=',
             'p',
             "platform name to embed in generated filenames "
-            "(default: %s)" % get_platform(),
+            f"[default: {get_platform()}]",
         ),
         ('formats=', None, "formats for distribution (comma-separated list)"),
         (
@@ -94,7 +94,7 @@ def initialize_options(self):
         self.plat_name = None
         self.formats = None
         self.dist_dir = None
-        self.skip_build = 0
+        self.skip_build = False
         self.group = None
         self.owner = None
 
@@ -120,7 +120,7 @@ def finalize_options(self):
             except KeyError:
                 raise DistutilsPlatformError(
                     "don't know how to create built distributions "
-                    "on platform %s" % os.name
+                    f"on platform {os.name}"
                 )
 
         if self.dist_dir is None:
@@ -133,7 +133,7 @@ def run(self):
             try:
                 commands.append(self.format_commands[format][0])
             except KeyError:
-                raise DistutilsOptionError("invalid format '%s'" % format)
+                raise DistutilsOptionError(f"invalid format '{format}'")
 
         # Reinitialize and run each command.
         for i in range(len(self.formats)):
@@ -150,5 +150,5 @@ def run(self):
             # If we're going to need to run this command again, tell it to
             # keep its temporary files around so subsequent runs go faster.
             if cmd_name in commands[i + 1 :]:
-                sub_cmd.keep_temp = 1
+                sub_cmd.keep_temp = True
             self.run_command(cmd_name)
diff --git a/setuptools/_distutils/command/bdist_dumb.py b/setuptools/_distutils/command/bdist_dumb.py
index 06502d201e..67b0c8cce9 100644
--- a/setuptools/_distutils/command/bdist_dumb.py
+++ b/setuptools/_distutils/command/bdist_dumb.py
@@ -23,7 +23,7 @@ class bdist_dumb(Command):
             'plat-name=',
             'p',
             "platform name to embed in generated filenames "
-            "(default: %s)" % get_platform(),
+            f"[default: {get_platform()}]",
         ),
         (
             'format=',
@@ -33,15 +33,14 @@ class bdist_dumb(Command):
         (
             'keep-temp',
             'k',
-            "keep the pseudo-installation tree around after "
-            + "creating the distribution archive",
+            "keep the pseudo-installation tree around after creating the distribution archive",
         ),
         ('dist-dir=', 'd', "directory to put final built distributions in"),
         ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
         (
             'relative',
             None,
-            "build the archive using relative paths (default: false)",
+            "build the archive using relative paths [default: false]",
         ),
         (
             'owner=',
@@ -63,10 +62,10 @@ def initialize_options(self):
         self.bdist_dir = None
         self.plat_name = None
         self.format = None
-        self.keep_temp = 0
+        self.keep_temp = False
         self.dist_dir = None
         self.skip_build = None
-        self.relative = 0
+        self.relative = False
         self.owner = None
         self.group = None
 
@@ -81,7 +80,7 @@ def finalize_options(self):
             except KeyError:
                 raise DistutilsPlatformError(
                     "don't know how to create dumb built distributions "
-                    "on platform %s" % os.name
+                    f"on platform {os.name}"
                 )
 
         self.set_undefined_options(
@@ -95,10 +94,10 @@ def run(self):
         if not self.skip_build:
             self.run_command('build')
 
-        install = self.reinitialize_command('install', reinit_subcommands=1)
+        install = self.reinitialize_command('install', reinit_subcommands=True)
         install.root = self.bdist_dir
         install.skip_build = self.skip_build
-        install.warn_dir = 0
+        install.warn_dir = False
 
         log.info("installing to %s", self.bdist_dir)
         self.run_command('install')
@@ -116,7 +115,7 @@ def run(self):
             ):
                 raise DistutilsPlatformError(
                     "can't make a dumb built distribution where "
-                    f"base and platbase are different ({repr(install.install_base)}, {repr(install.install_platbase)})"
+                    f"base and platbase are different ({install.install_base!r}, {install.install_platbase!r})"
                 )
             else:
                 archive_root = os.path.join(
diff --git a/setuptools/_distutils/command/bdist_rpm.py b/setuptools/_distutils/command/bdist_rpm.py
index 649968a5eb..d443eb09b5 100644
--- a/setuptools/_distutils/command/bdist_rpm.py
+++ b/setuptools/_distutils/command/bdist_rpm.py
@@ -40,7 +40,7 @@ class bdist_rpm(Command):
             'python=',
             None,
             "path to Python interpreter to hard-code in the .spec file "
-            "(default: \"python\")",
+            "[default: \"python\"]",
         ),
         (
             'fix-python',
@@ -187,13 +187,13 @@ def initialize_options(self):
         self.build_requires = None
         self.obsoletes = None
 
-        self.keep_temp = 0
-        self.use_rpm_opt_flags = 1
-        self.rpm3_mode = 1
-        self.no_autoreq = 0
+        self.keep_temp = False
+        self.use_rpm_opt_flags = True
+        self.rpm3_mode = True
+        self.no_autoreq = False
 
         self.force_arch = None
-        self.quiet = 0
+        self.quiet = False
 
     def finalize_options(self):
         self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
@@ -214,7 +214,7 @@ def finalize_options(self):
 
         if os.name != 'posix':
             raise DistutilsPlatformError(
-                "don't know how to create RPM distributions on platform %s" % os.name
+                f"don't know how to create RPM distributions on platform {os.name}"
             )
         if self.binary_only and self.source_only:
             raise DistutilsOptionError(
@@ -223,7 +223,7 @@ def finalize_options(self):
 
         # don't pass CFLAGS to pure python distributions
         if not self.distribution.has_ext_modules():
-            self.use_rpm_opt_flags = 0
+            self.use_rpm_opt_flags = False
 
         self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
         self.finalize_package_data()
@@ -295,9 +295,9 @@ def run(self):  # noqa: C901
 
         # Spec file goes into 'dist_dir' if '--spec-only specified',
         # build/rpm. otherwise.
-        spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name())
+        spec_path = os.path.join(spec_dir, f"{self.distribution.get_name()}.spec")
         self.execute(
-            write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path
+            write_file, (spec_path, self._make_spec_file()), f"writing '{spec_path}'"
         )
 
         if self.spec_only:  # stop if requested
@@ -322,7 +322,7 @@ def run(self):  # noqa: C901
             if os.path.exists(self.icon):
                 self.copy_file(self.icon, source_dir)
             else:
-                raise DistutilsFileError("icon file '%s' does not exist" % self.icon)
+                raise DistutilsFileError(f"icon file '{self.icon}' does not exist")
 
         # build package
         log.info("building RPMs")
@@ -334,9 +334,9 @@ def run(self):  # noqa: C901
             rpm_cmd.append('-bb')
         else:
             rpm_cmd.append('-ba')
-        rpm_cmd.extend(['--define', '__python %s' % self.python])
+        rpm_cmd.extend(['--define', f'__python {self.python}'])
         if self.rpm3_mode:
-            rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)])
+            rpm_cmd.extend(['--define', f'_topdir {os.path.abspath(self.rpm_base)}'])
         if not self.keep_temp:
             rpm_cmd.append('--clean')
 
@@ -370,7 +370,7 @@ def run(self):  # noqa: C901
 
             status = out.close()
             if status:
-                raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
+                raise DistutilsExecError(f"Failed to execute: {q_cmd!r}")
 
         finally:
             out.close()
@@ -426,7 +426,7 @@ def _make_spec_file(self):  # noqa: C901
         # normalizing the whitespace to simplify the test for whether the
         # invocation of brp-python-bytecompile passes in __python):
         vendor_hook = '\n'.join([
-            '  %s \\' % line.strip() for line in vendor_hook.splitlines()
+            f'  {line.strip()} \\' for line in vendor_hook.splitlines()
         ])
         problem = "brp-python-bytecompile \\\n"
         fixed = "brp-python-bytecompile %{__python} \\\n"
@@ -468,7 +468,7 @@ def _make_spec_file(self):  # noqa: C901
             if not self.distribution.has_ext_modules():
                 spec_file.append('BuildArch: noarch')
         else:
-            spec_file.append('BuildArch: %s' % self.force_arch)
+            spec_file.append(f'BuildArch: {self.force_arch}')
 
         for field in (
             'Vendor',
@@ -518,7 +518,7 @@ def _make_spec_file(self):  # noqa: C901
         # rpm scripts
         # figure out default build script
         def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}"
-        def_build = "%s build" % def_setup_call
+        def_build = f"{def_setup_call} build"
         if self.use_rpm_opt_flags:
             def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
 
@@ -528,9 +528,7 @@ def _make_spec_file(self):  # noqa: C901
         # that we open and interpolate into the spec file, but the defaults
         # are just text that we drop in as-is.  Hmmm.
 
-        install_cmd = (
-            '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
-        ) % def_setup_call
+        install_cmd = f'{def_setup_call} install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES'
 
         script_options = [
             ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
diff --git a/setuptools/_distutils/command/build.py b/setuptools/_distutils/command/build.py
index d18ed503e3..caf55073af 100644
--- a/setuptools/_distutils/command/build.py
+++ b/setuptools/_distutils/command/build.py
@@ -4,6 +4,7 @@
 
 import os
 import sys
+import sysconfig
 
 from ..core import Command
 from ..errors import DistutilsOptionError
@@ -26,16 +27,14 @@ class build(Command):
         (
             'build-lib=',
             None,
-            "build directory for all distribution (defaults to either "
-            + "build-purelib or build-platlib",
+            "build directory for all distribution (defaults to either build-purelib or build-platlib",
         ),
         ('build-scripts=', None, "build directory for scripts"),
         ('build-temp=', 't', "temporary build directory"),
         (
             'plat-name=',
             'p',
-            "platform name to build for, if supported "
-            "(default: %s)" % get_platform(),
+            f"platform name to build for, if supported [default: {get_platform()}]",
         ),
         ('compiler=', 'c', "specify the compiler type"),
         ('parallel=', 'j', "number of parallel build jobs"),
@@ -62,7 +61,7 @@ def initialize_options(self):
         self.compiler = None
         self.plat_name = None
         self.debug = None
-        self.force = 0
+        self.force = False
         self.executable = None
         self.parallel = None
 
@@ -81,6 +80,10 @@ def finalize_options(self):  # noqa: C901
 
         plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}"
 
+        # Python 3.13+ with --disable-gil shouldn't share build directories
+        if sysconfig.get_config_var('Py_GIL_DISABLED'):
+            plat_specifier += 't'
+
         # Make it so Python 2.x and Python 2.x with --with-pydebug don't
         # share the same build directories. Doing so confuses the build
         # process for C modules
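
Reviewer note: the free-threading change only affects the build-directory suffix; the sketch below reproduces the naming logic, using `sysconfig.get_platform()` as a stand-in for distutils' own `get_platform()`:

```python
# Sketch of how the plat_specifier above is formed; actual values depend
# on the interpreter and platform.
import sys
import sysconfig

plat_specifier = f".{sysconfig.get_platform()}-{sys.implementation.cache_tag}"
if sysconfig.get_config_var('Py_GIL_DISABLED'):
    plat_specifier += 't'   # free-threaded builds get their own directories

print('build/lib' + plat_specifier)
# e.g. 'build/lib.linux-x86_64-cpython-313' or '...-cpython-313t'
```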
diff --git a/setuptools/_distutils/command/build_clib.py b/setuptools/_distutils/command/build_clib.py
index 360575d0cb..a600d09373 100644
--- a/setuptools/_distutils/command/build_clib.py
+++ b/setuptools/_distutils/command/build_clib.py
@@ -57,7 +57,7 @@ def initialize_options(self):
         self.define = None
         self.undef = None
         self.debug = None
-        self.force = 0
+        self.force = False
         self.compiler = None
 
     def finalize_options(self):
@@ -138,8 +138,8 @@ def check_library_list(self, libraries):
 
             if '/' in name or (os.sep != '/' and os.sep in name):
                 raise DistutilsSetupError(
-                    "bad library name '%s': "
-                    "may not contain directory separators" % lib[0]
+                    f"bad library name '{lib[0]}': "
+                    "may not contain directory separators"
                 )
 
             if not isinstance(build_info, dict):
@@ -166,9 +166,9 @@ def get_source_files(self):
             sources = build_info.get('sources')
             if sources is None or not isinstance(sources, (list, tuple)):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '%s'), "
+                    f"in 'libraries' option (library '{lib_name}'), "
                     "'sources' must be present and must be "
-                    "a list of source filenames" % lib_name
+                    "a list of source filenames"
                 )
 
             filenames.extend(sources)
@@ -179,9 +179,9 @@ def build_libraries(self, libraries):
             sources = build_info.get('sources')
             if sources is None or not isinstance(sources, (list, tuple)):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '%s'), "
+                    f"in 'libraries' option (library '{lib_name}'), "
                     "'sources' must be present and must be "
-                    "a list of source filenames" % lib_name
+                    "a list of source filenames"
                 )
             sources = list(sources)
 
diff --git a/setuptools/_distutils/command/build_ext.py b/setuptools/_distutils/command/build_ext.py
index 06d949aff1..18e1601a28 100644
--- a/setuptools/_distutils/command/build_ext.py
+++ b/setuptools/_distutils/command/build_ext.py
@@ -23,7 +23,7 @@
 )
 from ..extension import Extension
 from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
-from ..util import get_platform
+from ..util import get_platform, is_mingw
 
 # An extension name is just a dot-separated list of Python NAMEs (ie.
 # the same as a fully-qualified module name).
@@ -57,7 +57,7 @@ class build_ext(Command):
     #     takes care of both command-line and client options
     #     in between initialize_options() and finalize_options())
 
-    sep_by = " (separated by '%s')" % os.pathsep
+    sep_by = f" (separated by '{os.pathsep}')"
     user_options = [
         ('build-lib=', 'b', "directory for compiled extension modules"),
         ('build-temp=', 't', "directory for temporary files (build by-products)"),
@@ -65,13 +65,13 @@ class build_ext(Command):
             'plat-name=',
             'p',
             "platform name to cross-compile for, if supported "
-            "(default: %s)" % get_platform(),
+            f"[default: {get_platform()}]",
         ),
         (
             'inplace',
             'i',
             "ignore build-lib and put compiled extensions into the source "
-            + "directory alongside your pure Python modules",
+            "directory alongside your pure Python modules",
         ),
         (
             'include-dirs=',
@@ -109,7 +109,7 @@ def initialize_options(self):
         self.build_lib = None
         self.plat_name = None
         self.build_temp = None
-        self.inplace = 0
+        self.inplace = False
         self.package = None
 
         self.include_dirs = None
@@ -175,7 +175,7 @@ def finalize_options(self):  # noqa: C901
         # Make sure Python's include directories (for Python.h, pyconfig.h,
         # etc.) are in the include search path.
         py_include = sysconfig.get_python_inc()
-        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+        plat_py_include = sysconfig.get_python_inc(plat_specific=True)
         if self.include_dirs is None:
             self.include_dirs = self.distribution.include_dirs or []
         if isinstance(self.include_dirs, str):
@@ -212,7 +212,7 @@ def finalize_options(self):  # noqa: C901
         # for extensions under windows use different directories
         # for Release and Debug builds.
         # also Python's library directory must be appended to library_dirs
-        if os.name == 'nt':
+        if os.name == 'nt' and not is_mingw():
             # the 'libs' directory is for binary installs - we assume that
             # must be the *native* platform.  But we don't really support
             # cross-compiling via a binary install anyway, so we let it go.
@@ -517,9 +517,9 @@ def build_extension(self, ext):
         sources = ext.sources
         if sources is None or not isinstance(sources, (list, tuple)):
             raise DistutilsSetupError(
-                "in 'ext_modules' option (extension '%s'), "
+                f"in 'ext_modules' option (extension '{ext.name}'), "
                 "'sources' must be present and must be "
-                "a list of source filenames" % ext.name
+                "a list of source filenames"
             )
         # sort to make the resulting .so file build reproducible
         sources = sorted(sources)
@@ -663,7 +663,7 @@ def find_swig(self):
             # Windows (or so I presume!).  If we find it there, great;
             # if not, act like Unix and assume it's in the PATH.
             for vers in ("1.3", "1.2", "1.1"):
-                fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+                fn = os.path.join(f"c:\\swig{vers}", "swig.exe")
                 if os.path.isfile(fn):
                     return fn
             else:
@@ -671,7 +671,7 @@ def find_swig(self):
         else:
             raise DistutilsPlatformError(
                 "I don't know how to find (much less run) SWIG "
-                "on platform '%s'" % os.name
+                f"on platform '{os.name}'"
             )
 
     # -- Name generators -----------------------------------------------
@@ -754,7 +754,7 @@ def get_libraries(self, ext):  # noqa: C901
         # pyconfig.h that MSVC groks.  The other Windows compilers all seem
         # to need it mentioned explicitly, though, so that's what we do.
         # Append '_d' to the python import library on debug builds.
-        if sys.platform == "win32":
+        if sys.platform == "win32" and not is_mingw():
             from .._msvccompiler import MSVCCompiler
 
             if not isinstance(self.compiler, MSVCCompiler):
@@ -784,7 +784,7 @@ def get_libraries(self, ext):  # noqa: C901
                 # A native build on an Android device or on Cygwin
                 if hasattr(sys, 'getandroidapilevel'):
                     link_libpython = True
-                elif sys.platform == 'cygwin':
+                elif sys.platform == 'cygwin' or is_mingw():
                     link_libpython = True
                 elif '_PYTHON_HOST_PLATFORM' in os.environ:
                     # We are cross-compiling for one of the relevant platforms
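
Reviewer note: `is_mingw()` is imported from `..util` but not shown in this patch; presumably it detects a MinGW-hosted CPython. A rough sketch of such a check (an assumption; the real helper in `setuptools/_distutils/util.py` may differ):

```python
# Assumed behavior of the is_mingw() helper referenced above; the real
# implementation may differ.
import sys
import sysconfig

def is_mingw_sketch() -> bool:
    # MinGW builds of CPython report sys.platform == 'win32' but a
    # mingw-prefixed sysconfig platform string.
    return sys.platform == 'win32' and sysconfig.get_platform().startswith('mingw')
```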
diff --git a/setuptools/_distutils/command/build_py.py b/setuptools/_distutils/command/build_py.py
index 56e6fa2e66..49d710346e 100644
--- a/setuptools/_distutils/command/build_py.py
+++ b/setuptools/_distutils/command/build_py.py
@@ -38,7 +38,7 @@ def initialize_options(self):
         self.package = None
         self.package_data = None
         self.package_dir = None
-        self.compile = 0
+        self.compile = False
         self.optimize = 0
         self.force = None
 
@@ -95,7 +95,7 @@ def run(self):
             self.build_packages()
             self.build_package_data()
 
-        self.byte_compile(self.get_outputs(include_bytecode=0))
+        self.byte_compile(self.get_outputs(include_bytecode=False))
 
     def get_data_files(self):
         """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
@@ -191,12 +191,12 @@ def check_package(self, package, package_dir):
         if package_dir != "":
             if not os.path.exists(package_dir):
                 raise DistutilsFileError(
-                    "package directory '%s' does not exist" % package_dir
+                    f"package directory '{package_dir}' does not exist"
                 )
             if not os.path.isdir(package_dir):
                 raise DistutilsFileError(
-                    "supposed package directory '%s' exists, "
-                    "but is not a directory" % package_dir
+                    f"supposed package directory '{package_dir}' exists, "
+                    "but is not a directory"
                 )
 
         # Directories without __init__.py are namespace packages (PEP 420).
@@ -228,7 +228,7 @@ def find_package_modules(self, package, package_dir):
                 module = os.path.splitext(os.path.basename(f))[0]
                 modules.append((package, module, f))
             else:
-                self.debug_print("excluding %s" % setup_script)
+                self.debug_print(f"excluding {setup_script}")
         return modules
 
     def find_modules(self):
@@ -264,7 +264,7 @@ def find_modules(self):
                 (package_dir, checked) = packages[package]
             except KeyError:
                 package_dir = self.get_package_dir(package)
-                checked = 0
+                checked = False
 
             if not checked:
                 init_py = self.check_package(package, package_dir)
@@ -306,7 +306,7 @@ def get_module_outfile(self, build_dir, package, module):
         outfile_path = [build_dir] + list(package) + [module + ".py"]
         return os.path.join(*outfile_path)
 
-    def get_outputs(self, include_bytecode=1):
+    def get_outputs(self, include_bytecode=True):
         modules = self.find_all_modules()
         outputs = []
         for package, module, _module_file in modules:
@@ -347,7 +347,7 @@ def build_module(self, module, module_file, package):
         outfile = self.get_module_outfile(self.build_lib, package, module)
         dir = os.path.dirname(outfile)
         self.mkpath(dir)
-        return self.copy_file(module_file, outfile, preserve_mode=0)
+        return self.copy_file(module_file, outfile, preserve_mode=False)
 
     def build_modules(self):
         modules = self.find_modules()
diff --git a/setuptools/_distutils/command/build_scripts.py b/setuptools/_distutils/command/build_scripts.py
index 5f3902a027..9e5963c243 100644
--- a/setuptools/_distutils/command/build_scripts.py
+++ b/setuptools/_distutils/command/build_scripts.py
@@ -96,7 +96,7 @@ def _copy_script(self, script, outfiles, updated_files):  # noqa: C901
         else:
             first_line = f.readline()
             if not first_line:
-                self.warn("%s is an empty file (skipping)" % script)
+                self.warn(f"{script} is an empty file (skipping)")
                 return
 
             shebang_match = shebang_pattern.match(first_line)
diff --git a/setuptools/_distutils/command/check.py b/setuptools/_distutils/command/check.py
index 28599e109c..58b3f949f9 100644
--- a/setuptools/_distutils/command/check.py
+++ b/setuptools/_distutils/command/check.py
@@ -21,7 +21,7 @@ def __init__(
             report_level,
             halt_level,
             stream=None,
-            debug=0,
+            debug=False,
             encoding='ascii',
             error_handler='replace',
         ):
@@ -58,9 +58,9 @@ class check(Command):
 
     def initialize_options(self):
         """Sets default values for options."""
-        self.restructuredtext = 0
+        self.restructuredtext = False
-        self.metadata = 1
+        self.metadata = True
-        self.strict = 0
+        self.strict = False
         self._warnings = 0
 
     def finalize_options(self):
@@ -106,7 +106,7 @@ def check_metadata(self):
                 missing.append(attr)
 
         if missing:
-            self.warn("missing required meta-data: %s" % ', '.join(missing))
+            self.warn("missing required meta-data: {}".format(', '.join(missing)))
 
     def check_restructuredtext(self):
         """Checks if the long string fields are reST-compliant."""
@@ -147,7 +147,7 @@ def _check_rst_data(self, data):
         except AttributeError as e:
             reporter.messages.append((
                 -1,
-                'Could not finish the parsing: %s.' % e,
+                f'Could not finish the parsing: {e}.',
                 '',
                 {},
             ))
diff --git a/setuptools/_distutils/command/clean.py b/setuptools/_distutils/command/clean.py
index 4167a83fb3..fb54a60ed4 100644
--- a/setuptools/_distutils/command/clean.py
+++ b/setuptools/_distutils/command/clean.py
@@ -14,17 +14,17 @@
 class clean(Command):
     description = "clean up temporary files from 'build' command"
     user_options = [
-        ('build-base=', 'b', "base build directory (default: 'build.build-base')"),
+        ('build-base=', 'b', "base build directory [default: 'build.build-base']"),
         (
             'build-lib=',
             None,
-            "build directory for all modules (default: 'build.build-lib')",
+            "build directory for all modules [default: 'build.build-lib']",
         ),
-        ('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"),
+        ('build-temp=', 't', "temporary build directory [default: 'build.build-temp']"),
         (
             'build-scripts=',
             None,
-            "build directory for scripts (default: 'build.build-scripts')",
+            "build directory for scripts [default: 'build.build-scripts']",
         ),
         ('bdist-base=', None, "temporary directory for built distributions"),
         ('all', 'a', "remove all build output, not just temporary by-products"),
diff --git a/setuptools/_distutils/command/config.py b/setuptools/_distutils/command/config.py
index d4b2b0a362..fe83c2924d 100644
--- a/setuptools/_distutils/command/config.py
+++ b/setuptools/_distutils/command/config.py
@@ -94,7 +94,7 @@ def _check_compiler(self):
 
         if not isinstance(self.compiler, CCompiler):
             self.compiler = new_compiler(
-                compiler=self.compiler, dry_run=self.dry_run, force=1
+                compiler=self.compiler, dry_run=self.dry_run, force=True
             )
             customize_compiler(self.compiler)
             if self.include_dirs:
@@ -109,7 +109,7 @@ def _gen_temp_sourcefile(self, body, headers, lang):
         with open(filename, "w", encoding='utf-8') as file:
             if headers:
                 for header in headers:
-                    file.write("#include <%s>\n" % header)
+                    file.write(f"#include <{header}>\n")
                 file.write("\n")
             file.write(body)
             if body[-1] != "\n":
@@ -126,7 +126,7 @@ def _preprocess(self, body, headers, include_dirs, lang):
     def _compile(self, body, headers, include_dirs, lang):
         src = self._gen_temp_sourcefile(body, headers, lang)
         if self.dump_source:
-            dump_file(src, "compiling '%s':" % src)
+            dump_file(src, f"compiling '{src}':")
         (obj,) = self.compiler.object_filenames([src])
         self.temp_files.extend([src, obj])
         self.compiler.compile([src], include_dirs=include_dirs)
@@ -292,8 +292,8 @@ def check_func(
         include_dirs=None,
         libraries=None,
         library_dirs=None,
-        decl=0,
-        call=0,
+        decl=False,
+        call=False,
     ):
         """Determine if function 'func' is available by constructing a
         source file that refers to 'func', and compiles and links it.
@@ -311,12 +311,12 @@ def check_func(
         self._check_compiler()
         body = []
         if decl:
-            body.append("int %s ();" % func)
+            body.append(f"int {func} ();")
         body.append("int main () {")
         if call:
-            body.append("  %s();" % func)
+            body.append(f"  {func}();")
         else:
-            body.append("  %s;" % func)
+            body.append(f"  {func};")
         body.append("}")
         body = "\n".join(body) + "\n"
 
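Reviewer note: with `decl`/`call` now booleans, `check_func('pause', decl=True, call=True)` builds and tries to link a stub like the one below; this snippet reconstructs the generated source from the body-building code above:

```python
# Reconstructing the C stub that check_func(decl=True, call=True) emits
# for func='pause', following the body-building code above.
func = 'pause'
body = []
body.append(f"int {func} ();")     # decl=True
body.append("int main () {")
body.append(f"  {func}();")        # call=True
body.append("}")
print("\n".join(body) + "\n")
# int pause ();
# int main () {
#   pause();
# }
```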
diff --git a/setuptools/_distutils/command/install.py b/setuptools/_distutils/command/install.py
index 8e920be4de..1fc09eef89 100644
--- a/setuptools/_distutils/command/install.py
+++ b/setuptools/_distutils/command/install.py
@@ -193,8 +193,7 @@ class install(Command):
         (
             'install-platbase=',
             None,
-            "base installation directory for platform-specific files "
-            + "(instead of --exec-prefix or --home)",
+            "base installation directory for platform-specific files (instead of --exec-prefix or --home)",
         ),
         ('root=', None, "install everything relative to this alternate root directory"),
         # Or, explicitly set the installation scheme
@@ -211,8 +210,7 @@ class install(Command):
         (
             'install-lib=',
             None,
-            "installation directory for all module distributions "
-            + "(overrides --install-purelib and --install-platlib)",
+            "installation directory for all module distributions (overrides --install-purelib and --install-platlib)",
         ),
         ('install-headers=', None, "installation directory for C/C++ headers"),
         ('install-scripts=', None, "installation directory for Python scripts"),
@@ -245,7 +243,7 @@ class install(Command):
         user_options.append((
             'user',
             None,
-            "install in user site-package '%s'" % USER_SITE,
+            f"install in user site-package '{USER_SITE}'",
         ))
         boolean_options.append('user')
 
@@ -258,7 +256,7 @@ def initialize_options(self):
         self.prefix = None
         self.exec_prefix = None
         self.home = None
-        self.user = 0
+        self.user = False
 
         # These select only the installation base; it's up to the user to
         # specify the installation scheme (currently, that means supplying
@@ -293,7 +291,7 @@ def initialize_options(self):
         # 'install_path_file' is always true unless some outsider meddles
         # with it.
         self.extra_path = None
-        self.install_path_file = 1
+        self.install_path_file = True
 
         # 'force' forces installation, even if target files are not
         # out-of-date.  'skip_build' skips running the "build" command,
@@ -301,9 +299,9 @@ def initialize_options(self):
         # a user option, it's just there so the bdist_* commands can turn
         # it off) determines whether we warn about installing to a
         # directory not in sys.path.
-        self.force = 0
-        self.skip_build = 0
-        self.warn_dir = 1
+        self.force = False
+        self.skip_build = False
+        self.warn_dir = True
 
         # These are only here as a conduit from the 'build' command to the
         # 'install_*' commands that do the real work.  ('build_base' isn't
@@ -348,8 +346,7 @@ def finalize_options(self):  # noqa: C901
             self.install_base or self.install_platbase
         ):
             raise DistutilsOptionError(
-                "must supply either prefix/exec-prefix/home or "
-                + "install-base/install-platbase -- not both"
+                "must supply either prefix/exec-prefix/home or install-base/install-platbase -- not both"
             )
 
         if self.home and (self.prefix or self.exec_prefix):
@@ -600,7 +597,7 @@ def finalize_other(self):
                 self.select_scheme(os.name)
             except KeyError:
                 raise DistutilsPlatformError(
-                    "I don't know how to install stuff on '%s'" % os.name
+                    f"I don't know how to install stuff on '{os.name}'"
                 )
 
     def select_scheme(self, name):
@@ -685,7 +682,7 @@ def create_home_path(self):
         home = convert_path(os.path.expanduser("~"))
         for _name, path in self.config_vars.items():
             if str(path).startswith(home) and not os.path.isdir(path):
-                self.debug_print("os.makedirs('%s', 0o700)" % path)
+                self.debug_print(f"os.makedirs('{path}', 0o700)")
                 os.makedirs(path, 0o700)
 
     # -- Command execution methods -------------------------------------
@@ -720,7 +717,7 @@ def run(self):
             self.execute(
                 write_file,
                 (self.record, outputs),
-                "writing list of installed files to '%s'" % self.record,
+                f"writing list of installed files to '{self.record}'",
             )
 
         sys_path = map(os.path.normpath, sys.path)
@@ -745,10 +742,10 @@ def create_path_file(self):
         filename = os.path.join(self.install_libbase, self.path_file + ".pth")
         if self.install_path_file:
             self.execute(
-                write_file, (filename, [self.extra_dirs]), "creating %s" % filename
+                write_file, (filename, [self.extra_dirs]), f"creating {filename}"
             )
         else:
-            self.warn("path file '%s' not created" % filename)
+            self.warn(f"path file '{filename}' not created")
 
     # -- Reporting methods ---------------------------------------------
 
diff --git a/setuptools/_distutils/command/install_data.py b/setuptools/_distutils/command/install_data.py
index b63a1af25e..624c0b901b 100644
--- a/setuptools/_distutils/command/install_data.py
+++ b/setuptools/_distutils/command/install_data.py
@@ -19,7 +19,7 @@ class install_data(Command):
             'install-dir=',
             'd',
             "base directory for installing data files "
-            "(default: installation base dir)",
+            "[default: installation base dir]",
         ),
         ('root=', None, "install everything relative to this alternate root directory"),
         ('force', 'f', "force installation (overwrite existing files)"),
@@ -31,9 +31,9 @@ def initialize_options(self):
         self.install_dir = None
         self.outfiles = []
         self.root = None
-        self.force = 0
+        self.force = False
         self.data_files = self.distribution.data_files
-        self.warn_dir = 1
+        self.warn_dir = True
 
     def finalize_options(self):
         self.set_undefined_options(
diff --git a/setuptools/_distutils/command/install_headers.py b/setuptools/_distutils/command/install_headers.py
index 085272c1a2..fbb3b242ea 100644
--- a/setuptools/_distutils/command/install_headers.py
+++ b/setuptools/_distutils/command/install_headers.py
@@ -19,7 +19,7 @@ class install_headers(Command):
 
     def initialize_options(self):
         self.install_dir = None
-        self.force = 0
+        self.force = False
         self.outfiles = []
 
     def finalize_options(self):
diff --git a/setuptools/_distutils/command/install_lib.py b/setuptools/_distutils/command/install_lib.py
index b1f346f018..54a12d38a8 100644
--- a/setuptools/_distutils/command/install_lib.py
+++ b/setuptools/_distutils/command/install_lib.py
@@ -54,7 +54,7 @@ def initialize_options(self):
         # let the 'install' command dictate our installation directory
         self.install_dir = None
         self.build_dir = None
-        self.force = 0
+        self.force = False
         self.compile = None
         self.optimize = None
         self.skip_build = None
@@ -114,7 +114,7 @@ def install(self):
             outfiles = self.copy_tree(self.build_dir, self.install_dir)
         else:
             self.warn(
-                "'%s' does not exist -- no Python modules to install" % self.build_dir
+                f"'{self.build_dir}' does not exist -- no Python modules to install"
             )
             return
         return outfiles
diff --git a/setuptools/_distutils/command/install_scripts.py b/setuptools/_distutils/command/install_scripts.py
index e66b13a16d..bb43387fb8 100644
--- a/setuptools/_distutils/command/install_scripts.py
+++ b/setuptools/_distutils/command/install_scripts.py
@@ -26,7 +26,7 @@ class install_scripts(Command):
 
     def initialize_options(self):
         self.install_dir = None
-        self.force = 0
+        self.force = False
         self.build_dir = None
         self.skip_build = None
 
diff --git a/setuptools/_distutils/command/register.py b/setuptools/_distutils/command/register.py
index ee6c54daba..c1acd27b54 100644
--- a/setuptools/_distutils/command/register.py
+++ b/setuptools/_distutils/command/register.py
@@ -37,8 +37,8 @@ class register(PyPIRCCommand):
 
     def initialize_options(self):
         PyPIRCCommand.initialize_options(self)
-        self.list_classifiers = 0
-        self.strict = 0
+        self.list_classifiers = False
+        self.strict = False
 
     def finalize_options(self):
         PyPIRCCommand.finalize_options(self)
@@ -74,7 +74,7 @@ def check_metadata(self):
         check = self.distribution.get_command_obj('check')
         check.ensure_finalized()
         check.strict = self.strict
-        check.restructuredtext = 1
+        check.restructuredtext = True
         check.run()
 
     def _set_config(self):
@@ -88,7 +88,7 @@ def _set_config(self):
             self.has_config = True
         else:
             if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
-                raise ValueError('%s not found in .pypirc' % self.repository)
+                raise ValueError(f'{self.repository} not found in .pypirc')
             if self.repository == 'pypi':
                 self.repository = self.DEFAULT_REPOSITORY
             self.has_config = False
@@ -192,7 +192,7 @@ def send_metadata(self):  # noqa: C901
                         logging.INFO,
                     )
                     self.announce(
-                        '(the login will be stored in %s)' % self._get_rc_file(),
+                        f'(the login will be stored in {self._get_rc_file()})',
                         logging.INFO,
                     )
                     choice = 'X'
@@ -225,7 +225,7 @@ def send_metadata(self):  # noqa: C901
                 log.info('Server response (%s): %s', code, result)
             else:
                 log.info('You will receive an email shortly.')
-                log.info('Follow the instructions in it to ' 'complete registration.')
+                log.info('Follow the instructions in it to complete registration.')
         elif choice == '3':
             data = {':action': 'password_reset'}
             data['email'] = ''
@@ -277,7 +277,7 @@ def post_to_server(self, data, auth=None):  # noqa: C901
         for key, values in data.items():
             for value in map(str, make_iterable(values)):
                 body.write(sep_boundary)
-                body.write('\nContent-Disposition: form-data; name="%s"' % key)
+                body.write(f'\nContent-Disposition: form-data; name="{key}"')
                 body.write("\n\n")
                 body.write(value)
                 if value and value[-1] == '\r':
@@ -288,8 +288,7 @@ def post_to_server(self, data, auth=None):  # noqa: C901
 
         # build the Request
         headers = {
-            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'
-            % boundary,
+            'Content-type': f'multipart/form-data; boundary={boundary}; charset=utf-8',
             'Content-length': str(len(body)),
         }
         req = urllib.request.Request(self.repository, body, headers)
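For orientation, the multipart request that `post_to_server` assembles above has the following overall shape. A minimal sketch; the boundary value is assumed here, the real constant lives in the module:

```python
import io

boundary = 'XxXxXx-assumed-boundary-XxXxXx'  # assumption: register.py defines its own constant
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'

body = io.StringIO()
for key, value in {':action': 'submit', 'name': 'demo'}.items():
    body.write(sep_boundary)
    body.write(f'\nContent-Disposition: form-data; name="{key}"')
    body.write("\n\n")
    body.write(value)
body.write(end_boundary)
body.write("\n")

payload = body.getvalue()  # becomes the data for urllib.request.Request
```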
diff --git a/setuptools/_distutils/command/sdist.py b/setuptools/_distutils/command/sdist.py
index 387d27c90b..04333dd214 100644
--- a/setuptools/_distutils/command/sdist.py
+++ b/setuptools/_distutils/command/sdist.py
@@ -125,14 +125,14 @@ def initialize_options(self):
 
         # 'use_defaults': if true, we will include the default file set
         # in the manifest
-        self.use_defaults = 1
-        self.prune = 1
+        self.use_defaults = True
+        self.prune = True
 
-        self.manifest_only = 0
-        self.force_manifest = 0
+        self.manifest_only = False
+        self.force_manifest = False
 
         self.formats = ['gztar']
-        self.keep_temp = 0
+        self.keep_temp = False
         self.dist_dir = None
 
         self.archive_files = None
@@ -150,7 +150,7 @@ def finalize_options(self):
 
         bad_format = archive_util.check_archive_formats(self.formats)
         if bad_format:
-            raise DistutilsOptionError("unknown archive format '%s'" % bad_format)
+            raise DistutilsOptionError(f"unknown archive format '{bad_format}'")
 
         if self.dist_dir is None:
             self.dist_dir = "dist"
@@ -288,7 +288,7 @@ def _add_defaults_standards(self):
                 if self._cs_path_exists(fn):
                     self.filelist.append(fn)
                 else:
-                    self.warn("standard file '%s' not found" % fn)
+                    self.warn(f"standard file '{fn}' not found")
 
     def _add_defaults_optional(self):
         optional = ['tests/test*.py', 'test/test*.py', 'setup.cfg']
@@ -353,12 +353,12 @@ def read_template(self):
         log.info("reading manifest template '%s'", self.template)
         template = TextFile(
             self.template,
-            strip_comments=1,
-            skip_blanks=1,
-            join_lines=1,
-            lstrip_ws=1,
-            rstrip_ws=1,
-            collapse_join=1,
+            strip_comments=True,
+            skip_blanks=True,
+            join_lines=True,
+            lstrip_ws=True,
+            rstrip_ws=True,
+            collapse_join=True,
         )
 
         try:
@@ -401,7 +401,7 @@ def prune_file_list(self):
 
         vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
         vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)
-        self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
+        self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)
 
     def write_manifest(self):
         """Write the file list in 'self.filelist' (presumably as filled in
@@ -410,8 +410,7 @@ def write_manifest(self):
         """
         if self._manifest_is_not_generated():
             log.info(
-                "not writing to manually maintained "
-                "manifest file '%s'" % self.manifest
+                f"not writing to manually maintained manifest file '{self.manifest}'"
             )
             return
 
@@ -420,7 +419,7 @@ def write_manifest(self):
         self.execute(
             file_util.write_file,
             (self.manifest, content),
-            "writing manifest file '%s'" % self.manifest,
+            f"writing manifest file '{self.manifest}'",
         )
 
     def _manifest_is_not_generated(self):
@@ -468,10 +467,10 @@ def make_release_tree(self, base_dir, files):
 
         if hasattr(os, 'link'):  # can make hard links on this system
             link = 'hard'
-            msg = "making hard links in %s..." % base_dir
+            msg = f"making hard links in {base_dir}..."
         else:  # nope, have to copy
             link = None
-            msg = "copying files to %s..." % base_dir
+            msg = f"copying files to {base_dir}..."
 
         if not files:
             log.warning("no files to distribute -- empty manifest?")
diff --git a/setuptools/_distutils/command/upload.py b/setuptools/_distutils/command/upload.py
index cf541f8a82..a2461e089f 100644
--- a/setuptools/_distutils/command/upload.py
+++ b/setuptools/_distutils/command/upload.py
@@ -41,7 +41,7 @@ def initialize_options(self):
         PyPIRCCommand.initialize_options(self)
         self.username = ''
         self.password = ''
-        self.show_response = 0
+        self.show_response = False
         self.sign = False
         self.identity = None
 
@@ -75,7 +75,7 @@ def upload_file(self, command, pyversion, filename):  # noqa: C901
         # Makes sure the repository URL is compliant
         schema, netloc, url, params, query, fragments = urlparse(self.repository)
         if params or query or fragments:
-            raise AssertionError("Incompatible url %s" % self.repository)
+            raise AssertionError(f"Incompatible url {self.repository}")
 
         if schema not in ('http', 'https'):
             raise AssertionError("unsupported schema " + schema)
@@ -153,10 +153,10 @@ def upload_file(self, command, pyversion, filename):  # noqa: C901
         end_boundary = sep_boundary + b'--\r\n'
         body = io.BytesIO()
         for key, values in data.items():
-            title = '\r\nContent-Disposition: form-data; name="%s"' % key
+            title = f'\r\nContent-Disposition: form-data; name="{key}"'
             for value in make_iterable(values):
                 if type(value) is tuple:
-                    title += '; filename="%s"' % value[0]
+                    title += f'; filename="{value[0]}"'
                     value = value[1]
                 else:
                     value = str(value).encode('utf-8')
@@ -172,7 +172,7 @@ def upload_file(self, command, pyversion, filename):  # noqa: C901
 
         # build the Request
         headers = {
-            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
+            'Content-type': f'multipart/form-data; boundary={boundary}',
             'Content-length': str(len(body)),
             'Authorization': auth,
         }
diff --git a/setuptools/_distutils/compat/__init__.py b/setuptools/_distutils/compat/__init__.py
index b1ee3fe8b0..e12534a32c 100644
--- a/setuptools/_distutils/compat/__init__.py
+++ b/setuptools/_distutils/compat/__init__.py
@@ -3,11 +3,11 @@
 from .py38 import removeprefix
 
 
-def consolidate_linker_args(args: list[str]) -> str:
+def consolidate_linker_args(args: list[str]) -> list[str] | str:
     """
     Ensure the return value is a string for backward compatibility.
 
-    Retain until at least 2024-04-31. See pypa/distutils#246
+    Retain until at least 2025-04-30. See pypa/distutils#246
     """
 
     if not all(arg.startswith('-Wl,') for arg in args):
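The widened annotation reflects the two paths through the function: mixed arguments pass through as a list, while pure `-Wl,` options collapse into a single string. The hunk's context window cuts the body off, so here is a behavioral sketch (not the vendored source verbatim):

```python
def consolidate_linker_args(args):
    # Mixed args are returned untouched: the list[str] half of the annotation.
    if not all(arg.startswith('-Wl,') for arg in args):
        return args
    # Pure '-Wl,' options merge into one linker option: the str half.
    return '-Wl,' + ','.join(arg[len('-Wl,'):] for arg in args)


assert consolidate_linker_args(['-Wl,-rpath', '-Wl,/usr/lib']) == '-Wl,-rpath,/usr/lib'
assert consolidate_linker_args(['-pthread', '-Wl,-rpath']) == ['-pthread', '-Wl,-rpath']
```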
diff --git a/setuptools/_distutils/compat/py38.py b/setuptools/_distutils/compat/py38.py
index 0af3814017..2d44211147 100644
--- a/setuptools/_distutils/compat/py38.py
+++ b/setuptools/_distutils/compat/py38.py
@@ -21,3 +21,13 @@ def removesuffix(self, suffix):
 
     def removeprefix(self, prefix):
         return self.removeprefix(prefix)
+
+
+def aix_platform(osname, version, release):
+    try:
+        import _aix_support  # type: ignore
+
+        return _aix_support.aix_platform()
+    except ImportError:
+        pass
+    return f"{osname}-{version}.{release}"
diff --git a/setuptools/_distutils/py39compat.py b/setuptools/_distutils/compat/py39.py
similarity index 100%
rename from setuptools/_distutils/py39compat.py
rename to setuptools/_distutils/compat/py39.py
diff --git a/setuptools/_distutils/config.py b/setuptools/_distutils/config.py
index 83f96a9eec..ebd2e11da3 100644
--- a/setuptools/_distutils/config.py
+++ b/setuptools/_distutils/config.py
@@ -30,7 +30,7 @@ class PyPIRCCommand(Command):
     realm = None
 
     user_options = [
-        ('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY),
+        ('repository=', 'r', f"url of repository [default: {DEFAULT_REPOSITORY}]"),
         ('show-response', None, 'display full response text from server'),
     ]
 
@@ -51,7 +51,7 @@ def _read_pypirc(self):  # noqa: C901
         """Reads the .pypirc file."""
         rc = self._get_rc_file()
         if os.path.exists(rc):
-            self.announce('Using PyPI login from %s' % rc)
+            self.announce(f'Using PyPI login from {rc}')
             repository = self.repository or self.DEFAULT_REPOSITORY
 
             config = RawConfigParser()
@@ -129,7 +129,7 @@ def initialize_options(self):
         """Initialize options."""
         self.repository = None
         self.realm = None
-        self.show_response = 0
+        self.show_response = False
 
     def finalize_options(self):
         """Finalizes options."""
diff --git a/setuptools/_distutils/core.py b/setuptools/_distutils/core.py
index 309ce696fa..82113c47c1 100644
--- a/setuptools/_distutils/core.py
+++ b/setuptools/_distutils/core.py
@@ -146,7 +146,7 @@ class found in 'cmdclass' is used in place of the default, which is
         _setup_distribution = dist = klass(attrs)
     except DistutilsSetupError as msg:
         if 'name' not in attrs:
-            raise SystemExit("error in setup command: %s" % msg)
+            raise SystemExit(f"error in setup command: {msg}")
         else:
             raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg))
 
@@ -170,7 +170,7 @@ class found in 'cmdclass' is used in place of the default, which is
     try:
         ok = dist.parse_command_line()
     except DistutilsArgError as msg:
-        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)
+        raise SystemExit(gen_usage(dist.script_name) + f"\nerror: {msg}")
 
     if DEBUG:
         print("options (after parsing command line):")
@@ -274,11 +274,8 @@ def run_setup(script_name, script_args=None, stop_after="run"):
 
     if _setup_distribution is None:
         raise RuntimeError(
-            (
-                "'distutils.core.setup()' was never called -- "
-                "perhaps '%s' is not a Distutils setup script?"
-            )
-            % script_name
+            "'distutils.core.setup()' was never called -- "
+            f"perhaps '{script_name}' is not a Distutils setup script?"
         )
 
     # I wonder if the setup script's namespace -- g and l -- would be of
diff --git a/setuptools/_distutils/cygwinccompiler.py b/setuptools/_distutils/cygwinccompiler.py
index 539f09d8f3..7b812fd055 100644
--- a/setuptools/_distutils/cygwinccompiler.py
+++ b/setuptools/_distutils/cygwinccompiler.py
@@ -57,11 +57,11 @@ def get_msvcr():
     try:
         msc_ver = int(match.group(1))
     except AttributeError:
-        return
+        return []
     try:
         return _msvcr_lookup[msc_ver]
     except KeyError:
-        raise ValueError("Unknown MS Compiler version %s " % msc_ver)
+        raise ValueError(f"Unknown MS Compiler version {msc_ver} ")
 
 
 _runtime_library_dirs_msg = (
@@ -83,7 +83,7 @@ class CygwinCCompiler(UnixCCompiler):
     dylib_lib_format = "cyg%s%s"
     exe_extension = ".exe"
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
 
         status, details = check_config_h()
@@ -91,8 +91,8 @@ def __init__(self, verbose=0, dry_run=0, force=0):
         if status is not CONFIG_H_OK:
             self.warn(
                 "Python's pyconfig.h doesn't seem to support your compiler. "
-                "Reason: %s. "
-                "Compiling may fail because of undefined preprocessor macros." % details
+                f"Reason: {details}. "
+                "Compiling may fail because of undefined preprocessor macros."
             )
 
         self.cc = os.environ.get('CC', 'gcc')
@@ -102,10 +102,10 @@ def __init__(self, verbose=0, dry_run=0, force=0):
         shared_option = "-shared"
 
         self.set_executables(
-            compiler='%s -mcygwin -O -Wall' % self.cc,
-            compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
-            compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
-            linker_exe='%s -mcygwin' % self.cc,
+            compiler=f'{self.cc} -mcygwin -O -Wall',
+            compiler_so=f'{self.cc} -mcygwin -mdll -O -Wall',
+            compiler_cxx=f'{self.cxx} -mcygwin -O -Wall',
+            linker_exe=f'{self.cc} -mcygwin',
             linker_so=(f'{self.linker_dll} -mcygwin {shared_option}'),
         )
 
@@ -154,7 +154,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -195,10 +195,10 @@ def link(
             def_file = os.path.join(temp_dir, dll_name + ".def")
 
             # Generate .def file
-            contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"]
+            contents = [f"LIBRARY {os.path.basename(output_filename)}", "EXPORTS"]
             for sym in export_symbols:
                 contents.append(sym)
-            self.execute(write_file, (def_file, contents), "writing %s" % def_file)
+            self.execute(write_file, (def_file, contents), f"writing {def_file}")
 
             # next add options for def-file
 
@@ -265,7 +265,7 @@ class Mingw32CCompiler(CygwinCCompiler):
 
     compiler_type = 'mingw32'
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
 
         shared_option = "-shared"
@@ -274,10 +274,10 @@ def __init__(self, verbose=0, dry_run=0, force=0):
             raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')
 
         self.set_executables(
-            compiler='%s -O -Wall' % self.cc,
-            compiler_so='%s -mdll -O -Wall' % self.cc,
-            compiler_cxx='%s -O -Wall' % self.cxx,
-            linker_exe='%s' % self.cc,
+            compiler=f'{self.cc} -O -Wall',
+            compiler_so=f'{self.cc} -mdll -O -Wall',
+            compiler_cxx=f'{self.cxx} -O -Wall',
+            linker_exe=self.cc,
             linker_so=f'{self.linker_dll} {shared_option}',
         )
 
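One behavioral note from the first hunk in this file: `get_msvcr()` now returns `[]` rather than `None` when `sys.version` embeds no MSC version, so call sites can concatenate unconditionally. A sketch of the pattern this enables:

```python
from distutils.cygwinccompiler import get_msvcr

# Safe even on non-MSVC builds of Python now that the result is always a list:
libraries = ['m'] + get_msvcr()
```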
diff --git a/setuptools/_distutils/dir_util.py b/setuptools/_distutils/dir_util.py
index 370c6ffd49..724afeff6f 100644
--- a/setuptools/_distutils/dir_util.py
+++ b/setuptools/_distutils/dir_util.py
@@ -10,10 +10,10 @@
 
 # cache for by mkpath() -- in addition to cheapening redundant calls,
 # eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
-_path_created = {}
+_path_created = set()
 
 
-def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
+def mkpath(name, mode=0o777, verbose=True, dry_run=False):  # noqa: C901
     """Create a directory and any missing ancestor directories.
 
     If the directory already exists (or if 'name' is the empty string, which
@@ -45,7 +45,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
     created_dirs = []
     if os.path.isdir(name) or name == '':
         return created_dirs
-    if _path_created.get(os.path.abspath(name)):
+    if os.path.abspath(name) in _path_created:
         return created_dirs
 
     (head, tail) = os.path.split(name)
@@ -63,7 +63,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
         head = os.path.join(head, d)
         abs_head = os.path.abspath(head)
 
-        if _path_created.get(abs_head):
+        if abs_head in _path_created:
             continue
 
         if verbose >= 1:
@@ -79,11 +79,11 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
                     )
             created_dirs.append(head)
 
-        _path_created[abs_head] = 1
+        _path_created.add(abs_head)
     return created_dirs
 
 
-def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
+def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):
     """Create all the empty directories under 'base_dir' needed to put 'files'
     there.
 
@@ -104,12 +104,12 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
 def copy_tree(  # noqa: C901
     src,
     dst,
-    preserve_mode=1,
-    preserve_times=1,
-    preserve_symlinks=0,
-    update=0,
-    verbose=1,
-    dry_run=0,
+    preserve_mode=True,
+    preserve_times=True,
+    preserve_symlinks=False,
+    update=False,
+    verbose=True,
+    dry_run=False,
 ):
     """Copy an entire directory tree 'src' to a new location 'dst'.
 
@@ -133,7 +133,7 @@ def copy_tree(  # noqa: C901
     from distutils.file_util import copy_file
 
     if not dry_run and not os.path.isdir(src):
-        raise DistutilsFileError("cannot copy tree '%s': not a directory" % src)
+        raise DistutilsFileError(f"cannot copy tree '{src}': not a directory")
     try:
         names = os.listdir(src)
     except OSError as e:
@@ -202,7 +202,7 @@ def _build_cmdtuple(path, cmdtuples):
     cmdtuples.append((os.rmdir, path))
 
 
-def remove_tree(directory, verbose=1, dry_run=0):
+def remove_tree(directory, verbose=True, dry_run=False):
     """Recursively remove an entire directory tree.
 
     Any errors are ignored (apart from being reported to stdout if 'verbose'
@@ -222,7 +222,7 @@ def remove_tree(directory, verbose=1, dry_run=0):
             # remove dir from cache if it's already there
             abspath = os.path.abspath(cmd[1])
             if abspath in _path_created:
-                _path_created.pop(abspath)
+                _path_created.remove(abspath)
         except OSError as exc:
             log.warning("error removing %s: %s", directory, exc)
 
diff --git a/setuptools/_distutils/dist.py b/setuptools/_distutils/dist.py
index 668ce7eb0a..d7d4ca8fc8 100644
--- a/setuptools/_distutils/dist.py
+++ b/setuptools/_distutils/dist.py
@@ -13,6 +13,8 @@
 from collections.abc import Iterable
 from email import message_from_file
 
+from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
+
 try:
     import warnings
 except ImportError:
@@ -137,9 +139,9 @@ def __init__(self, attrs=None):  # noqa: C901
         """
 
         # Default values for our command-line options
-        self.verbose = 1
-        self.dry_run = 0
-        self.help = 0
+        self.verbose = True
+        self.dry_run = False
+        self.help = False
         for attr in self.display_option_names:
             setattr(self, attr, 0)
 
@@ -262,7 +264,7 @@ def __init__(self, attrs=None):  # noqa: C901
                 elif hasattr(self, key):
                     setattr(self, key, val)
                 else:
-                    msg = "Unknown distribution option: %s" % repr(key)
+                    msg = f"Unknown distribution option: {key!r}"
                     warnings.warn(msg)
 
         # no-user-cfg is handled before other command line args
@@ -311,9 +313,9 @@ def dump_option_dicts(self, header=None, commands=None, indent=""):
         for cmd_name in commands:
             opt_dict = self.command_options.get(cmd_name)
             if opt_dict is None:
-                self.announce(indent + "no option dict for '%s' command" % cmd_name)
+                self.announce(indent + f"no option dict for '{cmd_name}' command")
             else:
-                self.announce(indent + "option dict for '%s' command:" % cmd_name)
+                self.announce(indent + f"option dict for '{cmd_name}' command:")
                 out = pformat(opt_dict)
                 for line in out.split('\n'):
                     self.announce(indent + "  " + line)
@@ -339,7 +341,7 @@ def find_config_files(self):
         files = [str(path) for path in self._gen_paths() if os.path.isfile(path)]
 
         if DEBUG:
-            self.announce("using config files: %s" % ', '.join(files))
+            self.announce("using config files: {}".format(', '.join(files)))
 
         return files
 
@@ -395,7 +397,7 @@ def parse_config_files(self, filenames=None):  # noqa: C901
         parser = ConfigParser()
         for filename in filenames:
             if DEBUG:
-                self.announce("  reading %s" % filename)
+                self.announce(f"  reading {filename}")
             parser.read(filename, encoding='utf-8')
             for section in parser.sections():
                 options = parser.options(section)
@@ -525,7 +527,7 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
         # Pull the current command from the head of the command line
         command = args[0]
         if not command_re.match(command):
-            raise SystemExit("invalid command name '%s'" % command)
+            raise SystemExit(f"invalid command name '{command}'")
         self.commands.append(command)
 
         # Dig up the command class that implements this command, so we
@@ -540,7 +542,7 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
         # to be sure that the basic "command" interface is implemented.
         if not issubclass(cmd_class, Command):
             raise DistutilsClassError(
-                "command class %s must subclass Command" % cmd_class
+                f"command class {cmd_class} must subclass Command"
             )
 
         # Also make sure that the command object provides a list of its
@@ -579,7 +581,7 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
         parser.set_negative_aliases(negative_opt)
         (args, opts) = parser.getopt(args[1:])
         if hasattr(opts, 'help') and opts.help:
-            self._show_help(parser, display_options=0, commands=[cmd_class])
+            self._show_help(parser, display_options=False, commands=[cmd_class])
             return
 
         if hasattr(cmd_class, 'help_options') and isinstance(
@@ -622,7 +624,7 @@ def finalize_options(self):
                 setattr(self.metadata, attr, value)
 
     def _show_help(
-        self, parser, global_options=1, display_options=1, commands: Iterable = ()
+        self, parser, global_options=True, display_options=True, commands: Iterable = ()
     ):
         """Show help for the setup script command-line in the form of
         several lists of command-line options.  'parser' should be a
@@ -652,8 +654,7 @@ def _show_help(
         if display_options:
             parser.set_option_table(self.display_options)
             parser.print_help(
-                "Information display options (just display "
-                + "information, ignore any commands)"
+                "Information display options (just display information, ignore any commands)"
             )
             print()
 
@@ -668,7 +669,7 @@ def _show_help(
                 )
             else:
                 parser.set_option_table(klass.user_options)
-            parser.print_help("Options for '%s' command:" % klass.__name__)
+            parser.print_help(f"Options for '{klass.__name__}' command:")
             print()
 
         print(gen_usage(self.script_name))
@@ -694,12 +695,12 @@ def handle_display_options(self, option_order):
         # display that metadata in the order in which the user supplied the
         # metadata options.
         any_display_options = 0
-        is_display_option = {}
+        is_display_option = set()
         for option in self.display_options:
-            is_display_option[option[0]] = 1
+            is_display_option.add(option[0])
 
         for opt, val in option_order:
-            if val and is_display_option.get(opt):
+            if val and opt in is_display_option:
                 opt = translate_longopt(opt)
                 value = getattr(self.metadata, "get_" + opt)()
                 if opt in ('keywords', 'platforms'):
@@ -740,13 +741,13 @@ def print_commands(self):
         import distutils.command
 
         std_commands = distutils.command.__all__
-        is_std = {}
+        is_std = set()
         for cmd in std_commands:
-            is_std[cmd] = 1
+            is_std.add(cmd)
 
         extra_commands = []
         for cmd in self.cmdclass.keys():
-            if not is_std.get(cmd):
+            if cmd not in is_std:
                 extra_commands.append(cmd)
 
         max_length = 0
@@ -771,13 +772,13 @@ def get_command_list(self):
         import distutils.command
 
         std_commands = distutils.command.__all__
-        is_std = {}
+        is_std = set()
         for cmd in std_commands:
-            is_std[cmd] = 1
+            is_std.add(cmd)
 
         extra_commands = []
         for cmd in self.cmdclass.keys():
-            if not is_std.get(cmd):
+            if cmd not in is_std:
                 extra_commands.append(cmd)
 
         rv = []
@@ -842,9 +843,9 @@ def get_command_class(self, command):
             self.cmdclass[command] = klass
             return klass
 
-        raise DistutilsModuleError("invalid command '%s'" % command)
+        raise DistutilsModuleError(f"invalid command '{command}'")
 
-    def get_command_obj(self, command, create=1):
+    def get_command_obj(self, command, create=True):
         """Return the command object for 'command'.  Normally this object
         is cached on a previous call to 'get_command_obj()'; if no command
         object for 'command' is in the cache, then we either create and
@@ -855,12 +856,12 @@ def get_command_obj(self, command, create=1):
             if DEBUG:
                 self.announce(
                     "Distribution.get_command_obj(): "
-                    "creating '%s' command object" % command
+                    f"creating '{command}' command object"
                 )
 
             klass = self.get_command_class(command)
             cmd_obj = self.command_obj[command] = klass(self)
-            self.have_run[command] = 0
+            self.have_run[command] = False
 
             # Set any options that were supplied in config files
             # or on the command line.  (NB. support for error
@@ -887,7 +888,7 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             option_dict = self.get_option_dict(command_name)
 
         if DEBUG:
-            self.announce("  setting options for '%s' command:" % command_name)
+            self.announce(f"  setting options for '{command_name}' command:")
         for option, (source, value) in option_dict.items():
             if DEBUG:
                 self.announce(f"    {option} = {value} (from {source})")
@@ -915,7 +916,7 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             except ValueError as msg:
                 raise DistutilsOptionError(msg)
 
-    def reinitialize_command(self, command, reinit_subcommands=0):
+    def reinitialize_command(self, command, reinit_subcommands=False):
         """Reinitializes a command to the state it was in when first
         returned by 'get_command_obj()': ie., initialized but not yet
         finalized.  This provides the opportunity to sneak option
@@ -945,8 +946,8 @@ def reinitialize_command(self, command, reinit_subcommands=0):
         if not command.finalized:
             return command
         command.initialize_options()
-        command.finalized = 0
-        self.have_run[command_name] = 0
+        command.finalized = False
+        self.have_run[command_name] = False
         self._set_command_options(command)
 
         if reinit_subcommands:
@@ -986,7 +987,7 @@ def run_command(self, command):
         cmd_obj = self.get_command_obj(command)
         cmd_obj.ensure_finalized()
         cmd_obj.run()
-        self.have_run[command] = 1
+        self.have_run[command] = True
 
     # -- Distribution query methods ------------------------------------
 
@@ -1149,9 +1150,9 @@ def write_pkg_file(self, file):
             version = '1.1'
 
         # required fields
-        file.write('Metadata-Version: %s\n' % version)
-        file.write('Name: %s\n' % self.get_name())
-        file.write('Version: %s\n' % self.get_version())
+        file.write(f'Metadata-Version: {version}\n')
+        file.write(f'Name: {self.get_name()}\n')
+        file.write(f'Version: {self.get_version()}\n')
 
         def maybe_write(header, val):
             if val:
@@ -1189,7 +1190,26 @@ def get_version(self):
         return self.version or "0.0.0"
 
     def get_fullname(self):
-        return f"{self.get_name()}-{self.get_version()}"
+        return self._fullname(self.get_name(), self.get_version())
+
+    @staticmethod
+    def _fullname(name: str, version: str) -> str:
+        """
+        >>> DistributionMetadata._fullname('setup.tools', '1.0-2')
+        'setup_tools-1.0.post2'
+        >>> DistributionMetadata._fullname('setup-tools', '1.2post2')
+        'setup_tools-1.2.post2'
+        >>> DistributionMetadata._fullname('setup-tools', '1.0-r2')
+        'setup_tools-1.0.post2'
+        >>> DistributionMetadata._fullname('setup.tools', '1.0.post')
+        'setup_tools-1.0.post0'
+        >>> DistributionMetadata._fullname('setup.tools', '1.0+ubuntu-1')
+        'setup_tools-1.0+ubuntu.1'
+        """
+        return "{}-{}".format(
+            canonicalize_name(name).replace('-', '_'),
+            canonicalize_version(version, strip_trailing_zero=False),
+        )
 
     def get_author(self):
         return self.author
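The `have_run` and `get_command_obj` hunks above manage one cached command object per command name. A minimal sketch of that contract, using only public `Distribution` methods:

```python
from distutils.dist import Distribution

dist = Distribution({'name': 'demo', 'version': '0.1'})

build = dist.get_command_obj('build')  # created once, then cached
assert build is dist.get_command_obj('build')

# reinitialize_command() rewinds a command to initialized-but-not-finalized,
# so fresh options can be injected before ensure_finalized() runs.
fresh = dist.reinitialize_command('build')
assert fresh is build and not fresh.finalized
```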
diff --git a/setuptools/_distutils/extension.py b/setuptools/_distutils/extension.py
index 94e71635d9..04e871bcd6 100644
--- a/setuptools/_distutils/extension.py
+++ b/setuptools/_distutils/extension.py
@@ -130,7 +130,7 @@ def __init__(
         if len(kw) > 0:
             options = [repr(option) for option in kw]
             options = ', '.join(sorted(options))
-            msg = "Unknown Extension options: %s" % options
+            msg = f"Unknown Extension options: {options}"
             warnings.warn(msg)
 
     def __repr__(self):
@@ -150,11 +150,11 @@ def read_setup_file(filename):  # noqa: C901
     #    ... [ ...] [ ...] [ ...]
     file = TextFile(
         filename,
-        strip_comments=1,
-        skip_blanks=1,
-        join_lines=1,
-        lstrip_ws=1,
-        rstrip_ws=1,
+        strip_comments=True,
+        skip_blanks=True,
+        join_lines=True,
+        lstrip_ws=True,
+        rstrip_ws=True,
     )
     try:
         extensions = []
@@ -167,7 +167,7 @@ def read_setup_file(filename):  # noqa: C901
                 continue
 
             if line[0] == line[-1] == "*":
-                file.warn("'%s' lines not handled yet" % line)
+                file.warn(f"'{line}' lines not handled yet")
                 continue
 
             line = expand_makefile_vars(line, vars)
@@ -233,7 +233,7 @@ def read_setup_file(filename):  # noqa: C901
                     # and append it to sources.  Hmmmm.
                     ext.extra_objects.append(word)
                 else:
-                    file.warn("unrecognized argument '%s'" % word)
+                    file.warn(f"unrecognized argument '{word}'")
 
             extensions.append(ext)
     finally:
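For context, `read_setup_file` parses old-style `Setup` files: one module per logical line, with `-I`, `-D` and `-l` switches sorted into the matching `Extension` attributes. A sketch with a hypothetical one-line file:

```python
import tempfile

from distutils.extension import read_setup_file

# Hypothetical Setup line: module name, then sources and compiler switches.
with tempfile.NamedTemporaryFile('w', suffix='.setup', delete=False) as f:
    f.write("demo demo.c -Iinclude -DNDEBUG -lm\n")

(ext,) = read_setup_file(f.name)
print(ext.name, ext.sources, ext.include_dirs, ext.libraries)
# demo ['demo.c'] ['include'] ['m']
```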
diff --git a/setuptools/_distutils/fancy_getopt.py b/setuptools/_distutils/fancy_getopt.py
index e905aede4d..907cc2b73c 100644
--- a/setuptools/_distutils/fancy_getopt.py
+++ b/setuptools/_distutils/fancy_getopt.py
@@ -21,7 +21,7 @@
 # utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
 # The similarities to NAME are again not a coincidence...
 longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
-longopt_re = re.compile(r'^%s$' % longopt_pat)
+longopt_re = re.compile(rf'^{longopt_pat}$')
 
 # For recognizing "negative alias" options, eg. "quiet=!verbose"
 neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$")
@@ -95,7 +95,7 @@ def set_option_table(self, option_table):
     def add_option(self, long_option, short_option=None, help_string=None):
         if long_option in self.option_index:
             raise DistutilsGetoptError(
-                "option conflict: already an option '%s'" % long_option
+                f"option conflict: already an option '{long_option}'"
             )
         else:
             option = (long_option, short_option, help_string)
@@ -118,11 +118,11 @@ def _check_alias_dict(self, aliases, what):
         for alias, opt in aliases.items():
             if alias not in self.option_index:
                 raise DistutilsGetoptError(
-                    f"invalid {what} '{alias}': " f"option '{alias}' not defined"
+                    f"invalid {what} '{alias}': option '{alias}' not defined"
                 )
             if opt not in self.option_index:
                 raise DistutilsGetoptError(
-                    f"invalid {what} '{alias}': " f"aliased option '{opt}' not defined"
+                    f"invalid {what} '{alias}': aliased option '{opt}' not defined"
                 )
 
     def set_aliases(self, alias):
@@ -162,13 +162,13 @@ def _grok_option_table(self):  # noqa: C901
             # Type- and value-check the option names
             if not isinstance(long, str) or len(long) < 2:
                 raise DistutilsGetoptError(
-                    ("invalid long option '%s': must be a string of length >= 2") % long
+                    f"invalid long option '{long}': must be a string of length >= 2"
                 )
 
             if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
                 raise DistutilsGetoptError(
-                    "invalid short option '%s': "
-                    "must a single character or None" % short
+                    f"invalid short option '{short}': "
+                    "must a single character or None"
                 )
 
             self.repeat[long] = repeat
@@ -178,7 +178,7 @@ def _grok_option_table(self):  # noqa: C901
                 if short:
                     short = short + ':'
                 long = long[0:-1]
-                self.takes_arg[long] = 1
+                self.takes_arg[long] = True
             else:
                 # Is option is a "negative alias" for some other option (eg.
                 # "quiet" == "!verbose")?
@@ -191,7 +191,7 @@ def _grok_option_table(self):  # noqa: C901
                         )
 
                     self.long_opts[-1] = long  # XXX redundant?!
-                self.takes_arg[long] = 0
+                self.takes_arg[long] = False
 
             # If this is an alias option, make sure its "takes arg" flag is
             # the same as the option it's aliased to.
@@ -210,8 +210,8 @@ def _grok_option_table(self):  # noqa: C901
             # '='.
             if not longopt_re.match(long):
                 raise DistutilsGetoptError(
-                    "invalid long option name '%s' "
-                    "(must be letters, numbers, hyphens only" % long
+                    f"invalid long option name '{long}' "
+                    "(must be letters, numbers, hyphens only"
                 )
 
             self.attr_name[long] = self.get_attr_name(long)
@@ -268,7 +268,7 @@ def getopt(self, args=None, object=None):  # noqa: C901
 
             attr = self.attr_name[opt]
             # The only repeating option at the moment is 'verbose'.
-            # It has a negative option -q quiet, which should set verbose = 0.
+            # It has a negative option -q quiet, which should set verbose = False.
             if val and self.repeat.get(attr) is not None:
                 val = getattr(object, attr, 0) + 1
             setattr(object, attr, val)
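The `takes_arg` bookkeeping above encodes the option-table convention: a trailing `=` on the long name means the option expects a value. A small usage sketch:

```python
from distutils.fancy_getopt import FancyGetopt

parser = FancyGetopt([
    ('output=', 'o', "write results to FILE"),  # trailing '=': takes an argument
    ('verbose', 'v', "run verbosely"),          # no '=': boolean flag
])

args, opts = parser.getopt(['-o', 'out.txt', '--verbose', 'positional'])
print(args)                       # ['positional']
print(opts.output, opts.verbose)  # out.txt 1
```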
diff --git a/setuptools/_distutils/file_util.py b/setuptools/_distutils/file_util.py
index 960def9cf9..b19a5dcfa4 100644
--- a/setuptools/_distutils/file_util.py
+++ b/setuptools/_distutils/file_util.py
@@ -63,12 +63,12 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024):  # noqa: C901
 def copy_file(  # noqa: C901
     src,
     dst,
-    preserve_mode=1,
-    preserve_times=1,
-    update=0,
+    preserve_mode=True,
+    preserve_times=True,
+    update=False,
     link=None,
-    verbose=1,
-    dry_run=0,
+    verbose=True,
+    dry_run=False,
 ):
     """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
     copied there with the same name; otherwise, it must be a filename.  (If
@@ -106,7 +106,7 @@ def copy_file(  # noqa: C901
 
     if not os.path.isfile(src):
         raise DistutilsFileError(
-            "can't copy '%s': doesn't exist or not a regular file" % src
+            f"can't copy '{src}': doesn't exist or not a regular file"
         )
 
     if os.path.isdir(dst):
@@ -123,7 +123,7 @@ def copy_file(  # noqa: C901
     try:
         action = _copy_action[link]
     except KeyError:
-        raise ValueError("invalid value '%s' for 'link' argument" % link)
+        raise ValueError(f"invalid value '{link}' for 'link' argument")
 
     if verbose >= 1:
         if os.path.basename(dst) == os.path.basename(src):
@@ -168,7 +168,7 @@ def copy_file(  # noqa: C901
 
 
 # XXX I suspect this is Unix-specific -- need porting help!
-def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901
+def move_file(src, dst, verbose=True, dry_run=False):  # noqa: C901
     """Move a file 'src' to 'dst'.  If 'dst' is a directory, the file will
     be moved into it with the same name; otherwise, 'src' is just renamed
     to 'dst'.  Return the new full name of the file.
@@ -186,7 +186,7 @@ def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901
         return dst
 
     if not isfile(src):
-        raise DistutilsFileError("can't move '%s': not a regular file" % src)
+        raise DistutilsFileError(f"can't move '{src}': not a regular file")
 
     if isdir(dst):
         dst = os.path.join(dst, basename(src))
diff --git a/setuptools/_distutils/filelist.py b/setuptools/_distutils/filelist.py
index 71ffb2abe7..44ae9e67ef 100644
--- a/setuptools/_distutils/filelist.py
+++ b/setuptools/_distutils/filelist.py
@@ -84,24 +84,24 @@ def _parse_template_line(self, line):
         if action in ('include', 'exclude', 'global-include', 'global-exclude'):
             if len(words) < 2:
                 raise DistutilsTemplateError(
-                    "'%s' expects   ..." % action
+                    f"'{action}' expects   ..."
                 )
             patterns = [convert_path(w) for w in words[1:]]
         elif action in ('recursive-include', 'recursive-exclude'):
             if len(words) < 3:
                 raise DistutilsTemplateError(
-                    "'%s' expects    ..." % action
+                    f"'{action}' expects    ..."
                 )
             dir = convert_path(words[1])
             patterns = [convert_path(w) for w in words[2:]]
         elif action in ('graft', 'prune'):
             if len(words) != 2:
                 raise DistutilsTemplateError(
-                    "'%s' expects a single " % action
+                    f"'{action}' expects a single "
                 )
             dir_pattern = convert_path(words[1])
         else:
-            raise DistutilsTemplateError("unknown action '%s'" % action)
+            raise DistutilsTemplateError(f"unknown action '{action}'")
 
         return (action, patterns, dir, dir_pattern)
 
@@ -119,13 +119,13 @@ def process_template_line(self, line):  # noqa: C901
         if action == 'include':
             self.debug_print("include " + ' '.join(patterns))
             for pattern in patterns:
-                if not self.include_pattern(pattern, anchor=1):
+                if not self.include_pattern(pattern, anchor=True):
                     log.warning("warning: no files found matching '%s'", pattern)
 
         elif action == 'exclude':
             self.debug_print("exclude " + ' '.join(patterns))
             for pattern in patterns:
-                if not self.exclude_pattern(pattern, anchor=1):
+                if not self.exclude_pattern(pattern, anchor=True):
                     log.warning(
                         (
                             "warning: no previously-included files "
@@ -137,7 +137,7 @@ def process_template_line(self, line):  # noqa: C901
         elif action == 'global-include':
             self.debug_print("global-include " + ' '.join(patterns))
             for pattern in patterns:
-                if not self.include_pattern(pattern, anchor=0):
+                if not self.include_pattern(pattern, anchor=False):
                     log.warning(
                         (
                             "warning: no files found matching '%s' "
@@ -149,7 +149,7 @@ def process_template_line(self, line):  # noqa: C901
         elif action == 'global-exclude':
             self.debug_print("global-exclude " + ' '.join(patterns))
             for pattern in patterns:
-                if not self.exclude_pattern(pattern, anchor=0):
+                if not self.exclude_pattern(pattern, anchor=False):
                     log.warning(
                         (
                             "warning: no previously-included files matching "
@@ -192,12 +192,12 @@ def process_template_line(self, line):  # noqa: C901
                 )
         else:
             raise DistutilsInternalError(
-                "this cannot happen: invalid action '%s'" % action
+                f"this cannot happen: invalid action '{action}'"
             )
 
     # Filtering/selection methods
 
-    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+    def include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
         """Select strings (presumably filenames) from 'self.files' that
         match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns
         are not quite the same as implemented by the 'fnmatch' module: '*'
@@ -225,7 +225,7 @@ def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
         # XXX docstring lying about what the special chars are?
         files_found = False
         pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
-        self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern)
+        self.debug_print(f"include_pattern: applying regex r'{pattern_re.pattern}'")
 
         # delayed loading of allfiles list
         if self.allfiles is None:
@@ -238,7 +238,7 @@ def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
                 files_found = True
         return files_found
 
-    def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+    def exclude_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
         """Remove strings (presumably filenames) from 'files' that match
         'pattern'.  Other parameters are the same as for
         'include_pattern()', above.
@@ -247,7 +247,7 @@ def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
         """
         files_found = False
         pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
-        self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern)
+        self.debug_print(f"exclude_pattern: applying regex r'{pattern_re.pattern}'")
         for i in range(len(self.files) - 1, -1, -1):
             if pattern_re.search(self.files[i]):
                 self.debug_print(" removing " + self.files[i])
@@ -327,12 +327,12 @@ def glob_to_re(pattern):
         # we're using a regex to manipulate a regex, so we need
         # to escape the backslash twice
         sep = r'\\\\'
-    escaped = r'\1[^%s]' % sep
+    escaped = rf'\1[^{sep}]'
     pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
     return pattern_re
diff --git a/setuptools/_distutils/spawn.py b/setuptools/_distutils/spawn.py
--- a/setuptools/_distutils/spawn.py
+++ b/setuptools/_distutils/spawn.py
@@ -9,13 +9,41 @@
+from __future__ import annotations
+
 import os
+import platform
+import shutil
 import subprocess
 import sys
+import warnings
+from typing import Mapping
 
 from ._log import log
 from .debug import DEBUG
 from .errors import DistutilsExecError
 
 
-def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):  # noqa: C901
+def _debug(cmd):
+    """
+    Render a subprocess command differently depending on DEBUG.
+    """
+    return cmd if DEBUG else cmd[0]
+
+
+def _inject_macos_ver(env: Mapping[str, str] | None) -> Mapping[str, str] | None:
+    if platform.system() != 'Darwin':
+        return env
+
+    from .util import MACOSX_VERSION_VAR, get_macosx_target_ver
+
+    target_ver = get_macosx_target_ver()
+    update = {MACOSX_VERSION_VAR: target_ver} if target_ver else {}
+    return {**_resolve(env), **update}
+
+
+def _resolve(env: Mapping[str, str] | None) -> Mapping[str, str]:
+    return os.environ if env is None else env
+
+
+def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
     """Run another program, specified as a command list 'cmd', in a new process.
 
     'cmd' is just the argument list for the new process, ie.
@@ -31,41 +58,25 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):  # noqa: C901
     Raise DistutilsExecError if running the program fails in any way; just
     return on success.
     """
-    # cmd is documented as a list, but just in case some code passes a tuple
-    # in, protect our %-formatting code against horrible death
-    cmd = list(cmd)
-
     log.info(subprocess.list2cmdline(cmd))
     if dry_run:
         return
 
     if search_path:
-        executable = find_executable(cmd[0])
+        executable = shutil.which(cmd[0])
         if executable is not None:
             cmd[0] = executable
 
-    env = env if env is not None else dict(os.environ)
-
-    if sys.platform == 'darwin':
-        from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver
-
-        macosx_target_ver = get_macosx_target_ver()
-        if macosx_target_ver:
-            env[MACOSX_VERSION_VAR] = macosx_target_ver
-
     try:
-        proc = subprocess.Popen(cmd, env=env)
-        proc.wait()
-        exitcode = proc.returncode
+        subprocess.check_call(cmd, env=_inject_macos_ver(env))
     except OSError as exc:
-        if not DEBUG:
-            cmd = cmd[0]
-        raise DistutilsExecError(f"command {cmd!r} failed: {exc.args[-1]}") from exc
-
-    if exitcode:
-        if not DEBUG:
-            cmd = cmd[0]
-        raise DistutilsExecError(f"command {cmd!r} failed with exit code {exitcode}")
+        raise DistutilsExecError(
+            f"command {_debug(cmd)!r} failed: {exc.args[-1]}"
+        ) from exc
+    except subprocess.CalledProcessError as err:
+        raise DistutilsExecError(
+            f"command {_debug(cmd)!r} failed with exit code {err.returncode}"
+        ) from err
 
 
 def find_executable(executable, path=None):
@@ -74,6 +85,9 @@ def find_executable(executable, path=None):
     A string listing directories separated by 'os.pathsep'; defaults to
     os.environ['PATH'].  Returns the complete filename or None if not found.
     """
+    warnings.warn(
+        'Use shutil.which instead of find_executable', DeprecationWarning, stacklevel=2
+    )
     _, ext = os.path.splitext(executable)
     if (sys.platform == 'win32') and (ext != '.exe'):
         executable = executable + '.exe'
@@ -83,14 +97,13 @@ def find_executable(executable, path=None):
 
     if path is None:
         path = os.environ.get('PATH', None)
+        # bpo-35755: Don't fall through if PATH is the empty string
         if path is None:
             try:
                 path = os.confstr("CS_PATH")
             except (AttributeError, ValueError):
                 # os.confstr() or CS_PATH is not available
                 path = os.defpath
-        # bpo-35755: Don't use os.defpath if the PATH environment variable is
-        # set to an empty string
 
     # PATH='' doesn't match, whereas PATH=':' looks in the current directory
     if not path:
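With the refactor above, `spawn` funnels both failure modes of `subprocess.check_call` into `DistutilsExecError`, so callers keep a single except clause:

```python
from distutils.errors import DistutilsExecError
from distutils.spawn import spawn

try:
    spawn(['gcc', '--version'])
except DistutilsExecError as exc:
    # Raised for a missing executable (OSError) and for a
    # non-zero exit status (CalledProcessError) alike.
    print(f"build step failed: {exc}")
```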
diff --git a/setuptools/_distutils/sysconfig.py b/setuptools/_distutils/sysconfig.py
index 1a38e9fa79..4ba0be5602 100644
--- a/setuptools/_distutils/sysconfig.py
+++ b/setuptools/_distutils/sysconfig.py
@@ -16,9 +16,10 @@
 import sys
 import sysconfig
 
-from . import py39compat
 from ._functools import pass_none
+from .compat import py39
 from .errors import DistutilsPlatformError
+from .util import is_mingw
 
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
@@ -108,7 +109,7 @@ def get_python_version():
     return '%d.%d' % sys.version_info[:2]
 
 
-def get_python_inc(plat_specific=0, prefix=None):
+def get_python_inc(plat_specific=False, prefix=None):
     """Return the directory containing installed Python header files.
 
     If 'plat_specific' is false (the default), this is the path to the
@@ -121,12 +122,14 @@ def get_python_inc(plat_specific=0, prefix=None):
     """
     default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
     resolved_prefix = prefix if prefix is not None else default_prefix
+    # MinGW imitates a posix-like layout, but os.name != "posix"
+    os_name = "posix" if is_mingw() else os.name
     try:
-        getter = globals()[f'_get_python_inc_{os.name}']
+        getter = globals()[f'_get_python_inc_{os_name}']
     except KeyError:
         raise DistutilsPlatformError(
             "I don't know where Python installs its C header files "
-            "on platform '%s'" % os.name
+            f"on platform '{os.name}'"
         )
     return getter(resolved_prefix, prefix, plat_specific)
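The lookup above dispatches by name rather than branching: any module-level function called _get_python_inc_<os_name> is picked up automatically, and MinGW reuses the posix handler. A self-contained sketch of the same pattern (handler bodies here are illustrative, not the module's):

    # Dispatch-by-name sketch: resolve a per-platform handler from globals().
    def _get_python_inc_posix(prefix):
        return f"{prefix}/include"

    def _get_python_inc_nt(prefix):
        return f"{prefix}\\include"

    def python_inc(os_name, prefix):
        try:
            getter = globals()[f'_get_python_inc_{os_name}']
        except KeyError:
            raise ValueError(f"unsupported platform {os_name!r}")
        return getter(prefix)

    print(python_inc('posix', '/usr'))  # /usr/include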
 
@@ -213,7 +216,7 @@ def _posix_lib(standard_lib, libpython, early_prefix, prefix):
         return os.path.join(libpython, "site-packages")
 
 
-def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+def get_python_lib(plat_specific=False, standard_lib=False, prefix=None):
     """Return the directory containing the Python library (standard or
     site additions).
 
@@ -244,7 +247,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
         else:
             prefix = plat_specific and EXEC_PREFIX or PREFIX
 
-    if os.name == "posix":
+    if os.name == "posix" or is_mingw():
         if plat_specific or standard_lib:
             # Platform-specific modules (any module from a non-pure-Python
             # module distribution) or standard Python library modules.
@@ -262,8 +265,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
             return os.path.join(prefix, "Lib", "site-packages")
     else:
         raise DistutilsPlatformError(
-            "I don't know where Python installs its library "
-            "on platform '%s'" % os.name
+            f"I don't know where Python installs its library on platform '{os.name}'"
         )
 
 
@@ -291,7 +293,7 @@ def customize_compiler(compiler):  # noqa: C901
     Mainly needed on Unix, so we can plug in the information that
     varies across Unices and is stored in Python's Makefile.
     """
-    if compiler.compiler_type == "unix":
+    if compiler.compiler_type in ["unix", "cygwin", "mingw32"]:
         _customize_macos()
 
         (
@@ -399,7 +401,11 @@ def parse_makefile(fn, g=None):  # noqa: C901
     from distutils.text_file import TextFile
 
     fp = TextFile(
-        fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape"
+        fn,
+        strip_comments=True,
+        skip_blanks=True,
+        join_lines=True,
+        errors="surrogateescape",
     )
 
     if g is None:
@@ -538,7 +544,7 @@ def get_config_vars(*args):
     global _config_vars
     if _config_vars is None:
         _config_vars = sysconfig.get_config_vars().copy()
-        py39compat.add_ext_suffix(_config_vars)
+        py39.add_ext_suffix(_config_vars)
 
     return [_config_vars.get(name) for name in args] if args else _config_vars
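The function keeps a single lazily populated module-level cache: with arguments it acts as a multi-get over that cache, without arguments it returns the dict itself. Usage sketch:

    from distutils import sysconfig

    # Multi-get form: values come back in argument order (None when missing).
    cc, cflags = sysconfig.get_config_vars('CC', 'CFLAGS')

    # No-argument form: the cached mapping itself.
    all_vars = sysconfig.get_config_vars()
    assert all_vars.get('CC') == cc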
 
diff --git a/setuptools/_distutils/tests/__init__.py b/setuptools/_distutils/tests/__init__.py
index 20dfe8f19b..93fbf49074 100644
--- a/setuptools/_distutils/tests/__init__.py
+++ b/setuptools/_distutils/tests/__init__.py
@@ -7,6 +7,7 @@
 by import rather than matching pre-defined names.
 """
 
+import shutil
 from typing import Sequence
 
 
@@ -19,7 +20,7 @@ def missing_compiler_executable(cmd_names: Sequence[str] = []):  # pragma: no co
     missing.
 
     """
-    from distutils import ccompiler, errors, spawn, sysconfig
+    from distutils import ccompiler, errors, sysconfig
 
     compiler = ccompiler.new_compiler()
     sysconfig.customize_compiler(compiler)
@@ -34,8 +35,8 @@ def missing_compiler_executable(cmd_names: Sequence[str] = []):  # pragma: no co
             continue
         cmd = getattr(compiler, name)
         if cmd_names:
-            assert cmd is not None, "the '%s' executable is not configured" % name
+            assert cmd is not None, f"the '{name}' executable is not configured"
         elif not cmd:
             continue
-        if spawn.find_executable(cmd[0]) is None:
+        if shutil.which(cmd[0]) is None:
             return cmd[0]
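For context, the typical call pattern in the tests touched below (test_build_clib, test_config_cmd, test_install) is: ask for the first compiler executable that cannot be found and skip if there is one. A sketch (test name hypothetical):

    # Usage sketch inside a compiler-dependent test.
    import pytest
    from distutils.tests import missing_compiler_executable

    def test_needs_compiler():
        cmd = missing_compiler_executable()
        if cmd is not None:
            pytest.skip(f'The {cmd!r} command is not found')
        ...  # body that actually invokes the compiler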
diff --git a/setuptools/_distutils/tests/compat/__init__.py b/setuptools/_distutils/tests/compat/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_distutils/tests/py38compat.py b/setuptools/_distutils/tests/compat/py38.py
similarity index 100%
rename from setuptools/_distutils/tests/py38compat.py
rename to setuptools/_distutils/tests/compat/py38.py
diff --git a/setuptools/_distutils/tests/test_archive_util.py b/setuptools/_distutils/tests/test_archive_util.py
index 145cce915d..abbcd36cb0 100644
--- a/setuptools/_distutils/tests/test_archive_util.py
+++ b/setuptools/_distutils/tests/test_archive_util.py
@@ -23,7 +23,7 @@
 import path
 import pytest
 
-from .py38compat import check_warnings
+from .compat.py38 import check_warnings
 from .unix_compat import UID_0_SUPPORT, grp, pwd, require_uid_0, require_unix_id
 
 
@@ -135,7 +135,7 @@ def _create_files(self):
         return tmpdir
 
     @pytest.mark.usefixtures('needs_zlib')
-    @pytest.mark.skipif("not (find_executable('tar') and find_executable('gzip'))")
+    @pytest.mark.skipif("not (shutil.which('tar') and shutil.which('gzip'))")
     def test_tarfile_vs_tar(self):
         tmpdir = self._create_files()
         tmpdir2 = self.mkdtemp()
@@ -190,7 +190,7 @@ def test_tarfile_vs_tar(self):
         tarball = base_name + '.tar'
         assert os.path.exists(tarball)
 
-    @pytest.mark.skipif("not find_executable('compress')")
+    @pytest.mark.skipif("not shutil.which('compress')")
     def test_compress_deprecated(self):
         tmpdir = self._create_files()
         base_name = os.path.join(self.mkdtemp(), 'archive')
diff --git a/setuptools/_distutils/tests/test_bdist.py b/setuptools/_distutils/tests/test_bdist.py
index 1804807752..d5696fc3dc 100644
--- a/setuptools/_distutils/tests/test_bdist.py
+++ b/setuptools/_distutils/tests/test_bdist.py
@@ -31,7 +31,7 @@ def test_skip_build(self):
         # bug #10946: bdist --skip-build should trickle down to subcommands
         dist = self.create_dist()[1]
         cmd = bdist(dist)
-        cmd.skip_build = 1
+        cmd.skip_build = True
         cmd.ensure_finalized()
         dist.command_obj['bdist'] = cmd
 
@@ -44,4 +44,4 @@ def test_skip_build(self):
             if getattr(subcmd, '_unsupported', False):
                 # command is not supported on this build
                 continue
-            assert subcmd.skip_build, '%s should take --skip-build from bdist' % name
+            assert subcmd.skip_build, f'{name} should take --skip-build from bdist'
diff --git a/setuptools/_distutils/tests/test_bdist_dumb.py b/setuptools/_distutils/tests/test_bdist_dumb.py
index 78928fea24..1fc51d244a 100644
--- a/setuptools/_distutils/tests/test_bdist_dumb.py
+++ b/setuptools/_distutils/tests/test_bdist_dumb.py
@@ -74,5 +74,5 @@ def test_simple_built(self):
         contents = sorted(filter(None, map(os.path.basename, contents)))
         wanted = ['foo-0.1-py{}.{}.egg-info'.format(*sys.version_info[:2]), 'foo.py']
         if not sys.dont_write_bytecode:
-            wanted.append('foo.%s.pyc' % sys.implementation.cache_tag)
+            wanted.append(f'foo.{sys.implementation.cache_tag}.pyc')
         assert contents == sorted(wanted)
diff --git a/setuptools/_distutils/tests/test_bdist_rpm.py b/setuptools/_distutils/tests/test_bdist_rpm.py
index 769623cbb8..1109fdf117 100644
--- a/setuptools/_distutils/tests/test_bdist_rpm.py
+++ b/setuptools/_distutils/tests/test_bdist_rpm.py
@@ -1,15 +1,15 @@
 """Tests for distutils.command.bdist_rpm."""
 
 import os
+import shutil  # noqa: F401
 import sys
 from distutils.command.bdist_rpm import bdist_rpm
 from distutils.core import Distribution
-from distutils.spawn import find_executable  # noqa: F401
 from distutils.tests import support
 
 import pytest
 
-from .py38compat import requires_zlib
+from .compat.py38 import requires_zlib
 
 SETUP_PY = """\
 from distutils.core import setup
@@ -43,8 +43,8 @@ class TestBuildRpm(
 ):
     @mac_woes
     @requires_zlib()
-    @pytest.mark.skipif("not find_executable('rpm')")
-    @pytest.mark.skipif("not find_executable('rpmbuild')")
+    @pytest.mark.skipif("not shutil.which('rpm')")
+    @pytest.mark.skipif("not shutil.which('rpmbuild')")
     def test_quiet(self):
         # let's create a package
         tmp_dir = self.mkdtemp()
@@ -72,7 +72,7 @@ def test_quiet(self):
         cmd.fix_python = True
 
         # running in quiet mode
-        cmd.quiet = 1
+        cmd.quiet = True
         cmd.ensure_finalized()
         cmd.run()
 
@@ -86,8 +86,8 @@ def test_quiet(self):
     @mac_woes
     @requires_zlib()
     # https://bugs.python.org/issue1533164
-    @pytest.mark.skipif("not find_executable('rpm')")
-    @pytest.mark.skipif("not find_executable('rpmbuild')")
+    @pytest.mark.skipif("not shutil.which('rpm')")
+    @pytest.mark.skipif("not shutil.which('rpmbuild')")
     def test_no_optimize_flag(self):
         # let's create a package that breaks bdist_rpm
         tmp_dir = self.mkdtemp()
@@ -114,7 +114,7 @@ def test_no_optimize_flag(self):
         cmd = bdist_rpm(dist)
         cmd.fix_python = True
 
-        cmd.quiet = 1
+        cmd.quiet = True
         cmd.ensure_finalized()
         cmd.run()
 
diff --git a/setuptools/_distutils/tests/test_build.py b/setuptools/_distutils/tests/test_build.py
index 25483ad76b..8fb1bc1b77 100644
--- a/setuptools/_distutils/tests/test_build.py
+++ b/setuptools/_distutils/tests/test_build.py
@@ -4,6 +4,7 @@
 import sys
 from distutils.command.build import build
 from distutils.tests import support
+from sysconfig import get_config_var
 from sysconfig import get_platform
 
 
@@ -24,6 +25,8 @@ def test_finalize_options(self):
         # examples:
         #   build/lib.macosx-10.3-i386-cpython39
         plat_spec = f'.{cmd.plat_name}-{sys.implementation.cache_tag}'
+        if get_config_var('Py_GIL_DISABLED'):
+            plat_spec += 't'
         if hasattr(sys, 'gettotalrefcount'):
             assert cmd.build_platlib.endswith('-pydebug')
             plat_spec += '-pydebug'
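The new check makes the expected build-directory suffix account for free-threaded (PEP 703) builds, which append 't' after the cache tag. A sketch of how the full expected spec is assembled, assuming cmd.plat_name matches sysconfig.get_platform():

    # Sketch: expected platlib suffix, mirroring the test's assembly.
    import sys
    from sysconfig import get_config_var, get_platform

    plat_spec = f'.{get_platform()}-{sys.implementation.cache_tag}'
    if get_config_var('Py_GIL_DISABLED'):  # free-threaded CPython
        plat_spec += 't'
    if hasattr(sys, 'gettotalrefcount'):   # --with-pydebug interpreter
        plat_spec += '-pydebug'
    print(plat_spec)  # e.g. '.linux-x86_64-cpython-313'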
diff --git a/setuptools/_distutils/tests/test_build_clib.py b/setuptools/_distutils/tests/test_build_clib.py
index 9c69b3e7fc..f76f26bcea 100644
--- a/setuptools/_distutils/tests/test_build_clib.py
+++ b/setuptools/_distutils/tests/test_build_clib.py
@@ -125,7 +125,7 @@ def test_run(self):
         # all commands are present on the system.
         ccmd = missing_compiler_executable()
         if ccmd is not None:
-            self.skipTest('The %r command is not found' % ccmd)
+            self.skipTest(f'The {ccmd!r} command is not found')
 
         # this should work
         cmd.run()
diff --git a/setuptools/_distutils/tests/test_build_ext.py b/setuptools/_distutils/tests/test_build_ext.py
index ca5d9d57cd..6c4c4ba869 100644
--- a/setuptools/_distutils/tests/test_build_ext.py
+++ b/setuptools/_distutils/tests/test_build_ext.py
@@ -31,7 +31,7 @@
 import path
 import pytest
 
-from . import py38compat as import_helper
+from .compat import py38 as import_helper
 
 
 @pytest.fixture()
@@ -140,7 +140,7 @@ def test_solaris_enable_shared(self):
         from distutils.sysconfig import _config_vars
 
         old_var = _config_vars.get('Py_ENABLE_SHARED')
-        _config_vars['Py_ENABLE_SHARED'] = 1
+        _config_vars['Py_ENABLE_SHARED'] = True
         try:
             cmd.ensure_finalized()
         finally:
@@ -164,7 +164,7 @@ def test_user_site(self):
         assert 'user' in options
 
         # setting a value
-        cmd.user = 1
+        cmd.user = True
 
         # setting user based lib and include
         lib = os.path.join(site.USER_BASE, 'lib')
@@ -209,7 +209,7 @@ def test_finalize_options(self):
         for p in py_include.split(os.path.pathsep):
             assert p in cmd.include_dirs
 
-        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+        plat_py_include = sysconfig.get_python_inc(plat_specific=True)
         for p in plat_py_include.split(os.path.pathsep):
             assert p in cmd.include_dirs
 
@@ -223,7 +223,7 @@ def test_finalize_options(self):
         # make sure cmd.library_dirs is turned into a list
         # if it's a string
         cmd = self.build_ext(dist)
-        cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep
+        cmd.library_dirs = f'my_lib_dir{os.pathsep}other_lib_dir'
         cmd.finalize_options()
         assert 'my_lib_dir' in cmd.library_dirs
         assert 'other_lib_dir' in cmd.library_dirs
@@ -231,7 +231,7 @@ def test_finalize_options(self):
         # make sure rpath is turned into a list
         # if it's a string
         cmd = self.build_ext(dist)
-        cmd.rpath = 'one%stwo' % os.pathsep
+        cmd.rpath = f'one{os.pathsep}two'
         cmd.finalize_options()
         assert cmd.rpath == ['one', 'two']
 
@@ -381,7 +381,7 @@ def test_get_outputs(self):
         old_wd = os.getcwd()
         os.chdir(other_tmp_dir)
         try:
-            cmd.inplace = 1
+            cmd.inplace = True
             cmd.run()
             so_file = cmd.get_outputs()[0]
         finally:
@@ -392,7 +392,7 @@ def test_get_outputs(self):
         so_dir = os.path.dirname(so_file)
         assert so_dir == other_tmp_dir
 
-        cmd.inplace = 0
+        cmd.inplace = False
         cmd.compiler = None
         cmd.run()
         so_file = cmd.get_outputs()[0]
@@ -401,7 +401,7 @@ def test_get_outputs(self):
         so_dir = os.path.dirname(so_file)
         assert so_dir == cmd.build_lib
 
-        # inplace = 0, cmd.package = 'bar'
+        # inplace = False, cmd.package = 'bar'
         build_py = cmd.get_finalized_command('build_py')
         build_py.package_dir = {'': 'bar'}
         path = cmd.get_ext_fullpath('foo')
@@ -409,8 +409,8 @@ def test_get_outputs(self):
         path = os.path.split(path)[0]
         assert path == cmd.build_lib
 
-        # inplace = 1, cmd.package = 'bar'
-        cmd.inplace = 1
+        # inplace = True, cmd.package = 'bar'
+        cmd.inplace = True
         other_tmp_dir = os.path.realpath(self.mkdtemp())
         old_wd = os.getcwd()
         os.chdir(other_tmp_dir)
@@ -431,7 +431,7 @@ def test_ext_fullpath(self):
         # dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
         dist = Distribution()
         cmd = self.build_ext(dist)
-        cmd.inplace = 1
+        cmd.inplace = True
         cmd.distribution.package_dir = {'': 'src'}
         cmd.distribution.packages = ['lxml', 'lxml.html']
         curdir = os.getcwd()
@@ -440,7 +440,7 @@ def test_ext_fullpath(self):
         assert wanted == path
 
         # building lxml.etree not inplace
-        cmd.inplace = 0
+        cmd.inplace = False
         cmd.build_lib = os.path.join(curdir, 'tmpdir')
         wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
         path = cmd.get_ext_fullpath('lxml.etree')
@@ -455,7 +455,7 @@ def test_ext_fullpath(self):
         assert wanted == path
 
         # building twisted.runner.portmap inplace
-        cmd.inplace = 1
+        cmd.inplace = True
         path = cmd.get_ext_fullpath('twisted.runner.portmap')
         wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
         assert wanted == path
diff --git a/setuptools/_distutils/tests/test_build_py.py b/setuptools/_distutils/tests/test_build_py.py
index 8bc0e98a4f..b316ed43a1 100644
--- a/setuptools/_distutils/tests/test_build_py.py
+++ b/setuptools/_distutils/tests/test_build_py.py
@@ -28,13 +28,15 @@ def test_package_data(self):
         dist = Distribution({"packages": ["pkg"], "package_dir": {"pkg": sources}})
         # script_name need not exist, it just need to be initialized
         dist.script_name = os.path.join(sources, "setup.py")
-        dist.command_obj["build"] = support.DummyCommand(force=0, build_lib=destination)
+        dist.command_obj["build"] = support.DummyCommand(
+            force=False, build_lib=destination
+        )
         dist.packages = ["pkg"]
         dist.package_data = {"pkg": ["README.txt"]}
         dist.package_dir = {"pkg": sources}
 
         cmd = build_py(dist)
-        cmd.compile = 1
+        cmd.compile = True
         cmd.ensure_finalized()
         assert cmd.package_data == dist.package_data
 
@@ -53,7 +55,7 @@ def test_package_data(self):
             assert not os.path.exists(pycache_dir)
         else:
             pyc_files = os.listdir(pycache_dir)
-            assert "__init__.%s.pyc" % sys.implementation.cache_tag in pyc_files
+            assert f"__init__.{sys.implementation.cache_tag}.pyc" in pyc_files
 
     def test_empty_package_dir(self):
         # See bugs #1668596/#1720897
@@ -82,7 +84,7 @@ def test_byte_compile(self):
         os.chdir(project_dir)
         self.write_file('boiledeggs.py', 'import antigravity')
         cmd = build_py(dist)
-        cmd.compile = 1
+        cmd.compile = True
         cmd.build_lib = 'here'
         cmd.finalize_options()
         cmd.run()
@@ -90,7 +92,7 @@ def test_byte_compile(self):
         found = os.listdir(cmd.build_lib)
         assert sorted(found) == ['__pycache__', 'boiledeggs.py']
         found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
-        assert found == ['boiledeggs.%s.pyc' % sys.implementation.cache_tag]
+        assert found == [f'boiledeggs.{sys.implementation.cache_tag}.pyc']
 
     @pytest.mark.skipif('sys.dont_write_bytecode')
     def test_byte_compile_optimized(self):
@@ -98,7 +100,7 @@ def test_byte_compile_optimized(self):
         os.chdir(project_dir)
         self.write_file('boiledeggs.py', 'import antigravity')
         cmd = build_py(dist)
-        cmd.compile = 0
+        cmd.compile = False
         cmd.optimize = 1
         cmd.build_lib = 'here'
         cmd.finalize_options()
@@ -146,7 +148,7 @@ def test_dont_write_bytecode(self, caplog):
         # makes sure byte_compile is not used
         dist = self.create_dist()[1]
         cmd = build_py(dist)
-        cmd.compile = 1
+        cmd.compile = True
         cmd.optimize = 1
 
         old_dont_write_bytecode = sys.dont_write_bytecode
diff --git a/setuptools/_distutils/tests/test_build_scripts.py b/setuptools/_distutils/tests/test_build_scripts.py
index 208b1f6e65..3582f691ef 100644
--- a/setuptools/_distutils/tests/test_build_scripts.py
+++ b/setuptools/_distutils/tests/test_build_scripts.py
@@ -42,7 +42,7 @@ def get_build_scripts_cmd(self, target, scripts):
         dist = Distribution()
         dist.scripts = scripts
         dist.command_obj["build"] = support.DummyCommand(
-            build_scripts=target, force=1, executable=sys.executable
+            build_scripts=target, force=True, executable=sys.executable
         )
         return build_scripts(dist)
 
diff --git a/setuptools/_distutils/tests/test_check.py b/setuptools/_distutils/tests/test_check.py
index 580cb2a267..b672b1f972 100644
--- a/setuptools/_distutils/tests/test_check.py
+++ b/setuptools/_distutils/tests/test_check.py
@@ -62,7 +62,7 @@ def test_check_metadata(self):
             self._run({}, **{'strict': 1})
 
         # and of course, no error when all metadata are present
-        cmd = self._run(metadata, strict=1)
+        cmd = self._run(metadata, strict=True)
         assert cmd._warnings == 0
 
         # now a test with non-ASCII characters
@@ -126,7 +126,7 @@ def test_check_restructuredtext(self):
         cmd.check_restructuredtext()
         assert cmd._warnings == 1
 
-        # let's see if we have an error with strict=1
+        # let's see if we have an error with strict=True
         metadata = {
             'url': 'xxx',
             'author': 'xxx',
@@ -140,12 +140,12 @@ def test_check_restructuredtext(self):
 
         # and non-broken rest, including a non-ASCII character to test #12114
         metadata['long_description'] = 'title\n=====\n\ntest \u00df'
-        cmd = self._run(metadata, strict=1, restructuredtext=1)
+        cmd = self._run(metadata, strict=True, restructuredtext=True)
         assert cmd._warnings == 0
 
         # check that includes work to test #31292
         metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst'
-        cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1)
+        cmd = self._run(metadata, cwd=HERE, strict=True, restructuredtext=True)
         assert cmd._warnings == 0
 
     def test_check_restructuredtext_with_syntax_highlight(self):
diff --git a/setuptools/_distutils/tests/test_clean.py b/setuptools/_distutils/tests/test_clean.py
index bdbcd4fa46..cc78f30f34 100644
--- a/setuptools/_distutils/tests/test_clean.py
+++ b/setuptools/_distutils/tests/test_clean.py
@@ -37,7 +37,7 @@ def test_simple_run(self):
 
         # make sure the files where removed
         for _name, path in dirs:
-            assert not os.path.exists(path), '%s was not removed' % path
+            assert not os.path.exists(path), f'{path} was not removed'
 
         # let's run the command again (should spit warnings but succeed)
         cmd.all = 1
diff --git a/setuptools/_distutils/tests/test_cmd.py b/setuptools/_distutils/tests/test_cmd.py
index f366aa6522..76e8f5989b 100644
--- a/setuptools/_distutils/tests/test_cmd.py
+++ b/setuptools/_distutils/tests/test_cmd.py
@@ -48,7 +48,7 @@ def test_ensure_string_list(self, cmd):
     def test_make_file(self, cmd):
         # making sure it raises when infiles is not a string or a list/tuple
         with pytest.raises(TypeError):
-            cmd.make_file(infiles=1, outfile='', func='func', args=())
+            cmd.make_file(infiles=True, outfile='', func='func', args=())
 
         # making sure execute gets called properly
         def _execute(func, args, exec_msg, level):
diff --git a/setuptools/_distutils/tests/test_config_cmd.py b/setuptools/_distutils/tests/test_config_cmd.py
index fc0a7885cd..ebee2ef93f 100644
--- a/setuptools/_distutils/tests/test_config_cmd.py
+++ b/setuptools/_distutils/tests/test_config_cmd.py
@@ -36,7 +36,7 @@ def test_dump_file(self):
     def test_search_cpp(self):
         cmd = missing_compiler_executable(['preprocessor'])
         if cmd is not None:
-            self.skipTest('The %r command is not found' % cmd)
+            self.skipTest(f'The {cmd!r} command is not found')
         pkg_dir, dist = self.create_dist()
         cmd = config(dist)
         cmd._check_compiler()
@@ -58,9 +58,9 @@ def test_finalize_options(self):
         # on options
         pkg_dir, dist = self.create_dist()
         cmd = config(dist)
-        cmd.include_dirs = 'one%stwo' % os.pathsep
+        cmd.include_dirs = f'one{os.pathsep}two'
         cmd.libraries = 'one'
-        cmd.library_dirs = 'three%sfour' % os.pathsep
+        cmd.library_dirs = f'three{os.pathsep}four'
         cmd.ensure_finalized()
 
         assert cmd.include_dirs == ['one', 'two']
diff --git a/setuptools/_distutils/tests/test_cygwinccompiler.py b/setuptools/_distutils/tests/test_cygwinccompiler.py
index 0a66193d35..2e1640b757 100644
--- a/setuptools/_distutils/tests/test_cygwinccompiler.py
+++ b/setuptools/_distutils/tests/test_cygwinccompiler.py
@@ -71,34 +71,34 @@ def test_check_config_h(self):
         assert check_config_h()[0] == CONFIG_H_OK
 
     def test_get_msvcr(self):
-        # none
+        # []
         sys.version = (
             '2.6.1 (r261:67515, Dec  6 2008, 16:42:21) '
             '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]'
         )
-        assert get_msvcr() is None
+        assert get_msvcr() == []
 
         # MSVC 7.0
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1300 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1300 32 bits (Intel)]'
         )
         assert get_msvcr() == ['msvcr70']
 
         # MSVC 7.1
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1310 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1310 32 bits (Intel)]'
         )
         assert get_msvcr() == ['msvcr71']
 
         # VS2005 / MSVC 8.0
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1400 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1400 32 bits (Intel)]'
         )
         assert get_msvcr() == ['msvcr80']
 
         # VS2008 / MSVC 9.0
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1500 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1500 32 bits (Intel)]'
         )
         assert get_msvcr() == ['msvcr90']
 
@@ -110,7 +110,14 @@ def test_get_msvcr(self):
 
         # unknown
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.2000 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.2000 32 bits (Intel)]'
         )
         with pytest.raises(ValueError):
             get_msvcr()
+
+    @pytest.mark.skipif('sys.platform != "cygwin"')
+    def test_dll_libraries_not_none(self):
+        from distutils.cygwinccompiler import CygwinCCompiler
+
+        compiler = CygwinCCompiler()
+        assert compiler.dll_libraries is not None
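get_msvcr derives the runtime library from the MSC version embedded in sys.version, and the change above makes the "no MSVC marker" case return an empty list instead of None. A sketch of the parsing these assertions exercise (table limited to the versions tested here; the real mapping lives in cygwinccompiler):

    # Sketch of the version scrape behind get_msvcr(); not the upstream body.
    import re
    import sys

    _MSVCR = {1300: 'msvcr70', 1310: 'msvcr71', 1400: 'msvcr80', 1500: 'msvcr90'}

    def sketch_get_msvcr():
        match = re.search(r'MSC v\.(\d{4})', sys.version)
        if not match:
            return []  # non-MSVC build, e.g. GCC: nothing to link against
        msc_ver = int(match.group(1))
        try:
            return [_MSVCR[msc_ver]]
        except KeyError:
            raise ValueError(f'Unknown MS Compiler version {msc_ver}')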
diff --git a/setuptools/_distutils/tests/test_dir_util.py b/setuptools/_distutils/tests/test_dir_util.py
index 84cda619ba..c00ffbbd81 100644
--- a/setuptools/_distutils/tests/test_dir_util.py
+++ b/setuptools/_distutils/tests/test_dir_util.py
@@ -29,17 +29,17 @@ def stuff(request, monkeypatch, distutils_managed_tempdir):
 
 class TestDirUtil(support.TempdirManager):
     def test_mkpath_remove_tree_verbosity(self, caplog):
-        mkpath(self.target, verbose=0)
+        mkpath(self.target, verbose=False)
         assert not caplog.records
-        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.root_target, verbose=False)
 
-        mkpath(self.target, verbose=1)
-        wanted = ['creating %s' % self.root_target, 'creating %s' % self.target]
+        mkpath(self.target, verbose=True)
+        wanted = [f'creating {self.root_target}', f'creating {self.target}']
         assert caplog.messages == wanted
         caplog.clear()
 
-        remove_tree(self.root_target, verbose=1)
-        wanted = ["removing '%s' (and everything under it)" % self.root_target]
+        remove_tree(self.root_target, verbose=True)
+        wanted = [f"removing '{self.root_target}' (and everything under it)"]
         assert caplog.messages == wanted
 
     @pytest.mark.skipif("platform.system() == 'Windows'")
@@ -53,45 +53,45 @@ def test_mkpath_with_custom_mode(self):
         assert stat.S_IMODE(os.stat(self.target2).st_mode) == 0o555 & ~umask
 
     def test_create_tree_verbosity(self, caplog):
-        create_tree(self.root_target, ['one', 'two', 'three'], verbose=0)
+        create_tree(self.root_target, ['one', 'two', 'three'], verbose=False)
         assert caplog.messages == []
-        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.root_target, verbose=False)
 
-        wanted = ['creating %s' % self.root_target]
-        create_tree(self.root_target, ['one', 'two', 'three'], verbose=1)
+        wanted = [f'creating {self.root_target}']
+        create_tree(self.root_target, ['one', 'two', 'three'], verbose=True)
         assert caplog.messages == wanted
 
-        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.root_target, verbose=False)
 
     def test_copy_tree_verbosity(self, caplog):
-        mkpath(self.target, verbose=0)
+        mkpath(self.target, verbose=False)
 
-        copy_tree(self.target, self.target2, verbose=0)
+        copy_tree(self.target, self.target2, verbose=False)
         assert caplog.messages == []
 
-        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.root_target, verbose=False)
 
-        mkpath(self.target, verbose=0)
+        mkpath(self.target, verbose=False)
         a_file = path.Path(self.target) / 'ok.txt'
         jaraco.path.build({'ok.txt': 'some content'}, self.target)
 
         wanted = [f'copying {a_file} -> {self.target2}']
-        copy_tree(self.target, self.target2, verbose=1)
+        copy_tree(self.target, self.target2, verbose=True)
         assert caplog.messages == wanted
 
-        remove_tree(self.root_target, verbose=0)
-        remove_tree(self.target2, verbose=0)
+        remove_tree(self.root_target, verbose=False)
+        remove_tree(self.target2, verbose=False)
 
     def test_copy_tree_skips_nfs_temp_files(self):
-        mkpath(self.target, verbose=0)
+        mkpath(self.target, verbose=False)
 
         jaraco.path.build({'ok.txt': 'some content', '.nfs123abc': ''}, self.target)
 
         copy_tree(self.target, self.target2)
         assert os.listdir(self.target2) == ['ok.txt']
 
-        remove_tree(self.root_target, verbose=0)
-        remove_tree(self.target2, verbose=0)
+        remove_tree(self.root_target, verbose=False)
+        remove_tree(self.target2, verbose=False)
 
     def test_ensure_relative(self):
         if os.sep == '/':
diff --git a/setuptools/_distutils/tests/test_dist.py b/setuptools/_distutils/tests/test_dist.py
index 9ed4d16dd8..5bd206fec1 100644
--- a/setuptools/_distutils/tests/test_dist.py
+++ b/setuptools/_distutils/tests/test_dist.py
@@ -468,7 +468,7 @@ def test_show_help(self, request, capsys):
         # smoke test, just makes sure some help is displayed
         dist = Distribution()
         sys.argv = []
-        dist.help = 1
+        dist.help = True
         dist.script_name = 'setup.py'
         dist.parse_command_line()
 
diff --git a/setuptools/_distutils/tests/test_extension.py b/setuptools/_distutils/tests/test_extension.py
index 77bb147bfd..527a135506 100644
--- a/setuptools/_distutils/tests/test_extension.py
+++ b/setuptools/_distutils/tests/test_extension.py
@@ -6,7 +6,7 @@
 
 import pytest
 
-from .py38compat import check_warnings
+from .compat.py38 import check_warnings
 
 
 class TestExtension:
diff --git a/setuptools/_distutils/tests/test_file_util.py b/setuptools/_distutils/tests/test_file_util.py
index 4c2abd2453..85ac2136b3 100644
--- a/setuptools/_distutils/tests/test_file_util.py
+++ b/setuptools/_distutils/tests/test_file_util.py
@@ -22,23 +22,23 @@ class TestFileUtil:
     def test_move_file_verbosity(self, caplog):
         jaraco.path.build({self.source: 'some content'})
 
-        move_file(self.source, self.target, verbose=0)
+        move_file(self.source, self.target, verbose=False)
         assert not caplog.messages
 
         # back to original state
-        move_file(self.target, self.source, verbose=0)
+        move_file(self.target, self.source, verbose=False)
 
-        move_file(self.source, self.target, verbose=1)
+        move_file(self.source, self.target, verbose=True)
         wanted = [f'moving {self.source} -> {self.target}']
         assert caplog.messages == wanted
 
         # back to original state
-        move_file(self.target, self.source, verbose=0)
+        move_file(self.target, self.source, verbose=False)
 
         caplog.clear()
         # now the target is a dir
         os.mkdir(self.target_dir)
-        move_file(self.source, self.target_dir, verbose=1)
+        move_file(self.source, self.target_dir, verbose=True)
         wanted = [f'moving {self.source} -> {self.target_dir}']
         assert caplog.messages == wanted
 
@@ -48,7 +48,7 @@ def test_move_file_exception_unpacking_rename(self):
             DistutilsFileError
         ):
             jaraco.path.build({self.source: 'spam eggs'})
-            move_file(self.source, self.target, verbose=0)
+            move_file(self.source, self.target, verbose=False)
 
     def test_move_file_exception_unpacking_unlink(self):
         # see issue 22182
@@ -58,7 +58,7 @@ def test_move_file_exception_unpacking_unlink(self):
             DistutilsFileError
         ):
             jaraco.path.build({self.source: 'spam eggs'})
-            move_file(self.source, self.target, verbose=0)
+            move_file(self.source, self.target, verbose=False)
 
     def test_copy_file_hard_link(self):
         jaraco.path.build({self.source: 'some content'})
@@ -67,7 +67,7 @@ def test_copy_file_hard_link(self):
         try:
             os.link(self.source, self.target)
         except OSError as e:
-            self.skipTest('os.link: %s' % e)
+            self.skipTest(f'os.link: {e}')
         else:
             self.target.unlink()
         st = os.stat(self.source)
diff --git a/setuptools/_distutils/tests/test_filelist.py b/setuptools/_distutils/tests/test_filelist.py
index 6a379a6323..ec7e5cf363 100644
--- a/setuptools/_distutils/tests/test_filelist.py
+++ b/setuptools/_distutils/tests/test_filelist.py
@@ -10,7 +10,7 @@
 import jaraco.path
 import pytest
 
-from . import py38compat as os_helper
+from .compat import py38 as os_helper
 
 MANIFEST_IN = """\
 include ok
diff --git a/setuptools/_distutils/tests/test_install.py b/setuptools/_distutils/tests/test_install.py
index 08f0f83993..b3ffb2e668 100644
--- a/setuptools/_distutils/tests/test_install.py
+++ b/setuptools/_distutils/tests/test_install.py
@@ -13,6 +13,7 @@
 from distutils.errors import DistutilsOptionError
 from distutils.extension import Extension
 from distutils.tests import missing_compiler_executable, support
+from distutils.util import is_mingw
 
 import pytest
 
@@ -100,7 +101,7 @@ def _expanduser(path):
         assert 'user' in options
 
         # setting a value
-        cmd.user = 1
+        cmd.user = True
 
         # user base and site shouldn't be created yet
         assert not os.path.exists(site.USER_BASE)
@@ -117,7 +118,7 @@ def _expanduser(path):
         assert 'usersite' in cmd.config_vars
 
         actual_headers = os.path.relpath(cmd.install_headers, site.USER_BASE)
-        if os.name == 'nt':
+        if os.name == 'nt' and not is_mingw():
             site_path = os.path.relpath(os.path.dirname(orig_site), orig_base)
             include = os.path.join(site_path, 'Include')
         else:
@@ -198,7 +199,7 @@ def test_record(self):
         found = [pathlib.Path(line).name for line in content.splitlines()]
         expected = [
             'hello.py',
-            'hello.%s.pyc' % sys.implementation.cache_tag,
+            f'hello.{sys.implementation.cache_tag}.pyc',
             'sayhi',
             'UNKNOWN-0.0.0-py{}.{}.egg-info'.format(*sys.version_info[:2]),
         ]
@@ -207,7 +208,7 @@ def test_record(self):
     def test_record_extensions(self):
         cmd = missing_compiler_executable()
         if cmd is not None:
-            pytest.skip('The %r command is not found' % cmd)
+            pytest.skip(f'The {cmd!r} command is not found')
         install_dir = self.mkdtemp()
         project_dir, dist = self.create_dist(
             ext_modules=[Extension('xx', ['xxmodule.c'])]
diff --git a/setuptools/_distutils/tests/test_install_data.py b/setuptools/_distutils/tests/test_install_data.py
index e453d01f1a..f34070b10b 100644
--- a/setuptools/_distutils/tests/test_install_data.py
+++ b/setuptools/_distutils/tests/test_install_data.py
@@ -41,7 +41,7 @@ def test_simple_run(self):
         cmd.outfiles = []
 
         # let's try with warn_dir one
-        cmd.warn_dir = 1
+        cmd.warn_dir = True
         cmd.ensure_finalized()
         cmd.run()
 
diff --git a/setuptools/_distutils/tests/test_install_lib.py b/setuptools/_distutils/tests/test_install_lib.py
index 964106fa00..f685a57956 100644
--- a/setuptools/_distutils/tests/test_install_lib.py
+++ b/setuptools/_distutils/tests/test_install_lib.py
@@ -97,7 +97,7 @@ def test_dont_write_bytecode(self, caplog):
         # makes sure byte_compile is not used
         dist = self.create_dist()[1]
         cmd = install_lib(dist)
-        cmd.compile = 1
+        cmd.compile = True
         cmd.optimize = 1
 
         old_dont_write_bytecode = sys.dont_write_bytecode
diff --git a/setuptools/_distutils/tests/test_install_scripts.py b/setuptools/_distutils/tests/test_install_scripts.py
index 5d9f13a426..868b1c2252 100644
--- a/setuptools/_distutils/tests/test_install_scripts.py
+++ b/setuptools/_distutils/tests/test_install_scripts.py
@@ -14,8 +14,8 @@ def test_default_settings(self):
         dist.command_obj["build"] = support.DummyCommand(build_scripts="/foo/bar")
         dist.command_obj["install"] = support.DummyCommand(
             install_scripts="/splat/funk",
-            force=1,
-            skip_build=1,
+            force=True,
+            skip_build=True,
         )
         cmd = install_scripts(dist)
         assert not cmd.force
@@ -40,8 +40,8 @@ def test_installation(self):
         dist.command_obj["build"] = support.DummyCommand(build_scripts=source)
         dist.command_obj["install"] = support.DummyCommand(
             install_scripts=target,
-            force=1,
-            skip_build=1,
+            force=True,
+            skip_build=True,
         )
         cmd = install_scripts(dist)
         cmd.finalize_options()
diff --git a/setuptools/_distutils/tests/test_mingwccompiler.py b/setuptools/_distutils/tests/test_mingwccompiler.py
new file mode 100644
index 0000000000..d81360e782
--- /dev/null
+++ b/setuptools/_distutils/tests/test_mingwccompiler.py
@@ -0,0 +1,45 @@
+import pytest
+
+from distutils.util import split_quoted, is_mingw
+from distutils.errors import DistutilsPlatformError, CCompilerError
+
+
+class TestMingw32CCompiler:
+    @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
+    def test_compiler_type(self):
+        from distutils.cygwinccompiler import Mingw32CCompiler
+
+        compiler = Mingw32CCompiler()
+        assert compiler.compiler_type == 'mingw32'
+
+    @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
+    def test_set_executables(self, monkeypatch):
+        from distutils.cygwinccompiler import Mingw32CCompiler
+
+        monkeypatch.setenv('CC', 'cc')
+        monkeypatch.setenv('CXX', 'c++')
+
+        compiler = Mingw32CCompiler()
+
+        assert compiler.compiler == split_quoted('cc -O -Wall')
+        assert compiler.compiler_so == split_quoted('cc -shared -O -Wall')
+        assert compiler.compiler_cxx == split_quoted('c++ -O -Wall')
+        assert compiler.linker_exe == split_quoted('cc')
+        assert compiler.linker_so == split_quoted('cc -shared')
+
+    @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
+    def test_runtime_library_dir_option(self):
+        from distutils.cygwinccompiler import Mingw32CCompiler
+
+        compiler = Mingw32CCompiler()
+        with pytest.raises(DistutilsPlatformError):
+            compiler.runtime_library_dir_option('/usr/lib')
+
+    @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
+    def test_cygwincc_error(self, monkeypatch):
+        import distutils.cygwinccompiler
+
+        monkeypatch.setattr(distutils.cygwinccompiler, 'is_cygwincc', lambda _: True)
+
+        with pytest.raises(CCompilerError):
+            distutils.cygwinccompiler.Mingw32CCompiler()
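Every test in this new module gates on distutils.util.is_mingw(), which this patch introduces alongside the compiler changes. The predicate is assumed to reduce to "a win32 CPython whose sysconfig platform string starts with 'mingw'"; a sketch under that assumption:

    # Sketch of the is_mingw() predicate the skipif guards rely on.
    import sys
    import sysconfig

    def is_mingw_sketch():
        return sys.platform == 'win32' and sysconfig.get_platform().startswith('mingw')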
diff --git a/setuptools/_distutils/tests/test_register.py b/setuptools/_distutils/tests/test_register.py
index d071bbe951..14dfb832c7 100644
--- a/setuptools/_distutils/tests/test_register.py
+++ b/setuptools/_distutils/tests/test_register.py
@@ -137,7 +137,7 @@ def _no_way(prompt=''):
 
         register_module.input = _no_way
 
-        cmd.show_response = 1
+        cmd.show_response = True
         cmd.run()
 
         # let's see what the server received : we should
@@ -208,7 +208,7 @@ def test_strict(self):
         # empty metadata
         cmd = self._get_cmd({})
         cmd.ensure_finalized()
-        cmd.strict = 1
+        cmd.strict = True
         with pytest.raises(DistutilsSetupError):
             cmd.run()
 
@@ -224,7 +224,7 @@ def test_strict(self):
 
         cmd = self._get_cmd(metadata)
         cmd.ensure_finalized()
-        cmd.strict = 1
+        cmd.strict = True
         with pytest.raises(DistutilsSetupError):
             cmd.run()
 
@@ -232,7 +232,7 @@ def test_strict(self):
         metadata['long_description'] = 'title\n=====\n\ntext'
         cmd = self._get_cmd(metadata)
         cmd.ensure_finalized()
-        cmd.strict = 1
+        cmd.strict = True
         inputs = Inputs('1', 'tarek', 'y')
         register_module.input = inputs.__call__
         # let's run the command
@@ -265,7 +265,7 @@ def test_strict(self):
 
         cmd = self._get_cmd(metadata)
         cmd.ensure_finalized()
-        cmd.strict = 1
+        cmd.strict = True
         inputs = Inputs('1', 'tarek', 'y')
         register_module.input = inputs.__call__
         # let's run the command
@@ -296,7 +296,7 @@ def test_register_invalid_long_description(self, monkeypatch):
 
     def test_list_classifiers(self, caplog):
         cmd = self._get_cmd()
-        cmd.list_classifiers = 1
+        cmd.list_classifiers = True
         cmd.run()
         assert caplog.messages == ['running check', 'xxx']
 
@@ -305,7 +305,7 @@ def test_show_response(self, caplog):
         cmd = self._get_cmd()
         inputs = Inputs('1', 'tarek', 'y')
         register_module.input = inputs.__call__
-        cmd.show_response = 1
+        cmd.show_response = True
         try:
             cmd.run()
         finally:
diff --git a/setuptools/_distutils/tests/test_sdist.py b/setuptools/_distutils/tests/test_sdist.py
index 66a4194706..7daaaa6323 100644
--- a/setuptools/_distutils/tests/test_sdist.py
+++ b/setuptools/_distutils/tests/test_sdist.py
@@ -2,6 +2,7 @@
 
 import os
 import pathlib
+import shutil  # noqa: F401
 import tarfile
 import warnings
 import zipfile
@@ -10,7 +11,6 @@
 from distutils.core import Distribution
 from distutils.errors import DistutilsOptionError
 from distutils.filelist import FileList
-from distutils.spawn import find_executable  # noqa: F401
 from distutils.tests.test_config import BasePyPIRCCommandTestCase
 from os.path import join
 from textwrap import dedent
@@ -20,7 +20,7 @@
 import pytest
 from more_itertools import ilen
 
-from .py38compat import check_warnings
+from .compat.py38 import check_warnings
 from .unix_compat import grp, pwd, require_uid_0, require_unix_id
 
 SETUP_PY = """
@@ -73,7 +73,7 @@ def get_cmd(self, metadata=None):
         """Returns a cmd"""
         if metadata is None:
             metadata = {
-                'name': 'fake',
+                'name': 'ns.fake--pkg',
                 'version': '1.0',
                 'url': 'xxx',
                 'author': 'xxx',
@@ -117,9 +117,9 @@ def test_prune_file_list(self):
         # now let's check what we have
         dist_folder = join(self.tmp_dir, 'dist')
         files = os.listdir(dist_folder)
-        assert files == ['fake-1.0.zip']
+        assert files == ['ns_fake_pkg-1.0.zip']
 
-        zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip'))
+        zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.0.zip'))
         try:
             content = zip_file.namelist()
         finally:
@@ -134,11 +134,11 @@ def test_prune_file_list(self):
             'somecode/',
             'somecode/__init__.py',
         ]
-        assert sorted(content) == ['fake-1.0/' + x for x in expected]
+        assert sorted(content) == ['ns_fake_pkg-1.0/' + x for x in expected]
 
     @pytest.mark.usefixtures('needs_zlib')
-    @pytest.mark.skipif("not find_executable('tar')")
-    @pytest.mark.skipif("not find_executable('gzip')")
+    @pytest.mark.skipif("not shutil.which('tar')")
+    @pytest.mark.skipif("not shutil.which('gzip')")
     def test_make_distribution(self):
         # now building a sdist
         dist, cmd = self.get_cmd()
@@ -152,10 +152,10 @@ def test_make_distribution(self):
         dist_folder = join(self.tmp_dir, 'dist')
         result = os.listdir(dist_folder)
         result.sort()
-        assert result == ['fake-1.0.tar', 'fake-1.0.tar.gz']
+        assert result == ['ns_fake_pkg-1.0.tar', 'ns_fake_pkg-1.0.tar.gz']
 
-        os.remove(join(dist_folder, 'fake-1.0.tar'))
-        os.remove(join(dist_folder, 'fake-1.0.tar.gz'))
+        os.remove(join(dist_folder, 'ns_fake_pkg-1.0.tar'))
+        os.remove(join(dist_folder, 'ns_fake_pkg-1.0.tar.gz'))
 
         # now trying a tar then a gztar
         cmd.formats = ['tar', 'gztar']
@@ -165,7 +165,7 @@ def test_make_distribution(self):
 
         result = os.listdir(dist_folder)
         result.sort()
-        assert result == ['fake-1.0.tar', 'fake-1.0.tar.gz']
+        assert result == ['ns_fake_pkg-1.0.tar', 'ns_fake_pkg-1.0.tar.gz']
 
     @pytest.mark.usefixtures('needs_zlib')
     def test_add_defaults(self):
@@ -218,9 +218,9 @@ def test_add_defaults(self):
         # now let's check what we have
         dist_folder = join(self.tmp_dir, 'dist')
         files = os.listdir(dist_folder)
-        assert files == ['fake-1.0.zip']
+        assert files == ['ns_fake_pkg-1.0.zip']
 
-        zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip'))
+        zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.0.zip'))
         try:
             content = zip_file.namelist()
         finally:
@@ -246,7 +246,7 @@ def test_add_defaults(self):
             'somecode/doc.dat',
             'somecode/doc.txt',
         ]
-        assert sorted(content) == ['fake-1.0/' + x for x in expected]
+        assert sorted(content) == ['ns_fake_pkg-1.0/' + x for x in expected]
 
         # checking the MANIFEST
         manifest = pathlib.Path(self.tmp_dir, 'MANIFEST').read_text(encoding='utf-8')
@@ -419,23 +419,23 @@ def test_manual_manifest(self):
 
         assert list(clean_lines(cmd.manifest)) == ['README.manual']
 
-        archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
         archive = tarfile.open(archive_name)
         try:
             filenames = [tarinfo.name for tarinfo in archive]
         finally:
             archive.close()
         assert sorted(filenames) == [
-            'fake-1.0',
-            'fake-1.0/PKG-INFO',
-            'fake-1.0/README.manual',
+            'ns_fake_pkg-1.0',
+            'ns_fake_pkg-1.0/PKG-INFO',
+            'ns_fake_pkg-1.0/README.manual',
         ]
 
     @pytest.mark.usefixtures('needs_zlib')
     @require_unix_id
     @require_uid_0
-    @pytest.mark.skipif("not find_executable('tar')")
-    @pytest.mark.skipif("not find_executable('gzip')")
+    @pytest.mark.skipif("not shutil.which('tar')")
+    @pytest.mark.skipif("not shutil.which('gzip')")
     def test_make_distribution_owner_group(self):
         # now building a sdist
         dist, cmd = self.get_cmd()
@@ -448,7 +448,7 @@ def test_make_distribution_owner_group(self):
         cmd.run()
 
         # making sure we have the good rights
-        archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
         archive = tarfile.open(archive_name)
         try:
             for member in archive.getmembers():
@@ -466,7 +466,7 @@ def test_make_distribution_owner_group(self):
         cmd.run()
 
         # making sure we have the good rights
-        archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
         archive = tarfile.open(archive_name)
 
         # note that we are not testing the group ownership here
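The fixture metadata switches from 'fake' to 'ns.fake--pkg' so these assertions pin down PEP 625-style sdist file naming: runs of '-', '_' and '.' collapse, the name is lowercased, and the separators become underscores in the archive base name. A sketch of that normalization (helper name illustrative):

    import re

    def filename_component(name):
        # PEP 503 canonicalization, then PEP 625 filename escaping.
        canonical = re.sub(r'[-_.]+', '-', name).lower()
        return canonical.replace('-', '_')

    assert filename_component('ns.fake--pkg') == 'ns_fake_pkg'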
diff --git a/setuptools/_distutils/tests/test_spawn.py b/setuptools/_distutils/tests/test_spawn.py
index abbac4c23f..2576bdd53d 100644
--- a/setuptools/_distutils/tests/test_spawn.py
+++ b/setuptools/_distutils/tests/test_spawn.py
@@ -12,7 +12,7 @@
 import path
 import pytest
 
-from . import py38compat as os_helper
+from .compat import py38 as os_helper
 
 
 class TestSpawn(support.TempdirManager):
@@ -24,7 +24,7 @@ def test_spawn(self):
         # through the shell that returns 1
         if sys.platform != 'win32':
             exe = os.path.join(tmpdir, 'foo.sh')
-            self.write_file(exe, '#!%s\nexit 1' % unix_shell)
+            self.write_file(exe, f'#!{unix_shell}\nexit 1')
         else:
             exe = os.path.join(tmpdir, 'foo.bat')
             self.write_file(exe, 'exit 1')
@@ -36,7 +36,7 @@ def test_spawn(self):
         # now something that works
         if sys.platform != 'win32':
             exe = os.path.join(tmpdir, 'foo.sh')
-            self.write_file(exe, '#!%s\nexit 0' % unix_shell)
+            self.write_file(exe, f'#!{unix_shell}\nexit 0')
         else:
             exe = os.path.join(tmpdir, 'foo.bat')
             self.write_file(exe, 'exit 0')
@@ -45,14 +45,9 @@ def test_spawn(self):
         spawn([exe])  # should work without any error
 
     def test_find_executable(self, tmp_path):
-        program_noeext = 'program'
-        # Give the temporary program an ".exe" suffix for all.
-        # It's needed on Windows and not harmful on other platforms.
-        program = program_noeext + ".exe"
-
-        program_path = tmp_path / program
-        program_path.write_text("", encoding='utf-8')
-        program_path.chmod(stat.S_IXUSR)
+        program_path = self._make_executable(tmp_path, '.exe')
+        program = program_path.name
+        program_noeext = program_path.with_suffix('').name
         filename = str(program_path)
         tmp_dir = path.Path(tmp_path)
 
@@ -121,6 +116,15 @@ def test_find_executable(self, tmp_path):
                 rv = find_executable(program)
                 assert rv == filename
 
+    @staticmethod
+    def _make_executable(tmp_path, ext):
+        # Give the temporary program a suffix regardless of platform.
+        # It's needed on Windows and not harmful on others.
+        program = tmp_path.joinpath('program').with_suffix(ext)
+        program.write_text("", encoding='utf-8')
+        program.chmod(stat.S_IXUSR)
+        return program
+
     def test_spawn_missing_exe(self):
         with pytest.raises(DistutilsExecError) as ctx:
             spawn(['does-not-exist'])
diff --git a/setuptools/_distutils/tests/test_sysconfig.py b/setuptools/_distutils/tests/test_sysconfig.py
index c55896661f..889a398c22 100644
--- a/setuptools/_distutils/tests/test_sysconfig.py
+++ b/setuptools/_distutils/tests/test_sysconfig.py
@@ -7,7 +7,7 @@
 import subprocess
 import sys
 from distutils import sysconfig
-from distutils.ccompiler import get_default_compiler  # noqa: F401
+from distutils.ccompiler import new_compiler  # noqa: F401
 from distutils.unixccompiler import UnixCCompiler
 from test.support import swap_item
 
@@ -110,7 +110,7 @@ def set_executables(self, **kw):
 
         return comp
 
-    @pytest.mark.skipif("get_default_compiler() != 'unix'")
+    @pytest.mark.skipif("not isinstance(new_compiler(), UnixCCompiler)")
     @pytest.mark.usefixtures('disable_macos_customization')
     def test_customize_compiler(self):
         # Make sure that sysconfig._config_vars is initialized
@@ -132,12 +132,12 @@ def test_customize_compiler(self):
         assert comp.exes['preprocessor'] == 'env_cpp --env-cppflags'
         assert comp.exes['compiler'] == 'env_cc --sc-cflags --env-cflags --env-cppflags'
         assert comp.exes['compiler_so'] == (
-            'env_cc --sc-cflags ' '--env-cflags ' '--env-cppflags --sc-ccshared'
+            'env_cc --sc-cflags --env-cflags --env-cppflags --sc-ccshared'
         )
         assert comp.exes['compiler_cxx'] == 'env_cxx --env-cxx-flags'
         assert comp.exes['linker_exe'] == 'env_cc'
         assert comp.exes['linker_so'] == (
-            'env_ldshared --env-ldflags --env-cflags' ' --env-cppflags'
+            'env_ldshared --env-ldflags --env-cflags --env-cppflags'
         )
         assert comp.shared_lib_extension == 'sc_shutil_suffix'
 
diff --git a/setuptools/_distutils/tests/test_text_file.py b/setuptools/_distutils/tests/test_text_file.py
index c5c910a820..f511156561 100644
--- a/setuptools/_distutils/tests/test_text_file.py
+++ b/setuptools/_distutils/tests/test_text_file.py
@@ -60,7 +60,11 @@ def test_input(count, description, file, expected_result):
         jaraco.path.build({filename.name: TEST_DATA}, tmp_path)
 
         in_file = TextFile(
-            filename, strip_comments=0, skip_blanks=0, lstrip_ws=0, rstrip_ws=0
+            filename,
+            strip_comments=False,
+            skip_blanks=False,
+            lstrip_ws=False,
+            rstrip_ws=False,
         )
         try:
             test_input(1, "no processing", in_file, result1)
@@ -68,7 +72,11 @@ def test_input(count, description, file, expected_result):
             in_file.close()
 
         in_file = TextFile(
-            filename, strip_comments=1, skip_blanks=0, lstrip_ws=0, rstrip_ws=0
+            filename,
+            strip_comments=True,
+            skip_blanks=False,
+            lstrip_ws=False,
+            rstrip_ws=False,
         )
         try:
             test_input(2, "strip comments", in_file, result2)
@@ -76,7 +84,11 @@ def test_input(count, description, file, expected_result):
             in_file.close()
 
         in_file = TextFile(
-            filename, strip_comments=0, skip_blanks=1, lstrip_ws=0, rstrip_ws=0
+            filename,
+            strip_comments=False,
+            skip_blanks=True,
+            lstrip_ws=False,
+            rstrip_ws=False,
         )
         try:
             test_input(3, "strip blanks", in_file, result3)
@@ -90,7 +102,11 @@ def test_input(count, description, file, expected_result):
             in_file.close()
 
         in_file = TextFile(
-            filename, strip_comments=1, skip_blanks=1, join_lines=1, rstrip_ws=1
+            filename,
+            strip_comments=True,
+            skip_blanks=True,
+            join_lines=True,
+            rstrip_ws=True,
         )
         try:
             test_input(5, "join lines without collapsing", in_file, result5)
@@ -99,11 +115,11 @@ def test_input(count, description, file, expected_result):
 
         in_file = TextFile(
             filename,
-            strip_comments=1,
-            skip_blanks=1,
-            join_lines=1,
-            rstrip_ws=1,
-            collapse_join=1,
+            strip_comments=True,
+            skip_blanks=True,
+            join_lines=True,
+            rstrip_ws=True,
+            collapse_join=True,
         )
         try:
             test_input(6, "join lines with collapsing", in_file, result6)
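With the option flags now real booleans, a typical TextFile read loop looks like the sketch below (input filename hypothetical; readline() returns None at end-of-file, which makes the iter() sentinel form convenient):

    from distutils.text_file import TextFile

    tf = TextFile(
        'setup.cfg.in',
        strip_comments=True,  # drop everything from '#' to end of line
        skip_blanks=True,     # suppress lines that end up empty
        join_lines=True,      # splice trailing-backslash continuations
        rstrip_ws=True,
    )
    try:
        for line in iter(tf.readline, None):
            print(line)
    finally:
        tf.close()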
diff --git a/setuptools/_distutils/tests/test_unixccompiler.py b/setuptools/_distutils/tests/test_unixccompiler.py
index 6f05fa6989..d2c88e9116 100644
--- a/setuptools/_distutils/tests/test_unixccompiler.py
+++ b/setuptools/_distutils/tests/test_unixccompiler.py
@@ -12,7 +12,7 @@
 import pytest
 
 from . import support
-from .py38compat import EnvironmentVarGuard
+from .compat.py38 import EnvironmentVarGuard
 
 
 @pytest.fixture(autouse=True)
@@ -32,7 +32,7 @@ def rpath_foo(self):
 
 
 class TestUnixCCompiler(support.TempdirManager):
-    @pytest.mark.skipif('platform.system == "Windows"')  # noqa: C901
+    @pytest.mark.skipif('platform.system() == "Windows"')
     def test_runtime_libdir_option(self):  # noqa: C901
         # Issue #5900; GitHub Issue #37
         #
diff --git a/setuptools/_distutils/tests/test_upload.py b/setuptools/_distutils/tests/test_upload.py
index 0692f00160..56df209c73 100644
--- a/setuptools/_distutils/tests/test_upload.py
+++ b/setuptools/_distutils/tests/test_upload.py
@@ -117,7 +117,7 @@ def test_upload(self, caplog):
         # lets run it
         pkg_dir, dist = self.create_dist(dist_files=dist_files)
         cmd = upload(dist)
-        cmd.show_response = 1
+        cmd.show_response = True
         cmd.ensure_finalized()
         cmd.run()
 
@@ -167,7 +167,7 @@ def test_upload_correct_cr(self):
             dist_files=dist_files, description='long description\r'
         )
         cmd = upload(dist)
-        cmd.show_response = 1
+        cmd.show_response = True
         cmd.ensure_finalized()
         cmd.run()
 
diff --git a/setuptools/_distutils/tests/test_util.py b/setuptools/_distutils/tests/test_util.py
index 78d8b1e3b6..0de4e1a59c 100644
--- a/setuptools/_distutils/tests/test_util.py
+++ b/setuptools/_distutils/tests/test_util.py
@@ -108,6 +108,7 @@ def _join(*path):
 
         # windows
         os.name = 'nt'
+        os.sep = '\\'
 
         def _isabs(path):
             return path.startswith('c:\\')
diff --git a/setuptools/_distutils/text_file.py b/setuptools/_distutils/text_file.py
index 0f846e3c52..fec29c73b0 100644
--- a/setuptools/_distutils/text_file.py
+++ b/setuptools/_distutils/text_file.py
@@ -97,7 +97,7 @@ def __init__(self, filename=None, file=None, **options):
         # sanity check client option hash
         for opt in options.keys():
             if opt not in self.default_options:
-                raise KeyError("invalid TextFile option '%s'" % opt)
+                raise KeyError(f"invalid TextFile option '{opt}'")
 
         if file is None:
             self.open(filename)
diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py
index 0248bde87b..7e68596b26 100644
--- a/setuptools/_distutils/unixccompiler.py
+++ b/setuptools/_distutils/unixccompiler.py
@@ -22,11 +22,11 @@
 import sys
 
 from . import sysconfig
-from .compat import consolidate_linker_args
 from ._log import log
 from ._macos_compat import compiler_fixup
 from ._modified import newer
 from .ccompiler import CCompiler, gen_lib_options, gen_preprocess_options
+from .compat import consolidate_linker_args
 from .errors import CompileError, DistutilsExecError, LibError, LinkError
 
 # XXX Things not currently handled:
@@ -144,6 +144,9 @@ class UnixCCompiler(CCompiler):
     xcode_stub_lib_format = dylib_lib_format
     if sys.platform == "cygwin":
         exe_extension = ".exe"
+        shared_lib_extension = ".dll.a"
+        dylib_lib_extension = ".dll"
+        dylib_lib_format = "cyg%s%s"
 
     def preprocess(
         self,
@@ -190,7 +193,7 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
             raise CompileError(msg)
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         objects, output_dir = self._fix_object_args(objects, output_dir)
 
@@ -223,7 +226,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -362,7 +365,7 @@ def _library_root(dir):
 
         return os.path.join(match.group(1), dir[1:]) if apply_root else dir
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         r"""
         Second-guess the linker with not much hard
         data to go on: GCC seems to prefer the shared library, so
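
With the new Cygwin class attributes, the inherited filename helpers produce Cygwin-style library names. A hedged illustration (the results shown only hold when the module is imported on `sys.platform == 'cygwin'`):

    from distutils.unixccompiler import UnixCCompiler

    cc = UnixCCompiler()
    cc.library_filename('foo', lib_type='shared')  # 'libfoo.dll.a' (import library)
    cc.library_filename('foo', lib_type='dylib')   # 'cygfoo.dll'   (the DLL itself)
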
diff --git a/setuptools/_distutils/util.py b/setuptools/_distutils/util.py
index 9ee77721b3..9db89b0979 100644
--- a/setuptools/_distutils/util.py
+++ b/setuptools/_distutils/util.py
@@ -12,6 +12,7 @@
 import subprocess
 import sys
 import sysconfig
+import tempfile
 
 from ._log import log
 from ._modified import newer
@@ -34,7 +35,7 @@ def get_host_platform():
         if os.name == "posix" and hasattr(os, 'uname'):
             osname, host, release, version, machine = os.uname()
             if osname[:3] == "aix":
-                from .py38compat import aix_platform
+                from .compat.py38 import aix_platform
 
                 return aix_platform(osname, version, release)
 
@@ -129,9 +130,9 @@ def convert_path(pathname):
     if not pathname:
         return pathname
     if pathname[0] == '/':
-        raise ValueError("path '%s' cannot be absolute" % pathname)
+        raise ValueError(f"path '{pathname}' cannot be absolute")
     if pathname[-1] == '/':
-        raise ValueError("path '%s' cannot end with '/'" % pathname)
+        raise ValueError(f"path '{pathname}' cannot end with '/'")
 
     paths = pathname.split('/')
     while '.' in paths:
@@ -158,7 +159,7 @@ def change_root(new_root, pathname):
 
     elif os.name == 'nt':
         (drive, path) = os.path.splitdrive(pathname)
-        if path[0] == '\\':
+        if path[0] == os.sep:
             path = path[1:]
         return os.path.join(new_root, path)
 
@@ -240,7 +241,7 @@ def grok_environment_error(exc, prefix="error: "):
 
 def _init_regex():
     global _wordchars_re, _squote_re, _dquote_re
-    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
+    _wordchars_re = re.compile(rf'[^\\\'\"{string.whitespace} ]*')
     _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
     _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
 
@@ -295,7 +296,7 @@ def split_quoted(s):
                 raise RuntimeError("this can't happen (bad char '%c')" % s[end])
 
             if m is None:
-                raise ValueError("bad string (mismatched %s quotes?)" % s[end])
+                raise ValueError(f"bad string (mismatched {s[end]} quotes?)")
 
             (beg, end) = m.span()
             s = s[:beg] + s[beg + 1 : end - 1] + s[end:]
@@ -311,7 +312,7 @@ def split_quoted(s):
 # split_quoted ()
 
 
-def execute(func, args, msg=None, verbose=0, dry_run=0):
+def execute(func, args, msg=None, verbose=False, dry_run=False):
     """Perform some action that affects the outside world (eg.  by
     writing to the filesystem).  Such actions are special because they
     are disabled by the 'dry_run' flag.  This method takes care of all
@@ -349,11 +350,11 @@ def strtobool(val):
 def byte_compile(  # noqa: C901
     py_files,
     optimize=0,
-    force=0,
+    force=False,
     prefix=None,
     base_dir=None,
-    verbose=1,
-    dry_run=0,
+    verbose=True,
+    dry_run=False,
     direct=None,
 ):
     """Byte-compile a collection of Python source files to .pyc
@@ -405,20 +406,10 @@ def byte_compile(  # noqa: C901
     # "Indirect" byte-compilation: write a temporary script and then
     # run it with the appropriate flags.
     if not direct:
-        try:
-            from tempfile import mkstemp
-
-            (script_fd, script_name) = mkstemp(".py")
-        except ImportError:
-            from tempfile import mktemp
-
-            (script_fd, script_name) = None, mktemp(".py")
+        (script_fd, script_name) = tempfile.mkstemp(".py")
         log.info("writing byte-compilation script '%s'", script_name)
         if not dry_run:
-            if script_fd is not None:
-                script = os.fdopen(script_fd, "w", encoding='utf-8')
-            else:  # pragma: no cover
-                script = open(script_name, "w", encoding='utf-8')
+            script = os.fdopen(script_fd, "w", encoding='utf-8')
 
             with script:
                 script.write(
@@ -443,8 +434,8 @@ def byte_compile(  # noqa: C901
                     f"""
 byte_compile(files, optimize={optimize!r}, force={force!r},
              prefix={prefix!r}, base_dir={base_dir!r},
-             verbose={verbose!r}, dry_run=0,
-             direct=1)
+             verbose={verbose!r}, dry_run=False,
+             direct=True)
 """
                 )
 
@@ -452,7 +443,7 @@ def byte_compile(  # noqa: C901
         cmd.extend(subprocess._optim_args_from_interpreter_flags())
         cmd.append(script_name)
         spawn(cmd, dry_run=dry_run)
-        execute(os.remove, (script_name,), "removing %s" % script_name, dry_run=dry_run)
+        execute(os.remove, (script_name,), f"removing {script_name}", dry_run=dry_run)
 
     # "Direct" byte-compilation: use the py_compile module to compile
     # right here, right now.  Note that the script generated in indirect
@@ -508,3 +499,12 @@ def rfc822_escape(header):
     suffix = indent if ends_in_newline else ""
 
     return indent.join(lines) + suffix
+
+
+def is_mingw():
+    """Returns True if the current platform is mingw.
+
+    Python compiled with Mingw-w64 has sys.platform == 'win32' and
+    get_platform() starts with 'mingw'.
+    """
+    return sys.platform == 'win32' and get_platform().startswith('mingw')
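
The removed try/except fallback dated from interpreters that could lack `tempfile.mkstemp`; every supported Python has it, so `byte_compile` now uses the secure pattern unconditionally. The same pattern in isolation:

    import os
    import tempfile

    # mkstemp creates the file securely and returns an open descriptor;
    # os.fdopen wraps that descriptor instead of reopening by name.
    script_fd, script_name = tempfile.mkstemp(".py")
    with os.fdopen(script_fd, "w", encoding="utf-8") as script:
        script.write("# generated byte-compilation script\n")
    os.remove(script_name)
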
diff --git a/setuptools/_distutils/version.py b/setuptools/_distutils/version.py
index 806d233ca5..942b56bf94 100644
--- a/setuptools/_distutils/version.py
+++ b/setuptools/_distutils/version.py
@@ -60,7 +60,7 @@ def __init__(self, vstring=None):
         )
 
     def __repr__(self):
-        return f"{self.__class__.__name__} ('{str(self)}')"
+        return f"{self.__class__.__name__} ('{self}')"
 
     def __eq__(self, other):
         c = self._cmp(other)
@@ -153,7 +153,7 @@ class StrictVersion(Version):
     def parse(self, vstring):
         match = self.version_re.match(vstring)
         if not match:
-            raise ValueError("invalid version number '%s'" % vstring)
+            raise ValueError(f"invalid version number '{vstring}'")
 
         (major, minor, patch, prerelease, prerelease_num) = match.group(1, 2, 4, 5, 6)
 
@@ -330,7 +330,7 @@ def __str__(self):
         return self.vstring
 
     def __repr__(self):
-        return "LooseVersion ('%s')" % str(self)
+        return f"LooseVersion ('{self}')"
 
     def _cmp(self, other):
         if isinstance(other, str):
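
The dropped `str(...)` wrapper was redundant: f-string interpolation calls `format()`, which falls back to `__str__`. A minimal demonstration:

    class V:
        def __str__(self):
            return '1.2'

    assert f"('{V()}')" == "('1.2')"  # same result with or without str()
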
diff --git a/setuptools/_distutils/versionpredicate.py b/setuptools/_distutils/versionpredicate.py
index 31c420168c..fe31b0ed8e 100644
--- a/setuptools/_distutils/versionpredicate.py
+++ b/setuptools/_distutils/versionpredicate.py
@@ -20,7 +20,7 @@ def splitUp(pred):
     """
     res = re_splitComparison.match(pred)
     if not res:
-        raise ValueError("bad package restriction syntax: %r" % pred)
+        raise ValueError(f"bad package restriction syntax: {pred!r}")
     comp, verStr = res.groups()
     with version.suppress_known_deprecation():
         other = version.StrictVersion(verStr)
@@ -113,17 +113,17 @@ def __init__(self, versionPredicateStr):
             raise ValueError("empty package restriction")
         match = re_validPackage.match(versionPredicateStr)
         if not match:
-            raise ValueError("bad package name in %r" % versionPredicateStr)
+            raise ValueError(f"bad package name in {versionPredicateStr!r}")
         self.name, paren = match.groups()
         paren = paren.strip()
         if paren:
             match = re_paren.match(paren)
             if not match:
-                raise ValueError("expected parenthesized list: %r" % paren)
+                raise ValueError(f"expected parenthesized list: {paren!r}")
             str = match.groups()[0]
             self.pred = [splitUp(aPred) for aPred in str.split(",")]
             if not self.pred:
-                raise ValueError("empty parenthesized list in %r" % versionPredicateStr)
+                raise ValueError(f"empty parenthesized list in {versionPredicateStr!r}")
         else:
             self.pred = []
 
@@ -167,7 +167,7 @@ def split_provision(value):
     value = value.strip()
     m = _provision_rx.match(value)
     if not m:
-        raise ValueError("illegal provides specification: %r" % value)
+        raise ValueError(f"illegal provides specification: {value!r}")
     ver = m.group(2) or None
     if ver:
         with version.suppress_known_deprecation():
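
The `%r`-to-`!r` conversions here are mechanical; for context, a short usage sketch of the class these messages surface from:

    from distutils.versionpredicate import VersionPredicate

    pred = VersionPredicate("mypkg (>=1.0, <2.0)")
    assert pred.satisfied_by("1.5")
    assert not pred.satisfied_by("2.0")
    # Malformed input raises ValueError with the f-string messages above.
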
diff --git a/setuptools/_distutils/zosccompiler.py b/setuptools/_distutils/zosccompiler.py
index c7a7ca61cf..af1e7fa5cc 100644
--- a/setuptools/_distutils/zosccompiler.py
+++ b/setuptools/_distutils/zosccompiler.py
@@ -135,7 +135,7 @@ def _get_zos_compiler_name(self):
 
         return zos_compilers.get(zos_compiler_names[0], 'ibm-openxl')
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
         self.zos_compiler = self._get_zos_compiler_name()
         sysconfig.customize_compiler(self)
@@ -172,7 +172,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
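
The zOS compiler now matches the boolean signature of the `CCompiler` base class. Concrete compilers such as this one are normally obtained through the factory, so the flags read naturally at the call site:

    from distutils.ccompiler import new_compiler

    # dry_run=True logs the commands that would run without executing them.
    cc = new_compiler(verbose=True, dry_run=True)
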
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index eb6ba1025f..cda9fc6cfc 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -23,7 +23,6 @@
 )
 from distutils import log, dir_util
 from distutils.command.build_scripts import first_line_re
-from distutils.spawn import find_executable
 from distutils.command import install
 import sys
 import os
@@ -2275,7 +2274,7 @@ def _use_header(new_header):
         to an executable on the system.
         """
         clean_header = new_header[2:-1].strip('"')
-        return sys.platform != 'win32' or find_executable(clean_header)
+        return sys.platform != 'win32' or shutil.which(clean_header)
 
 
 class WindowsExecutableLauncherWriter(WindowsScriptWriter):
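
`shutil.which` is the stdlib replacement for the long-deprecated `distutils.spawn.find_executable`. Both return the executable's path or `None`, but `shutil.which` also checks that the file is actually executable and honours `PATHEXT` on Windows:

    import shutil

    python_path = shutil.which('python')
    if python_path is None:
        print('python not found on PATH')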