diff -pruN 6.1.1-2/.pre-commit-config.yaml 7.0.1-2/.pre-commit-config.yaml
--- 6.1.1-2/.pre-commit-config.yaml	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/.pre-commit-config.yaml	2025-08-14 16:07:35.000000000 +0000
@@ -1,15 +1,6 @@
-# We from the Oslo project decided to pin repos based on the
-# commit hash instead of the version tag to prevend arbitrary
-# code from running in developer's machines.  To update to a
-# newer version, run `pre-commit autoupdate` and then replace
-# the newer versions with their commit hash.
-
-default_language_version:
-  python: python3
-
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: 9136088a246768144165fcc3ecc3d31bb686920a # v3.3.0
+    rev: v5.0.0
     hooks:
       - id: trailing-whitespace
       # Replaces or checks mixed line ending
@@ -27,13 +18,13 @@ repos:
       - id: debug-statements
       - id: check-yaml
         files: .*\.(yaml|yml)$
-  - repo: local
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 25.1.0
     hooks:
-      - id: flake8
-        name: flake8
-        additional_dependencies:
-          - hacking>=7.0.0,<7.1.0
-        language: python
-        entry: flake8
-        files: '^.*\.py$'
+      - id: black
+  - repo: https://opendev.org/openstack/hacking
+    rev: 7.0.0
+    hooks:
+      - id: hacking
+        additional_dependencies: []
         exclude: '^(doc|releasenotes|tools)/.*$'
diff -pruN 6.1.1-2/.zuul.yaml 7.0.1-2/.zuul.yaml
--- 6.1.1-2/.zuul.yaml	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/.zuul.yaml	2025-08-14 16:07:35.000000000 +0000
@@ -117,6 +117,8 @@
     nodeset: ubuntu-noble
 
 - project:
+    vars:
+      release_python: python3
     templates:
       - lib-forward-testing-python3
       - periodic-stable-jobs
@@ -124,6 +126,7 @@
     check:
       jobs:
         - openstack-tox-pep8
+        - build-python-release
         - openstack-tox-cover
         - openstack-tox-py27
         - openstack-tox-py36
@@ -133,6 +136,7 @@
         - openstack-tox-py310
         - openstack-tox-py311
         - openstack-tox-py312
+        - openstack-tox-py313
         - pbr-installation-openstack-jammy
         - pbr-installation-openstack-pip-dev-jammy
         - pbr-installation-openstack-noble
@@ -140,6 +144,7 @@
     gate:
       jobs:
         - openstack-tox-pep8
+        - build-python-release
         - openstack-tox-cover
         - openstack-tox-py27
         - openstack-tox-py36
@@ -149,6 +154,7 @@
         - openstack-tox-py310
         - openstack-tox-py311
         - openstack-tox-py312
+        - openstack-tox-py313
         - pbr-installation-openstack-jammy
         - pbr-installation-openstack-pip-dev-jammy
         - pbr-installation-openstack-noble
diff -pruN 6.1.1-2/README.rst 7.0.1-2/README.rst
--- 6.1.1-2/README.rst	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/README.rst	2025-08-14 16:07:35.000000000 +0000
@@ -21,14 +21,21 @@ it's simple and repeatable. If you want
 you've already got the power of Python at your fingertips, so you don't
 really need PBR.
 
+PBR also aims to maintain a stable base for packaging. While we occasionally
+deprecate features, we do our best to avoid removing them unless absolutely
+necessary. This is important since, while projects often do a good job of
+constraining their runtime dependencies, they often don't do so for their
+install-time dependencies. By limiting feature removals, we ensure the long
+tail of older software continues to be installable with recent versions of PBR
+automatically installed.
+
 PBR builds on top of the work that `d2to1`_ started to provide for declarative
 configuration. `d2to1`_ is itself an implementation of the ideas behind
-`distutils2`_. Although `distutils2`_ is now abandoned in favor of work towards
-`PEP 426`_ and Metadata 2.0, declarative config is still a great idea and
-specifically important in trying to distribute setup code as a library
-when that library itself will alter how the setup is processed. As Metadata
-2.0 and other modern Python packaging PEPs come out, PBR aims to support
-them as quickly as possible.
+`distutils2`_. Although `distutils2`_ is long-since abandoned, declarative
+config is still a great idea and it has since been adopted elsewhere, starting
+with setuptools' own support for ``setup.cfg`` files and extending to the
+``pyproject.toml`` file format introduced in `PEP 517`_. PBR attempts to
+support these changes as they are introduced.
 
 * License: Apache License, Version 2.0
 * Documentation: https://docs.openstack.org/pbr/latest/
@@ -39,5 +46,5 @@ them as quickly as possible.
 
 .. _d2to1: https://pypi.python.org/pypi/d2to1
 .. _distutils2: https://pypi.python.org/pypi/Distutils2
-.. _PEP 426: http://legacy.python.org/dev/peps/pep-0426/
 .. _OpenStack: https://www.openstack.org/
+.. _PEP 517: https://peps.python.org/pep-0517/
diff -pruN 6.1.1-2/debian/changelog 7.0.1-2/debian/changelog
--- 6.1.1-2/debian/changelog	2025-03-28 06:57:02.000000000 +0000
+++ 7.0.1-2/debian/changelog	2025-08-26 13:13:23.000000000 +0000
@@ -1,3 +1,24 @@
+python-pbr (7.0.1-2) experimental; urgency=medium
+
+  * Relax python3-sphinxcontrib.apidoc b-d version.
+
+ -- Thomas Goirand <zigo@debian.org>  Tue, 26 Aug 2025 15:13:23 +0200
+
+python-pbr (7.0.1-1) experimental; urgency=medium
+
+  * New upstream release.
+  * Remove disable_tests.patch as test file is gone upstream.
+  * Blacklist 5 tests that need either an internet connection or a
+    setup of pbr_wsgi_script in PATH.
+  * b-d: require sphinxcontrib.apidoc (>= 0.6.0).
+  * Add python3-reno to b-d.
+  * Add remove-inclusion-of-missing-ChangeLog-file.patch.
+  * Dump sphinx error log on failure.
+  * Add do-not-use-reno-for-doc.patch, as reno fails if the toplevel isn't a
+    git repository.
+
+ -- Thomas Goirand <zigo@debian.org>  Mon, 25 Aug 2025 14:38:58 +0200
+
 python-pbr (6.1.1-2) unstable; urgency=medium
 
   * Uploading to unstable.
diff -pruN 6.1.1-2/debian/control 7.0.1-2/debian/control
--- 6.1.1-2/debian/control	2025-03-28 06:57:02.000000000 +0000
+++ 7.0.1-2/debian/control	2025-08-26 13:13:23.000000000 +0000
@@ -21,6 +21,7 @@ Build-Depends-Indep:
  python3-markupsafe <!nocheck>,
  python3-openstackdocstheme <!nocheck>,
  python3-pip <!nocheck>,
+ python3-reno <!nodoc>,
  python3-sphinxcontrib.apidoc <!nodoc>,
  python3-stestr <!nocheck>,
  python3-testresources <!nocheck>,
diff -pruN 6.1.1-2/debian/patches/disable_tests.patch 7.0.1-2/debian/patches/disable_tests.patch
--- 6.1.1-2/debian/patches/disable_tests.patch	2025-03-28 06:57:02.000000000 +0000
+++ 7.0.1-2/debian/patches/disable_tests.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,23 +0,0 @@
-Description: Disable tests which don't work in debian
-Author: Ondřej Nový <onovy@debian.org>
-Forwarded: not-needed
-Last-Update: 2017-08-08
-
---- a/pbr/tests/test_wsgi.py
-+++ b/pbr/tests/test_wsgi.py
-@@ -62,6 +62,7 @@
-         wsgi script using simple server.
- 
-         """
-+        self.skipTest('Not working on Debian')
-         if os.name == 'nt':
-             self.skipTest('Windows support is passthrough')
- 
-@@ -154,6 +155,7 @@
-             self.assertIn(else_block, script_txt)
- 
-     def test_with_argument(self):
-+        self.skipTest('Not working on Debian')
-         if os.name == 'nt':
-             self.skipTest('Windows support is passthrough')
- 
diff -pruN 6.1.1-2/debian/patches/do-not-use-reno-for-doc.patch 7.0.1-2/debian/patches/do-not-use-reno-for-doc.patch
--- 6.1.1-2/debian/patches/do-not-use-reno-for-doc.patch	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/debian/patches/do-not-use-reno-for-doc.patch	2025-08-26 13:13:23.000000000 +0000
@@ -0,0 +1,16 @@
+Description: Do not use reno for docs
+Author: Thomas Goirand <zigo@debian.org>
+Forwarded: not-needed
+Last-Update: 2025-08-26
+
+--- python-pbr-7.0.1.orig/doc/source/conf.py
++++ python-pbr-7.0.1/doc/source/conf.py
+@@ -20,7 +20,7 @@ sys.path.insert(0, os.path.abspath('../.
+ 
+ # -- General configuration ----------------------------------------------------
+ 
+-extensions = ['sphinx.ext.apidoc', 'sphinx.ext.todo', 'reno.sphinxext']
++extensions = ['sphinx.ext.apidoc', 'sphinx.ext.todo']
+ 
+ # make openstackdocstheme optional to not increase the needed dependencies
+ try:
diff -pruN 6.1.1-2/debian/patches/remove-inclusion-of-missing-ChangeLog-file.patch 7.0.1-2/debian/patches/remove-inclusion-of-missing-ChangeLog-file.patch
--- 6.1.1-2/debian/patches/remove-inclusion-of-missing-ChangeLog-file.patch	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/debian/patches/remove-inclusion-of-missing-ChangeLog-file.patch	2025-08-26 13:13:23.000000000 +0000
@@ -0,0 +1,9 @@
+Description: The ChangeLog file isn't present in upstream git
+Author: Thomas Goirand <zigo@debian.org>
+Forwarded: not-needed
+Last-Update: 2025-08-26
+
+--- python-pbr-7.0.1.orig/doc/source/user/history.rst
++++ python-pbr-7.0.1/doc/source/user/history.rst
+@@ -1 +0,0 @@
+-.. include:: ../../../ChangeLog
diff -pruN 6.1.1-2/debian/patches/series 7.0.1-2/debian/patches/series
--- 6.1.1-2/debian/patches/series	2025-03-28 06:57:02.000000000 +0000
+++ 7.0.1-2/debian/patches/series	2025-08-26 13:13:23.000000000 +0000
@@ -1 +1,2 @@
-disable_tests.patch
+remove-inclusion-of-missing-ChangeLog-file.patch
+do-not-use-reno-for-doc.patch
diff -pruN 6.1.1-2/debian/rules 7.0.1-2/debian/rules
--- 6.1.1-2/debian/rules	2025-03-28 06:57:02.000000000 +0000
+++ 7.0.1-2/debian/rules	2025-08-26 13:13:23.000000000 +0000
@@ -22,12 +22,15 @@ override_dh_auto_install:
 
 override_dh_auto_test:
 ifeq (,$(findstring nocheck, $(DEB_BUILD_OPTIONS)))
-	pkgos-dh_auto_test --no-py2 'pbr\.tests(?!.*test_packaging\.TestRequirementParsing\.test_requirement_parsing.*|.*test_core\.TestCore\.test_setup_py_keywords.*|.*test_packaging\.TestPEP517Support\.test_pep_517_support.*|.*test_wsgi\.TestWsgiScripts\.test_wsgi_script_install.*)'
+	pkgos-dh_auto_test --no-py2 'pbr\.tests\.(?!test_packaging\.TestRequirementParsing\.test_requirement_parsing.*|test_packaging\.TestPEP517Support\.test_pep_517_support.*|functional\.test_wsgi_scripts\.TestWsgiScripts\.test_with_argument|functional\.test_wsgi_scripts\.TestWsgiScripts\.test_wsgi_script_install|functional\.test_wsgi_scripts.TestWsgiScripts\.test_wsgi_script_run|functional\.test_pep517\.TestPEP517Support\.test_pep_517_support|functional\.test_requirements\.TestRequirementParsing\.test_requirement_parsing)'
 endif
 
 override_dh_sphinxdoc:
 ifeq (,$(findstring nodoc, $(DEB_BUILD_OPTIONS)))
-	PYTHONPATH=. PYTHON=python3 python3 -m sphinx -b html doc/source $(CURDIR)/debian/python-pbr-doc/usr/share/doc/python-pbr-doc/html
+	if ! PYTHONPATH=. PYTHON=python3 python3 -m sphinx -b html doc/source $(CURDIR)/debian/python-pbr-doc/usr/share/doc/python-pbr-doc/html ; then \
+		cat /tmp/sphinx-err-*.log ; \
+		exit 1 ; \
+	fi
 	dh_sphinxdoc
 endif
 
diff -pruN 6.1.1-2/debian/tests/unit 7.0.1-2/debian/tests/unit
--- 6.1.1-2/debian/tests/unit	2025-03-28 06:57:02.000000000 +0000
+++ 7.0.1-2/debian/tests/unit	2025-08-26 13:13:23.000000000 +0000
@@ -2,4 +2,4 @@
 
 set -e
 
-pkgos-dh_auto_test --no-py2 'pbr\.tests(?!.*test_packaging\.TestRequirementParsing\.test_requirement_parsing.*|.*test_wsgi\.TestWsgiScripts\.test_wsgi_script_install.*|.*test_packaging\.TestPEP517Support\.test_pep_517_support.*)'
+pkgos-dh_auto_test --no-py2 'pbr\.tests\.(?!test_packaging\.TestRequirementParsing\.test_requirement_parsing.*|test_packaging\.TestPEP517Support\.test_pep_517_support.*|functional\.test_wsgi_scripts\.TestWsgiScripts\.test_with_argument|functional\.test_wsgi_scripts\.TestWsgiScripts\.test_wsgi_script_install|functional\.test_wsgi_scripts.TestWsgiScripts\.test_wsgi_script_run|functional\.test_pep517\.TestPEP517Support\.test_pep_517_support|functional\.test_requirements\.TestRequirementParsing\.test_requirement_parsing)'
diff -pruN 6.1.1-2/doc/requirements.txt 7.0.1-2/doc/requirements.txt
--- 6.1.1-2/doc/requirements.txt	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/doc/requirements.txt	2025-08-14 16:07:35.000000000 +0000
@@ -1,6 +1,4 @@
 setuptools;python_version>='3.12'
-sphinx!=1.6.6,!=1.6.7,>=1.6.2,<2.0.0;python_version=='2.7' # BSD
-sphinx!=1.6.6,!=1.6.7,>=1.6.2;python_version>='3.4' # BSD
-sphinxcontrib-apidoc>=0.2.0  # BSD
+sphinx>=8.2.0 # BSD
 openstackdocstheme>=1.18.1 # Apache-2.0
 reno>=2.5.0 # Apache-2.0
diff -pruN 6.1.1-2/doc/source/conf.py 7.0.1-2/doc/source/conf.py
--- 6.1.1-2/doc/source/conf.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/doc/source/conf.py	2025-08-14 16:07:35.000000000 +0000
@@ -17,38 +17,27 @@ import os
 import sys
 
 sys.path.insert(0, os.path.abspath('../..'))
+
 # -- General configuration ----------------------------------------------------
 
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinxcontrib.apidoc']
+extensions = ['sphinx.ext.apidoc', 'sphinx.ext.todo', 'reno.sphinxext']
+
 # make openstackdocstheme optional to not increase the needed dependencies
 try:
     import openstackdocstheme
+
     extensions.append('openstackdocstheme')
 except ImportError:
     openstackdocstheme = None
 
 # openstackdocstheme options
 
-# Deprecated options for docstheme < 2.2.0, can be removed once
-# pbr stops supporting py27.
-repository_name = 'openstack/pbr'
-bug_project = 'pbr'
-bug_tag = ''
 # New options with openstackdocstheme >=2.2.0
 openstackdocs_repo_name = 'openstack/pbr'
 openstackdocs_auto_name = False
 openstackdocs_bug_project = 'pbr'
 openstackdocs_bug_tag = ''
 
-# autodoc generation is a bit aggressive and a nuisance when doing heavy
-# text edit cycles.
-# execute "export SPHINX_DEBUG=1" in your terminal to disable
-
-# Add any paths that contain templates here, relative to this directory.
-# templates_path = ['_templates']
-
 # The suffix of source filenames.
 source_suffix = '.rst'
 
@@ -57,7 +46,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = 'pbr'
-copyright = '2013, OpenStack Foundation'
+copyright = '2013-, OpenStack Foundation'
 
 # If true, '()' will be appended to :func: etc. cross-reference text.
 add_function_parentheses = True
@@ -87,16 +76,21 @@ htmlhelp_basename = '%sdoc' % project
 # (source start file, target name, title, author, documentclass
 # [howto/manual]).
 latex_documents = [
-    ('index',
-     '%s.tex' % project,
-     '%s Documentation' % project,
-     'OpenStack Foundation', 'manual'),
+    (
+        'index',
+        '%s.tex' % project,
+        '%s Documentation' % project,
+        'OpenStack Foundation',
+        'manual',
+    ),
 ]
 
-# -- sphinxcontrib.apidoc configuration --------------------------------------
+# -- Options for sphinx.ext.apidoc extension ----------------------------------
 
-apidoc_module_dir = '../../pbr'
-apidoc_output_dir = 'reference/api'
-apidoc_excluded_paths = [
-    'tests',
+apidoc_modules = [
+    {
+        'path': '../../pbr',
+        'destination': 'reference/api',
+        'exclude_patterns': ['**/tests/*'],
+    },
 ]
diff -pruN 6.1.1-2/doc/source/user/features.rst 7.0.1-2/doc/source/user/features.rst
--- 6.1.1-2/doc/source/user/features.rst	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/doc/source/user/features.rst	2025-08-14 16:07:35.000000000 +0000
@@ -191,8 +191,8 @@ Requirements
 .. admonition:: Summary
 
     *pbr* will extract requirements from ``requirements.txt`` files and
-    automatically populate the ``install_requires``, ``tests_require`` and
-    ``dependency_links`` arguments to ``setup`` with them.
+    automatically populate the ``install_requires`` argument to ``setup`` with
+    them.
 
 You may not have noticed, but there are differences in how pip
 ``requirements.txt`` files work and how *setuptools* wants to be told about
@@ -201,16 +201,25 @@ populate a *virtualenv* for testing or t
 Duplicating the information, though, is super lame. To solve this issue, *pbr*
 will let you use ``requirements.txt``-format files to describe the requirements
 for your project and will then parse these files, split them up appropriately,
-and inject them into the ``install_requires``, ``tests_require`` and/or
-``dependency_links`` arguments to ``setup``. Voila!
+and inject them into the ``install_requires`` argument to ``setup``. Voila!
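
A minimal sketch of this behaviour, assuming a hypothetical project whose
``requirements.txt`` sits next to ``setup.py``:

.. code-block:: python

    # setup.py -- minimal sketch; at build time pbr reads the adjacent
    # requirements.txt and injects its contents into install_requires.
    #
    # requirements.txt (hypothetical contents):
    #     requests>=2.25.0
    #     PyYAML>=5.4; python_version >= "3.8"   # environment markers work too
    import setuptools

    setuptools.setup(pbr=True)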
 
 Finally, it is possible to specify groups of optional dependencies, or
 :ref:`"extra" requirements <extra-requirements>`, in your ``setup.cfg`` rather
 than ``setup.py``.
 
+.. versionchanged:: 7.0
+
+   Previously, the ``tests_require`` and ``dependency_links`` setup arguments
+   were also populated by *pbr*. The ``tests_require`` argument is no longer
+   supported as of `setuptools v72.0.0`__, while the ``dependency_links``
+   argument is deprecated and ignored by `pip 19.0 or later`__.
+
+   .. __: https://setuptools.pypa.io/en/stable/history.html#v72-0-0
+   .. __: https://github.com/pypa/pip/pull/6060
+
 .. versionchanged:: 5.0
 
-   Previously you could specify requirements for a given major version of
+   Previously, you could specify requirements for a given major version of
    Python using requirments files with a ``-pyN`` suffix. This was deprecated
    in 4.0 and removed in 5.0 in favour of environment markers.
 
@@ -282,54 +291,35 @@ Setup Commands
 ``build_sphinx``
 ~~~~~~~~~~~~~~~~
 
-.. admonition:: Summary
+.. versionremoved:: 6.0
 
-    *pbr* will override the Sphinx ``build_sphinx`` command to use
-    *pbr*-provided package metadata and automatically generate API
-    documentation.
-
-.. deprecated:: 4.2
-
-   This feature has been superseded by the `sphinxcontrib-apidoc`__ (for
-   generation of API documentation) and :ref:`pbr.sphinxext` (for configuration
-   of versioning via package metadata) extensions. It has been removed in
-   version 6.0.
+    *Sphinx* deprecated the ``build_sphinx`` distutils command in *Sphinx*
+    v5.0.0 and removed it in *Sphinx* v7.0.0. *pbr* deprecated its override of
+    this command in *pbr* v4.2.0 and removed it in *pbr* v6.0.0.
+
+    For automated generation of API documentation, consider either the
+    `sphinx.ext.apidoc`__ extension, provided in Sphinx since v8.2.0, or the
+    `sphinxcontrib-apidoc`__ extension if you are stuck with older versions of
+    Sphinx.
 
-   __ https://pypi.org/project/sphinxcontrib-apidoc/
+    For configuration of versioning via package metadata, consider the
+    :ref:`pbr.sphinxext` extension.
+
+    .. __: https://www.sphinx-doc.org/en/master/usage/extensions/apidoc.html
+    .. __: https://pypi.org/project/sphinxcontrib-apidoc/
 
 ``test``
 ~~~~~~~~
 
-.. admonition:: Summary
-
-    *pbr* will automatically alias the ``test`` command to use the testing tool
-    of your choice.
-
-.. deprecated:: 4.0
-
-*pbr* overrides the *setuptools* ``test`` command if using `testrepository`__
-or `nose`__ (deprecated).
-
-- *pbr* will check for a ``.testr.conf`` file. If this exists and
-  *testrepository* is installed, the ``test`` command will alias the *testr*
-  test runner. If this is not the case...
-
-  .. note::
-
-    This is separate to ``setup.py testr`` (note the extra ``r``) which is
-    provided directly by the ``testrepository`` package. Be careful as there is
-    some overlap of command arguments.
-
-- *pbr* will check if ``[nosetests]`` is defined in ``setup.cfg``. If this
-  exists and *nose* is installed, the ``test`` command will alias the *nose*
-  runner. If this is not the case...
+.. versionremoved:: 7.0
 
-- In other cases no override will be installed and the ``test`` command will
-  revert to the `setuptools default`__.
+    *pbr* previously aliased the ``test`` command to use the testing tool of
+    your choice. However, the two test runners it supported - ``testr`` and
+    ``nose`` - are no longer maintained. The override of this command was
+    therefore removed in *pbr* v7.0.0.
 
-__ https://testrepository.readthedocs.io/en/latest/
-__ https://nose.readthedocs.io/en/latest/
-__ https://setuptools.readthedocs.io/en/latest/setuptools.html#test-build-package-and-run-a-unittest-suite
+    If you relied on this command, you should switch to calling the test runner
+    directly.
 
 .. _pbr.sphinxext:
 
diff -pruN 6.1.1-2/doc/source/user/releasenotes.rst 7.0.1-2/doc/source/user/releasenotes.rst
--- 6.1.1-2/doc/source/user/releasenotes.rst	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/doc/source/user/releasenotes.rst	2025-08-14 16:07:35.000000000 +0000
@@ -2,5 +2,5 @@
  Release Notes
 ===============
 
-.. include:: ../../../RELEASENOTES.rst
-  :start-line: 4
+.. release-notes::
+   :unreleased-version-title: In Development
diff -pruN 6.1.1-2/doc/source/user/using.rst 7.0.1-2/doc/source/user/using.rst
--- 6.1.1-2/doc/source/user/using.rst	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/doc/source/user/using.rst	2025-08-14 16:07:35.000000000 +0000
@@ -14,7 +14,9 @@ available, *pbr* makes it possible to ex
 *pbr* only requires a minimal ``setup.py`` file compared to a standard
 *setuptools* project. This is because most configuration is located in static
 configuration files. This recommended minimal ``setup.py`` file should look
-something like this::
+something like this:
+
+.. code-block:: python
 
     #!/usr/bin/env python
 
@@ -32,7 +34,11 @@ something like this::
 .. note::
 
    While one can pass any arguments supported by setuptools to ``setup()``,
-   any conflicting arguments supplied in ``setup.cfg`` will take precedence.
+   any conflicting arguments supplied in ``pyproject.toml`` or ``setup.cfg``
+   will take precedence.
+
+With this minimal ``setup.py`` in place, the rest of your configuration can go
+into either ``pyproject.toml`` or ``setup.cfg``.
 
 ``pyproject.toml``
 ------------------
@@ -40,8 +46,10 @@ something like this::
 *If your project only supports Python 3.7 or newer*, PBR can be configured as a
 PEP517 build-system in ``pyproject.toml``. The main benefits are that you can
 control the versions of PBR and setuptools that are used avoiding easy_install
-invocation. Your build-system block in ``pyproject.toml`` will need to look
-like this::
+invocation. Your ``[build-system]`` block in ``pyproject.toml`` will need to
+look like this:
+
+.. code-block:: toml
 
     [build-system]
     requires = ["pbr>=6.1.1"]
@@ -49,20 +57,61 @@ like this::
 
 Eventually PBR may grow its own direct support for PEP517 build hooks, but
 until then it will continue to need setuptools with a minimal ``setup.py`` and
-``setup.cfg`` as follows...
+``setup.cfg`` as follows. First, ``setup.py``:
 
-``setup.py``::
+.. code-block:: python
 
     import setuptools
     setuptools.setup(pbr=True)
 
-``setup.cfg``::
+Then ``setup.cfg``:
+
+.. code-block:: ini
 
     [metadata]
-    name = myproject
+    name = my_project
 
-If desired, any other metadata can be placed in your ``pyproject.toml`` instead
-of ``setup.cfg``.
+Almost all other metadata can be placed into ``pyproject.toml``. A simple example:
+
+.. code-block:: toml
+
+    [project]
+    name = "my_project"
+    description = "A brief one-line descriptive title of my project"
+    authors = [
+        {name = "John Doe", email = "john@example.com"},
+    ]
+    requires-python = ">=3.10"
+    classifiers = [
+        "Development Status :: 5 - Production/Stable",
+        "Environment :: Console",
+        "Intended Audience :: Developers",
+        "Intended Audience :: Information Technology",
+        "License :: OSI Approved :: Apache Software License",
+        "Operating System :: OS Independent",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
+        "Programming Language :: Python :: 3.13",
+        "Topic :: Utilities",
+    ]
+    keywords = ["commandline", "utility"]
+    readme = "README.rst"
+
+    [project.scripts]
+    my-project = "my_project.cmd:main"
+
+    [project.urls]
+    Homepage = "https://my-project.example.org/"
+    "Bug Tracker" = "https://my-project.example.org/bugs/"
+    Documentation = "https://my-project.example.org/docs/"
+    "Release Notes" = "https://my-project.example.org/releasenotes/"
+    "Source Code" = "https://my-project.example.org/code/"
+
+    [tool.setuptools]
+    packages = ["my_project"]
 
 .. _setup_cfg:
 
@@ -71,13 +120,23 @@ of ``setup.cfg``.
 
 The ``setup.cfg`` file is an INI-like file that can mostly replace the
 ``setup.py`` file. It is similar to the ``setup.cfg`` file found in recent
-versions of `setuptools`__. A simple example::
+versions of `setuptools`__. As with setuptools itself, you need to retain a
+minimal ``setup.py`` as follows:
+
+.. code-block:: python
+
+    import setuptools
+    setuptools.setup(pbr=True)
+
+All other metadata can be placed in your ``setup.cfg``. A simple example:
+
+.. code-block:: ini
 
     [metadata]
     name = my_project
-    summary = A brief one-line descriptive title of my project
-    author = My Project's Contributors
-    author_email = my-project-mailing-list@lists.example.org
+    description = A brief one-line descriptive title of my project
+    author = John Doe
+    author_email = john@example.com
     classifiers =
         Development Status :: 5 - Production/Stable
         Environment :: Console
@@ -104,25 +163,21 @@ versions of `setuptools`__. A simple exa
 
     [options]
     python_requires = >=3.10
-
-    [files]
     packages =
         my_project
 
-    [entry_points]
+    [options.entry_points]
     console_scripts =
         my-project = my_project.cmd:main
 
-    [pbr]
-    manpages =
-        my-project.1
-
 Recent versions of `setuptools`_ provide many of the same sections as *pbr*.
-However, *pbr* does provide a number of additional sections:
-
-- ``files``
-- ``entry_points``
-- ``backwards_compat``
+*pbr*'s support for ``setup.cfg`` predates that of setuptools. For this reason,
+*pbr* supports sections and functionality that setuptools never adopted.
+These sections are:
+
+- ``files`` (deprecated)
+- ``entry_points`` (deprecated)
+- ``backwards_compat`` (deprecated)
 - ``pbr``
 
 In addition, there are some modifications to other sections:
@@ -138,7 +193,9 @@ such as the ``extract_messages`` section
    Comments may be used in ``setup.cfg``, however all comments should start
    with a ``#`` and may be on a single line, or in line, with at least one
    white space character immediately preceding the ``#``. Semicolons are not a
-   supported comment delimiter. For instance::
+   supported comment delimiter. For instance:
+
+   .. code-block:: ini
 
        [section]
        # A comment at the start of a dedicated line
@@ -159,20 +216,89 @@ __ http://babel.pocoo.org/en/latest/setu
 ``files``
 ~~~~~~~~~
 
-The ``files`` section defines the install location of files in the package
-using three fundamental keys: ``packages``, ``namespace_packages``, and
-``data_files``.
+The ``files`` section defines the install location of files in the package.
+
+.. deprecated:: 7.0.0
+
+    `setuptools v30.3.0`__ introduced built-in support for configuring the
+    below information via the ``[options]`` section in ``setup.cfg``, while
+    `setuptools v68.1.0`__ adds support for doing this via ``pyproject.toml``
+    using the ``[tool.setuptools]`` section. For example, given the following
+    ``setup.cfg`` configuration:
+
+    .. code-block:: ini
+
+        [files]
+        packages =
+            foo
+        namespace_packages =
+            fooext
+        data_files =
+            etc/foo = etc/foo/*
+            etc/foo-api =
+                etc/api-paste.ini
+            etc/init.d = foo.init
+
+    You can represent this in ``setup.cfg`` like so:
+
+    .. code-block:: ini
+
+        [options]
+        packages =
+            foo
+        namespace_packages =
+            fooext
+
+        [options.data_files]
+        etc/foo = etc/foo/*
+        etc/foo-api =
+            etc/api-paste.ini
+        etc/init.d = foo.init
+
+    Neither namespace packages nor non-package data files are supported in
+    ``pyproject.toml`` format so only ``[files] packages`` can be migrated in
+    this example:
+
+    .. code-block:: toml
+
+        [tool.setuptools]
+        packages = ["foo"]
+
+    For more information, refer to the `Configuring setuptools using setup.cfg
+    files`__, `Package Discovery and Namespace Packages`__ and `Data Files
+    Support`__ documents in the setuptools docs.
+
+    .. __: https://pypi.org/project/setuptools/30.3.0/
+    .. __: https://pypi.org/project/setuptools/68.1.0/
+    .. __: https://setuptools.pypa.io/en/latest/userguide/declarative_config.html
+    .. __: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html
+    .. __: https://setuptools.pypa.io/en/latest/userguide/datafiles.html
+
+The ``files`` section uses three fundamental keys: ``packages``,
+``namespace_packages``, and ``data_files``.
 
 ``packages``
   A list of top-level packages that should be installed. The behavior of
   packages is similar to ``setuptools.find_packages`` in that it recurses the
   Python package hierarchy below the given top level and installs all of it. If
   ``packages`` is not specified, it defaults to the value of the ``name`` field
-  given in the ``[metadata]`` section.
+  given in the ``[metadata]`` section. For example:
+
+  .. code-block:: ini
+
+      [files]
+      packages =
+          pbr
 
 ``namespace_packages``
   Similar to ``packages``, but is a list of packages that provide namespace
-  packages.
+  packages. For example:
+
+  .. code-block:: ini
+
+      [files]
+      namespace_packages =
+          pbrext
 
 ``data_files``
   A list of files to be installed. The format is an indented block that
@@ -180,7 +306,9 @@ using three fundamental keys: ``packages
   install there. More than one source file for a directory may be indicated
   with a further indented list. Source files are stripped of leading
   directories. Additionally, *pbr* supports a simple file globbing syntax for
-  installing entire directory structures. For example::
+  installing entire directory structures. For example:
+
+  .. code-block:: ini
 
       [files]
       data_files =
@@ -206,12 +334,59 @@ using three fundamental keys: ``packages
 ~~~~~~~~~~~~~~~~
 
 The ``entry_points`` section defines entry points for generated console scripts
-and Python libraries. This is actually provided by *setuptools* but is
-documented here owing to its importance.
+and Python libraries.
+
+.. deprecated:: 7.0.0
+
+    `setuptools v30.3.0`__ introduced built-in support for configuring the
+    below information via the ``[options.entry_points]`` section in
+    ``setup.cfg``, while `setuptools v68.1.0`__ adds support for doing this via
+    ``pyproject.toml`` using the ``[project.scripts]`` section. For example,
+    given the following ``setup.cfg`` configuration:
+
+    .. code-block:: ini
+
+        [entry_points]
+        console_scripts =
+            pbr = pbr.cmd:main
+        pbr.config.drivers =
+            plain = pbr.cfg.driver:Plain
+            fancy = pbr.cfg.driver:Fancy
+
+    You can represent this in ``setup.cfg`` like so:
+
+    .. code-block:: ini
+
+        [options.entry_points]
+        console_scripts =
+            pbr = pbr.cmd:main
+        pbr.config.drivers =
+            plain = pbr.cfg.driver:Plain
+            fancy = pbr.cfg.driver:Fancy
+
+    Or in ``pyproject.toml`` like so:
+
+    .. code-block:: toml
+
+        [project.scripts]
+        pbr = "pbr.cmd:main"
+
+        [project.entry-points."pbr.config.drivers"]
+        plain = "pbr.cfg.driver:Plain"
+        fancy = "pbr.cfg.driver:Fancy"
+
+    For more information, refer to the `Entry Points`__ document in the
+    setuptools docs.
+
+    .. __: https://pypi.org/project/setuptools/30.3.0/
+    .. __: https://pypi.org/project/setuptools/68.1.0/
+    .. __: https://setuptools.pypa.io/en/latest/userguide/entry_point.html
 
 The general syntax of specifying entry points is a top level name indicating
-the entry point group name, followed by one or more key value pairs naming
-the entry point to be installed. For instance::
+the entry point group name, followed by one or more key value pairs naming the
+entry point to be installed. For example:
+
+.. code-block:: ini
 
     [entry_points]
     console_scripts =
@@ -266,70 +441,11 @@ The ``pbr`` section controls *pbr*-speci
   This can also be configured using the ``SKIP_GENERATE_RENO`` environment
   variable, as described :ref:`here <packaging-releasenotes>`.
 
-``autodoc_tree_index_modules``
-  A boolean option controlling whether *pbr* should generate an index of
-  modules using ``sphinx-apidoc``. By default, all files except ``setup.py``
-  are included, but this can be overridden using the ``autodoc_tree_excludes``
-  option.
-
-  .. deprecated:: 4.2
-
-      This feature has been replaced by the `sphinxcontrib-apidoc`_ extension.
-      Refer to the :ref:`build_sphinx` overview for more information.
-
-``autodoc_tree_excludes``
-  A list of modules to exclude when building documentation using
-  ``sphinx-apidoc``. Defaults to ``[setup.py]``. Refer to the
-  `sphinx-apidoc man page`__ for more information.
-
-  __ http://sphinx-doc.org/man/sphinx-apidoc.html
-
-  .. deprecated:: 4.2
-
-      This feature has been replaced by the `sphinxcontrib-apidoc`_ extension.
-      Refer to the :ref:`build_sphinx` overview for more information.
-
-``autodoc_index_modules``
-  A boolean option controlling whether *pbr* should itself generates
-  documentation for Python modules of the project. By default, all found Python
-  modules are included; some of them can be excluded by listing them in
-  ``autodoc_exclude_modules``.
-
-  .. deprecated:: 4.2
-
-      This feature has been replaced by the `sphinxcontrib-apidoc`_ extension.
-      Refer to the :ref:`build_sphinx` overview for more information.
+.. versionchanged:: 6.0
 
-``autodoc_exclude_modules``
-  A list of modules to exclude when building module documentation using *pbr*.
-  *fnmatch* style pattern (e.g. ``myapp.tests.*``) can be used.
-
-  .. deprecated:: 4.2
-
-      This feature has been replaced by the `sphinxcontrib-apidoc`_ extension.
-      Refer to the :ref:`build_sphinx` overview for more information.
-
-``api_doc_dir``
-  A subdirectory inside the ``build_sphinx.source_dir`` where auto-generated
-  API documentation should be written, if ``autodoc_index_modules`` is set to
-  True. Defaults to ``"api"``.
-
-  .. deprecated:: 4.2
-
-      This feature has been replaced by the `sphinxcontrib-apidoc`_ extension.
-      Refer to the :ref:`build_sphinx` overview for more information.
-
-.. note::
-
-   When using ``autodoc_tree_excludes`` or ``autodoc_index_modules`` you may
-   also need to set ``exclude_patterns`` in your Sphinx configuration file
-   (generally found at ``doc/source/conf.py`` in most OpenStack projects)
-   otherwise Sphinx may complain about documents that are not in a toctree.
-   This is especially true if the ``[sphinx_build] warning-is-error`` option is
-   set. See the `Sphinx build configuration file`__ documentation for more
-   information on configuring Sphinx.
-
-   __ http://sphinx-doc.org/config.html
+   The ``autodoc_tree_index_modules``, ``autodoc_tree_excludes``,
+   ``autodoc_index_modules``, ``autodoc_exclude_modules`` and ``api_doc_dir``
+   settings are all removed.
 
 .. versionchanged:: 4.2
 
@@ -415,7 +531,9 @@ installed into Python 2.6.
 
 For extras specified in ``setup.cfg``, add an ``extras`` section. For instance,
 to create two groups of extra requirements with additional constraints on the
-environment, you can use::
+environment, you can use:
+
+.. code-block:: ini
 
     [extras]
     security =
@@ -427,35 +545,6 @@ environment, you can use::
 
 __ https://www.python.org/dev/peps/pep-0426/#environment-markers
 
-Testing
--------
-
-.. deprecated:: 4.0
-
-As described in :doc:`/user/features`, *pbr* may override the ``test`` command
-depending on the test runner used.
-
-A typical usage would be in ``tox.ini`` such as::
-
-  [tox]
-  minversion = 2.0
-  skipsdist = True
-  envlist = py33,py34,py35,py26,py27,pypy,pep8,docs
-
-  [testenv]
-  usedevelop = True
-  setenv =
-    VIRTUAL_ENV={envdir}
-    CLIENT_NAME=pbr
-  deps = .
-       -r{toxinidir}/test-requirements.txt
-  commands =
-    python setup.py test --testr-args='{posargs}'
-
-The argument ``--coverage`` will set ``PYTHON`` to ``coverage run`` to produce
-a coverage report.  ``--coverage-package-name`` can be used to modify or narrow
-the packages traced.
-
 
 Sphinx ``conf.py``
 ------------------
@@ -465,7 +554,9 @@ automatically configure the version numb
 metadata.
 
 To enable this extension, you must add it to the list of extensions in
-your ``conf.py`` file::
+your ``conf.py`` file:
+
+.. code-block:: python
 
     extensions = [
         'pbr.sphinxext',
diff -pruN 6.1.1-2/pbr/_compat/command_hooks.py 7.0.1-2/pbr/_compat/command_hooks.py
--- 6.1.1-2/pbr/_compat/command_hooks.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/_compat/command_hooks.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,62 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import os
+
+from setuptools.command import easy_install
+
+import pbr._compat.commands
+from pbr.hooks import base
+from pbr import options
+
+
+class CommandsConfig(base.BaseConfig):
+
+    section = 'global'
+
+    def __init__(self, config):
+        super(CommandsConfig, self).__init__(config)
+        self.commands = self.config.get('commands', "")
+
+    def save(self):
+        self.config['commands'] = self.commands
+        super(CommandsConfig, self).save()
+
+    def add_command(self, command):
+        self.commands = "%s\n%s" % (self.commands, command)
+
+    def hook(self):
+        self.add_command('pbr._compat.commands.LocalEggInfo')
+        self.add_command('pbr._compat.commands.LocalSDist')
+        self.add_command('pbr._compat.commands.LocalInstallScripts')
+        self.add_command('pbr._compat.commands.LocalDevelop')
+        self.add_command('pbr._compat.commands.LocalRPMVersion')
+        self.add_command('pbr._compat.commands.LocalDebVersion')
+        if os.name != 'nt':
+            easy_install.get_script_args = (
+                pbr._compat.commands.override_get_script_args
+            )
+
+        use_egg = options.get_boolean_option(
+            self.pbr_config, 'use-egg', 'PBR_USE_EGG'
+        )
+        # We always want non-egg install unless explicitly requested
+        if 'manpages' in self.pbr_config or not use_egg:
+            self.add_command('pbr._compat.commands.LocalInstall')
+        else:
+            self.add_command('pbr._compat.commands.InstallWithGit')
diff -pruN 6.1.1-2/pbr/_compat/commands.py 7.0.1-2/pbr/_compat/commands.py
--- 6.1.1-2/pbr/_compat/commands.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/_compat/commands.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,442 @@
+# Copyright 2011 OpenStack Foundation
+# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import unicode_literals
+
+from distutils.command import install as du_install
+from distutils import log
+import os
+import sys
+
+import setuptools
+from setuptools.command import develop
+from setuptools.command import easy_install
+from setuptools.command import egg_info
+from setuptools.command import install
+from setuptools.command import install_scripts
+from setuptools.command import sdist
+
+from pbr import extra_files
+from pbr import git
+from pbr import options
+from pbr import version
+
+_wsgi_text = """#PBR Generated from %(group)r
+
+import threading
+
+from %(module_name)s import %(import_target)s
+
+if __name__ == "__main__":
+    import argparse
+    import socket
+    import sys
+    import wsgiref.simple_server as wss
+
+    parser = argparse.ArgumentParser(
+        description=%(import_target)s.__doc__,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+        usage='%%(prog)s [-h] [--port PORT] [--host IP] -- [passed options]')
+    parser.add_argument('--port', '-p', type=int, default=8000,
+                        help='TCP port to listen on')
+    parser.add_argument('--host', '-b', default='',
+                        help='IP to bind the server to')
+    parser.add_argument('args',
+                        nargs=argparse.REMAINDER,
+                        metavar='-- [passed options]',
+                        help="'--' is the separator of the arguments used "
+                        "to start the WSGI server and the arguments passed "
+                        "to the WSGI application.")
+    args = parser.parse_args()
+    if args.args:
+        if args.args[0] == '--':
+            args.args.pop(0)
+        else:
+            parser.error("unrecognized arguments: %%s" %% ' '.join(args.args))
+    sys.argv[1:] = args.args
+    server = wss.make_server(args.host, args.port, %(invoke_target)s())
+
+    print("*" * 80)
+    print("STARTING test server %(module_name)s.%(invoke_target)s")
+    url = "http://%%s:%%d/" %% (server.server_name, server.server_port)
+    print("Available at %%s" %% url)
+    print("DANGER! For testing only, do not use in production")
+    print("*" * 80)
+    sys.stdout.flush()
+
+    server.serve_forever()
+else:
+    application = None
+    app_lock = threading.Lock()
+
+    with app_lock:
+        if application is None:
+            application = %(invoke_target)s()
+
+"""
+
+_script_text = """# PBR Generated from %(group)r
+
+import sys
+
+from %(module_name)s import %(import_target)s
+
+
+if __name__ == "__main__":
+    sys.exit(%(invoke_target)s())
+"""
+
+# the following allows us to specify different templates per entry
+# point group when generating pbr scripts.
+ENTRY_POINTS_MAP = {
+    'console_scripts': _script_text,
+    'gui_scripts': _script_text,
+    'wsgi_scripts': _wsgi_text,
+}
+
+
+def generate_script(group, entry_point, header, template):
+    """Generate the script based on the template.
+
+    :param str group: The entry-point group name, e.g., "console_scripts".
+    :param str header: The first line of the script, e.g.,
+        "!#/usr/bin/env python".
+    :param str template: The script template.
+    :returns: The templated script content
+    :rtype: str
+    """
+    if not entry_point.attrs or len(entry_point.attrs) > 2:
+        raise ValueError(
+            "Script targets must be of the form "
+            "'func' or 'Class.class_method'."
+        )
+
+    script_text = template % {
+        'group': group,
+        'module_name': entry_point.module_name,
+        'import_target': entry_point.attrs[0],
+        'invoke_target': '.'.join(entry_point.attrs),
+    }
+    return header + script_text
+
+
+def override_get_script_args(
+    dist, executable=os.path.normpath(sys.executable)
+):
+    """Override entrypoints console_script."""
+    # get_script_header() is deprecated since Setuptools 12.0
+    try:
+        header = easy_install.ScriptWriter.get_header("", executable)
+    except AttributeError:
+        header = easy_install.get_script_header("", executable)
+    for group, template in ENTRY_POINTS_MAP.items():
+        for name, ep in dist.get_entry_map(group).items():
+            yield (name, generate_script(group, ep, header, template))
+
+
+class LocalDevelop(develop.develop):
+
+    command_name = 'develop'
+
+    def install_wrapper_scripts(self, dist):
+        if sys.platform == 'win32':
+            return develop.develop.install_wrapper_scripts(self, dist)
+        if not self.exclude_scripts:
+            for args in override_get_script_args(dist):
+                self.write_script(*args)
+
+
+class LocalInstallScripts(install_scripts.install_scripts):
+    """Intercepts console scripts entry_points."""
+
+    command_name = 'install_scripts'
+
+    def _make_wsgi_scripts_only(self, dist, executable):
+        # get_script_header() is deprecated since Setuptools 12.0
+        try:
+            header = easy_install.ScriptWriter.get_header("", executable)
+        except AttributeError:
+            header = easy_install.get_script_header("", executable)
+        wsgi_script_template = ENTRY_POINTS_MAP['wsgi_scripts']
+        for name, ep in dist.get_entry_map('wsgi_scripts').items():
+            content = generate_script(
+                'wsgi_scripts', ep, header, wsgi_script_template
+            )
+            self.write_script(name, content)
+
+    def run(self):
+        import distutils.command.install_scripts
+        import pkg_resources
+
+        self.run_command("egg_info")
+        if self.distribution.scripts:
+            # run first to set up self.outfiles
+            distutils.command.install_scripts.install_scripts.run(self)
+        else:
+            self.outfiles = []
+
+        ei_cmd = self.get_finalized_command("egg_info")
+        dist = pkg_resources.Distribution(
+            ei_cmd.egg_base,
+            pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+            ei_cmd.egg_name,
+            ei_cmd.egg_version,
+        )
+        bs_cmd = self.get_finalized_command('build_scripts')
+        executable = getattr(bs_cmd, 'executable', easy_install.sys_executable)
+        if 'bdist_wheel' in self.distribution.have_run:
+            # We're building a wheel which has no way of generating mod_wsgi
+            # scripts for us. Let's build them.
+            # NOTE(sigmavirus24): This needs to happen here because, as the
+            # comment below indicates, no_ep is True when building a wheel.
+            self._make_wsgi_scripts_only(dist, executable)
+
+        if self.no_ep:
+            # no_ep is True if we're installing into an .egg file or building
+            # a .whl file, in those cases, we do not want to build all of the
+            # entry-points listed for this package.
+            return
+
+        if os.name != 'nt':
+            get_script_args = override_get_script_args
+        else:
+            get_script_args = easy_install.get_script_args
+            executable = '"%s"' % executable
+
+        for args in get_script_args(dist, executable):
+            self.write_script(*args)
+
+
+class LocalManifestMaker(egg_info.manifest_maker):
+    """Add any files that are in git and some standard sensible files."""
+
+    def _add_pbr_defaults(self):
+        for template_line in [
+            'include AUTHORS',
+            'include ChangeLog',
+            'exclude .gitignore',
+            'exclude .gitreview',
+            'global-exclude *.pyc',
+        ]:
+            self.filelist.process_template_line(template_line)
+
+    def add_defaults(self):
+        """Add all the default files to self.filelist:
+
+        Extends the functionality provided by distutils to also include
+        additional sane defaults, such as the ``AUTHORS`` and ``ChangeLog``
+        files generated by *pbr*.
+
+        Warns if (``README`` or ``README.txt``) or ``setup.py`` are missing;
+        everything else is optional.
+        """
+        option_dict = self.distribution.get_option_dict('pbr')
+
+        sdist.sdist.add_defaults(self)
+        self.filelist.append(self.template)
+        self.filelist.append(self.manifest)
+        self.filelist.extend(extra_files.get_extra_files())
+        should_skip = options.get_boolean_option(
+            option_dict, 'skip_git_sdist', 'SKIP_GIT_SDIST'
+        )
+        if not should_skip:
+            rcfiles = git._find_git_files()
+            if rcfiles:
+                self.filelist.extend(rcfiles)
+        elif os.path.exists(self.manifest):
+            self.read_manifest()
+        ei_cmd = self.get_finalized_command('egg_info')
+        self._add_pbr_defaults()
+        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
+
+
+class LocalEggInfo(egg_info.egg_info):
+    """Override the egg_info command to regenerate SOURCES.txt sensibly."""
+
+    command_name = 'egg_info'
+
+    def find_sources(self):
+        """Generate SOURCES.txt only if there isn't one already.
+
+        If we are in an sdist command, then we always want to update
+        SOURCES.txt. If we are not in an sdist command, then it doesn't
+        matter one flip, and is actually destructive.
+        However, if we're in a git context, it's always the right thing to do
+        to recreate SOURCES.txt
+        """
+        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
+        if (
+            not os.path.exists(manifest_filename)
+            or os.path.exists('.git')
+            or 'sdist' in sys.argv
+        ):
+            log.info("[pbr] Processing SOURCES.txt")
+            mm = LocalManifestMaker(self.distribution)
+            mm.manifest = manifest_filename
+            mm.run()
+            self.filelist = mm.filelist
+        else:
+            log.info("[pbr] Reusing existing SOURCES.txt")
+            self.filelist = egg_info.FileList()
+            with open(manifest_filename, 'r') as fil:
+                for entry in fil.read().split('\n'):
+                    self.filelist.append(entry)
+
+
+def _from_git(distribution):
+    option_dict = distribution.get_option_dict('pbr')
+    changelog = git._iter_log_oneline()
+    if changelog:
+        changelog = git._iter_changelog(changelog)
+    git.write_git_changelog(option_dict=option_dict, changelog=changelog)
+    git.generate_authors(option_dict=option_dict)
+
+
+class InstallWithGit(install.install):
+    """Extracts ChangeLog and AUTHORS from git then installs.
+
+    This is useful for e.g. readthedocs where the package is
+    installed and then docs built.
+    """
+
+    command_name = 'install'
+
+    def run(self):
+        _from_git(self.distribution)
+        return install.install.run(self)
+
+
+class LocalInstall(install.install):
+    """Runs python setup.py install in a sensible manner.
+
+    Force a non-egg install in the manner of
+    single-version-externally-managed, which allows us to install manpages
+    and config files.
+    """
+
+    command_name = 'install'
+
+    def run(self):
+        _from_git(self.distribution)
+        return du_install.install.run(self)
+
+
+class LocalSDist(sdist.sdist):
+    """Builds the ChangeLog and Authors files from VC first."""
+
+    command_name = 'sdist'
+
+    def checking_reno(self):
+        """Ensure reno is installed and configured.
+
+        We can't run reno-based commands if reno isn't installed/available, and
+        don't want to if the user isn't using it.
+        """
+        if hasattr(self, '_has_reno'):
+            return self._has_reno
+
+        option_dict = self.distribution.get_option_dict('pbr')
+        should_skip = options.get_boolean_option(
+            option_dict, 'skip_reno', 'SKIP_GENERATE_RENO'
+        )
+        if should_skip:
+            self._has_reno = False
+            return False
+
+        try:
+            # versions of reno without this module will not have the required
+            # feature, hence the import
+            from reno import setup_command  # noqa
+        except ImportError:
+            log.info(
+                '[pbr] reno was not found or is too old. Skipping '
+                'release notes'
+            )
+            self._has_reno = False
+            return False
+
+        conf, output_file, cache_file = setup_command.load_config(
+            self.distribution
+        )
+
+        if not os.path.exists(os.path.join(conf.reporoot, conf.notespath)):
+            log.info(
+                '[pbr] reno does not appear to be configured. Skipping '
+                'release notes'
+            )
+            self._has_reno = False
+            return False
+
+        self._files = [output_file, cache_file]
+
+        log.info('[pbr] Generating release notes')
+        self._has_reno = True
+
+        return True
+
+    sub_commands = [('build_reno', checking_reno)] + sdist.sdist.sub_commands
+
+    def run(self):
+        _from_git(self.distribution)
+        # sdist.sdist is an old style class, can't use super()
+        sdist.sdist.run(self)
+
+    def make_distribution(self):
+        # This is included in make_distribution because setuptools doesn't use
+        # 'get_file_list'. As such, this is the only hook point that runs after
+        # the commands in 'sub_commands'
+        if self.checking_reno():
+            self.filelist.extend(self._files)
+            self.filelist.sort()
+        sdist.sdist.make_distribution(self)
+
+
+class LocalRPMVersion(setuptools.Command):
+    __doc__ = """Output the rpm *compatible* version string of this package"""
+    description = __doc__
+
+    user_options = []
+    command_name = "rpm_version"
+
+    def run(self):
+        log.info("[pbr] Extracting rpm version")
+        name = self.distribution.get_name()
+        print(version.VersionInfo(name).semantic_version().rpm_string())
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+
+class LocalDebVersion(setuptools.Command):
+    __doc__ = """Output the deb *compatible* version string of this package"""
+    description = __doc__
+
+    user_options = []
+    command_name = "deb_version"
+
+    def run(self):
+        log.info("[pbr] Extracting deb version")
+        name = self.distribution.get_name()
+        print(version.VersionInfo(name).semantic_version().debian_string())
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
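
A minimal sketch of how the ``generate_script`` helper above expands an entry
point into a launcher script. The entry point ``my-tool = my_project.cmd:main``
is hypothetical, and ``pbr._compat.commands`` is a private module, so this is
illustrative only (it assumes setuptools/pkg_resources are available):

.. code-block:: python

    # Hypothetical entry point; generate_script() fills in the console_scripts
    # template from ENTRY_POINTS_MAP with the module and callable to invoke.
    import pkg_resources

    from pbr._compat import commands

    ep = pkg_resources.EntryPoint.parse('my-tool = my_project.cmd:main')
    script = commands.generate_script(
        'console_scripts',
        ep,
        '#!/usr/bin/env python3\n',
        commands.ENTRY_POINTS_MAP['console_scripts'],
    )
    print(script)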
diff -pruN 6.1.1-2/pbr/_compat/five.py 7.0.1-2/pbr/_compat/five.py
--- 6.1.1-2/pbr/_compat/five.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/_compat/five.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,65 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Poor man's six."""
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import sys
+
+# builtins
+
+if sys.version_info >= (3, 0):
+    string_type = str
+    integer_types = (int,)
+else:
+    string_type = basestring  # noqa
+    integer_types = (int, long)  # noqa
+
+# io
+
+if sys.version_info >= (3, 0):
+    import io
+
+    BytesIO = io.BytesIO
+else:
+    import cStringIO as io
+
+    BytesIO = io.StringIO
+
+# configparser
+
+if sys.version_info >= (3, 0):
+    import configparser
+
+    ConfigParser = configparser.ConfigParser
+else:
+    import ConfigParser as configparser
+
+    ConfigParser = configparser.SafeConfigParser
+    # monkeypatch in renamed method
+    ConfigParser.read_file = ConfigParser.readfp
+
+# urllib.parse.urlparse
+
+if sys.version_info >= (3, 0):
+    from urllib.parse import urlparse
+else:
+    from urlparse import urlparse  # noqa
+
+# urllib.request.urlopen
+
+if sys.version_info >= (3, 0):
+    from urllib.request import urlopen
+else:
+    from urllib2 import urlopen  # noqa
diff -pruN 6.1.1-2/pbr/_compat/metadata.py 7.0.1-2/pbr/_compat/metadata.py
--- 6.1.1-2/pbr/_compat/metadata.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/_compat/metadata.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,163 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Metadata parsing."""
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import json
+import sys
+
+_metadata_lib = None
+
+METADATA_LIB_STDLIB = 'importlib.metadata'
+METADATA_LIB_BACKPORT = 'importlib_metadata'
+METADATA_LIB_LEGACY = 'pkg_resources'
+
+
+def _get_metadata_lib():
+    """Retrieve the correct metadata library to use."""
+    global _metadata_lib
+
+    if _metadata_lib is not None:
+        return _metadata_lib
+
+    # try importlib.metadata first. This will be available from the stdlib
+    # starting in python >= 3.8
+    if sys.version_info >= (3, 8):
+        _metadata_lib = METADATA_LIB_STDLIB
+        return _metadata_lib
+
+    # try importlib_metadata next. This must be installed from PyPI and we
+    # don't vendor it, but if available it will be preferred since later
+    # versions of pkg_resources issue very annoying deprecation warnings
+    try:
+        import importlib_metadata  # noqa
+
+        _metadata_lib = METADATA_LIB_BACKPORT
+        return _metadata_lib
+    except ImportError:
+        pass
+
+    # pkg_resources is our fallback. This will always be available on older
+    # Python versions since it's part of setuptools.
+    try:
+        import pkg_resources  # noqa
+
+        _metadata_lib = METADATA_LIB_LEGACY
+        return _metadata_lib
+    except ImportError:
+        pass
+
+    raise RuntimeError(
+        'Failed to find a library for loading metadata. This should not '
+        'happen. Please report a bug against pbr.'
+    )
+
+
+def get_distributions():
+    metadata_lib = _get_metadata_lib()
+    if metadata_lib == METADATA_LIB_STDLIB:
+        import importlib.metadata
+
+        data = sorted(
+            importlib.metadata.distributions(),
+            key=lambda x: x.metadata['name'].lower(),
+        )
+    elif metadata_lib == METADATA_LIB_BACKPORT:
+        import importlib_metadata
+
+        data = sorted(
+            importlib_metadata.distributions(),
+            key=lambda x: x.metadata['name'].lower(),
+        )
+    else:  # METADATA_LIB_LEGACY
+        import pkg_resources
+
+        data = sorted(
+            pkg_resources.working_set,
+            key=lambda dist: dist.project_name.lower(),
+        )
+
+    return list(data)
+
+
+class PackageNotFound(Exception):
+    def __init__(self, package_name):
+        self.package_name = package_name
+
+    def __str__(self):
+        return 'Package {0} not installed'.format(self.package_name)
+
+
+def get_metadata(package_name):
+    metadata_lib = _get_metadata_lib()
+    if metadata_lib == METADATA_LIB_STDLIB:
+        import importlib.metadata
+
+        try:
+            data = importlib.metadata.distribution(package_name).metadata[
+                'pbr.json'
+            ]
+        except importlib.metadata.PackageNotFoundError:
+            raise PackageNotFound(package_name)
+    elif metadata_lib == METADATA_LIB_BACKPORT:
+        import importlib_metadata
+
+        try:
+            data = importlib_metadata.distribution(package_name).metadata[
+                'pbr.json'
+            ]
+        except importlib_metadata.PackageNotFoundError:
+            raise PackageNotFound(package_name)
+    else:  # METADATA_LIB_LEGACY
+        import pkg_resources
+
+        try:
+            data = pkg_resources.get_distribution(package_name).get_metadata(
+                'pbr.json'
+            )
+        except pkg_resources.DistributionNotFound:
+            raise PackageNotFound(package_name)
+
+    try:
+        return json.loads(data)
+    except Exception:
+        # TODO(stephenfin): We should log an error here. Can we still use
+        # distutils.log in the future?
+        return None
+
+
+def get_version(package_name):
+    metadata_lib = _get_metadata_lib()
+    if metadata_lib == METADATA_LIB_STDLIB:
+        import importlib.metadata
+
+        try:
+            return importlib.metadata.distribution(package_name).version
+        except importlib.metadata.PackageNotFoundError:
+            raise PackageNotFound(package_name)
+    elif metadata_lib == METADATA_LIB_BACKPORT:
+        import importlib_metadata
+
+        try:
+            return importlib_metadata.distribution(package_name).version
+        except importlib_metadata.PackageNotFoundError:
+            raise PackageNotFound(package_name)
+    else:  # METADATA_LIB_LEGACY
+        import pkg_resources
+
+        try:
+            return pkg_resources.get_distribution(package_name).version
+        except pkg_resources.DistributionNotFound:
+            raise PackageNotFound(package_name)
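A sketch of how a caller uses the metadata shim; which backend (importlib.metadata, importlib_metadata or pkg_resources) gets picked is an implementation detail. Assumes pbr 7.x is installed and was itself built with a pbr.json:

    from pbr._compat import metadata

    try:
        print(metadata.get_version('pbr'))      # installed version string
        info = metadata.get_metadata('pbr')     # parsed pbr.json, or None
        if info is not None:
            print(info.get('git_version'), info.get('is_release'))
    except metadata.PackageNotFound as exc:
        print(exc)                              # "Package <name> not installed"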
diff -pruN 6.1.1-2/pbr/_compat/packaging.py 7.0.1-2/pbr/_compat/packaging.py
--- 6.1.1-2/pbr/_compat/packaging.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/_compat/packaging.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,111 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Utilities to paste over differences between Python versions."""
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import re
+
+_packaging_lib = None
+
+PACKAGING_LIB_PACKAGING = 'packaging'
+PACKAGING_LIB_LEGACY = 'pkg_resources'
+
+
+def _get_packaging_lib():
+    global _packaging_lib
+
+    if _packaging_lib is not None:
+        return _packaging_lib
+
+    # packaging should almost always be available since setuptools vendors it
+    # and has done so since forever
+    #
+    # https://github.com/pypa/setuptools/commit/84c9006110e53c84296a05741edb7b9edd305f12
+    try:
+        import packaging  # noqa
+
+        _packaging_lib = PACKAGING_LIB_PACKAGING
+        return _packaging_lib
+    except ImportError:
+        pass
+
+    # pkg_resources is our fallback. This will always be available on older
+    # Python versions since it's part of setuptools.
+    try:
+        import pkg_resources  # noqa
+
+        _packaging_lib = PACKAGING_LIB_LEGACY
+        return _packaging_lib
+    except ImportError:
+        pass
+
+    raise RuntimeError(
+        'Failed to find a library for parsing packaging information. This '
+        'should not happen. Please report a bug against pbr.'
+    )
+
+
+def extract_project_name(requirement_line):
+    packaging_lib = _get_packaging_lib()
+    if packaging_lib == PACKAGING_LIB_PACKAGING:
+        import packaging.requirements
+
+        try:
+            requirement = packaging.requirements.Requirement(requirement_line)
+        except ValueError:
+            return None
+
+        # the .project_name attribute is not part of the
+        # packaging.requirements.Requirement API so we mimic it
+        #
+        # https://github.com/pypa/setuptools/blob/v80.9.0/pkg_resources/__init__.py#L2918
+        return re.sub('[^A-Za-z0-9.]+', '-', requirement.name)
+    else:  # PACKAGING_LIB_LEGACY
+        import pkg_resources
+
+        try:
+            requirement = pkg_resources.Requirement.parse(requirement_line)
+        except ValueError:
+            return None
+        return requirement.project_name
+
+
+def parse_version(version):
+    packaging_lib = _get_packaging_lib()
+    if packaging_lib == PACKAGING_LIB_PACKAGING:
+        import packaging.version
+
+        return packaging.version.Version(version)
+    else:  # PACKAGING_LIB_LEGACY
+        import pkg_resources
+
+        return pkg_resources.parse_version(version)
+
+
+def evaluate_marker(marker):
+    packaging_lib = _get_packaging_lib()
+    if packaging_lib == PACKAGING_LIB_PACKAGING:
+        import packaging.markers
+
+        try:
+            return packaging.markers.Marker(marker).evaluate()
+        except packaging.markers.InvalidMarker as e:
+            # setuptools expects a SyntaxError here, so we do the same.
+            # we can't chain the exceptions since that is a Python 3 only thing
+            raise SyntaxError(e)
+    else:  # PACKAGING_LIB_LEGACY
+        import pkg_resources
+
+        return pkg_resources.evaluate_marker(marker)
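The packaging shim exposes the three helpers the rest of pbr relies on. A minimal sketch of their behaviour, assuming pbr 7.x and a working packaging backend:

    from pbr._compat import packaging as compat

    # Project-name extraction mimics pkg_resources' .project_name
    # normalisation; unparseable requirement lines come back as None.
    print(compat.extract_project_name('requests>=2.0'))         # requests
    print(compat.extract_project_name('not a requirement !!'))  # None

    # Versions compare semantically, not lexically.
    print(compat.parse_version('2.0.0rc1') < compat.parse_version('2.0.0'))  # True

    # Markers evaluate against the running interpreter.
    print(compat.evaluate_marker('python_version >= "3"'))      # True on Python 3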
diff -pruN 6.1.1-2/pbr/build.py 7.0.1-2/pbr/build.py
--- 6.1.1-2/pbr/build.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/build.py	2025-08-14 16:07:35.000000000 +0000
@@ -23,6 +23,9 @@ Add::
 to ``pyproject.toml`` to use this.
 """
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 from setuptools import build_meta
 
 __all__ = [
@@ -39,6 +42,7 @@ __all__ = [
 
 # PEP-517
 
+
 def get_requires_for_build_wheel(config_settings=None):
     return build_meta.get_requires_for_build_wheel(
         config_settings=config_settings,
@@ -79,6 +83,7 @@ def build_sdist(sdist_directory, config_
 
 # PEP-660
 
+
 def build_editable(
     wheel_directory,
     config_settings=None,
diff -pruN 6.1.1-2/pbr/cmd/main.py 7.0.1-2/pbr/cmd/main.py
--- 6.1.1-2/pbr/cmd/main.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/cmd/main.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,26 +13,16 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import argparse
-import json
 import sys
 
-import pkg_resources
-
+import pbr._compat.metadata
 import pbr.version
 
 
-def _get_metadata(package_name):
-    try:
-        return json.loads(
-            pkg_resources.get_distribution(
-                package_name).get_metadata('pbr.json'))
-    except pkg_resources.DistributionNotFound:
-        raise Exception('Package {0} not installed'.format(package_name))
-    except Exception:
-        return None
-
-
 def get_sha(args):
     sha = _get_info(args.name)['sha']
     if sha:
@@ -43,13 +33,17 @@ def get_info(args):
     if args.short:
         print("{version}".format(**_get_info(args.name)))
     else:
-        print("{name}\t{version}\t{released}\t{sha}".format(
-            **_get_info(args.name)))
+        print(
+            "{name}\t{version}\t{released}\t{sha}".format(
+                **_get_info(args.name)
+            )
+        )
+
 
+def _get_info(package_name):
+    metadata = pbr._compat.metadata.get_metadata(package_name)
+    version = pbr._compat.metadata.get_version(package_name)
 
-def _get_info(name):
-    metadata = _get_metadata(name)
-    version = pkg_resources.get_distribution(name).version
     if metadata:
         if metadata['is_release']:
             released = 'released'
@@ -67,13 +61,17 @@ def _get_info(name):
             for part in version_parts:
                 if not part.isdigit():
                     released = "pre-release"
-    return dict(name=name, version=version, sha=sha, released=released)
+
+    return {
+        'name': package_name,
+        'version': version,
+        'sha': sha,
+        'released': released,
+    }
 
 
 def freeze(args):
-    sorted_dists = sorted(pkg_resources.working_set,
-                          key=lambda dist: dist.project_name.lower())
-    for dist in sorted_dists:
+    for dist in pbr._compat.metadata.get_distributions():
         info = _get_info(dist.project_name)
         output = "{name}=={version}".format(**info)
         if info['sha']:
@@ -83,14 +81,21 @@ def freeze(args):
 
 def main():
     parser = argparse.ArgumentParser(
-        description='pbr: Python Build Reasonableness')
+        description='pbr: Python Build Reasonableness'
+    )
     parser.add_argument(
-        '-v', '--version', action='version',
-        version=str(pbr.version.VersionInfo('pbr')))
+        '-v',
+        '--version',
+        action='version',
+        version=str(pbr.version.VersionInfo('pbr')),
+    )
 
     subparsers = parser.add_subparsers(
-        title='commands', description='valid commands', help='additional help',
-        dest='cmd')
+        title='commands',
+        description='valid commands',
+        help='additional help',
+        dest='cmd',
+    )
     subparsers.required = True
 
     cmd_sha = subparsers.add_parser('sha', help='print sha of package')
@@ -98,14 +103,20 @@ def main():
     cmd_sha.add_argument('name', help='package to print sha of')
 
     cmd_info = subparsers.add_parser(
-        'info', help='print version info for package')
+        'info', help='print version info for package'
+    )
     cmd_info.set_defaults(func=get_info)
     cmd_info.add_argument('name', help='package to print info of')
-    cmd_info.add_argument('-s', '--short', action="store_true",
-                          help='only display package version')
+    cmd_info.add_argument(
+        '-s',
+        '--short',
+        action="store_true",
+        help='only display package version',
+    )
 
     cmd_freeze = subparsers.add_parser(
-        'freeze', help='print version info for all installed packages')
+        'freeze', help='print version info for all installed packages'
+    )
     cmd_freeze.set_defaults(func=freeze)
 
     args = parser.parse_args()
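The CLI keeps the same sha, info and freeze subcommands but now routes every metadata lookup through pbr._compat.metadata. A hypothetical programmatic use of the helper the subcommands share (normally you would just run the console script, e.g. 'pbr info pbr' or 'pbr freeze'):

    import pbr.cmd.main as cli

    # _get_info() backs both 'pbr info' and 'pbr freeze': it returns the name,
    # version, vcs sha and release status of an installed package.
    info = cli._get_info('pbr')
    print('{name}=={version}  # {released}'.format(**info))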
diff -pruN 6.1.1-2/pbr/core.py 7.0.1-2/pbr/core.py
--- 6.1.1-2/pbr/core.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/core.py	2025-08-14 16:07:35.000000000 +0000
@@ -43,6 +43,9 @@
 # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
 # DAMAGE.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
 import os
 import sys
@@ -50,17 +53,11 @@ import warnings
 
 from distutils import errors
 
+from pbr._compat.five import integer_types
+from pbr._compat.five import string_type
 from pbr import util
 
 
-if sys.version_info[0] == 3:
-    string_type = str
-    integer_types = (int,)
-else:
-    string_type = basestring  # noqa
-    integer_types = (int, long)  # noqa
-
-
 def pbr(dist, attr, value):
     """Implements the actual pbr setup() keyword.
 
@@ -98,7 +95,8 @@ def pbr(dist, attr, value):
         path = os.path.abspath('setup.cfg')
     if not os.path.exists(path):
         raise errors.DistutilsFileError(
-            'The setup.cfg file %s does not exist.' % path)
+            'The setup.cfg file %s does not exist.' % path
+        )
 
     # Converts the setup.cfg file to setup() arguments
     try:
@@ -110,7 +108,8 @@ def pbr(dist, attr, value):
         # being pretty isn't the #1 goal.. being diagnosable is.
         logging.exception('Error parsing')
         raise errors.DistutilsSetupError(
-            'Error parsing %s: %s: %s' % (path, e.__class__.__name__, e))
+            'Error parsing %s: %s: %s' % (path, e.__class__.__name__, e)
+        )
 
     # There are some metadata fields that are only supported by
     # setuptools and not distutils, and hence are not in
@@ -118,7 +117,9 @@ def pbr(dist, attr, value):
     # see
     #  https://github.com/pypa/setuptools/pull/1343
     _DISTUTILS_UNSUPPORTED_METADATA = (
-        'long_description_content_type', 'project_urls', 'provides_extras'
+        'long_description_content_type',
+        'project_urls',
+        'provides_extras',
     )
 
     # Repeat some of the Distribution initialization code with the newly
diff -pruN 6.1.1-2/pbr/extra_files.py 7.0.1-2/pbr/extra_files.py
--- 6.1.1-2/pbr/extra_files.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/extra_files.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 from distutils import errors
 import os
 
@@ -30,6 +33,7 @@ def set_extra_files(extra_files):
         if not os.path.exists(filename):
             raise errors.DistutilsFileError(
                 '%s from the extra_files option in setup.cfg does not '
-                'exist' % filename)
+                'exist' % filename
+            )
     global _extra_files
     _extra_files[:] = extra_files[:]
diff -pruN 6.1.1-2/pbr/find_package.py 7.0.1-2/pbr/find_package.py
--- 6.1.1-2/pbr/find_package.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/find_package.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,9 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import os
 
 import setuptools
@@ -24,6 +27,7 @@ def smart_find_packages(package_list):
     for pkg in package_list.strip().split("\n"):
         pkg_path = pkg.replace('.', os.path.sep)
         packages.append(pkg)
-        packages.extend(['%s.%s' % (pkg, f)
-                         for f in setuptools.find_packages(pkg_path)])
+        packages.extend(
+            ['%s.%s' % (pkg, f) for f in setuptools.find_packages(pkg_path)]
+        )
     return "\n".join(set(packages))
diff -pruN 6.1.1-2/pbr/git.py 7.0.1-2/pbr/git.py
--- 6.1.1-2/pbr/git.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/git.py	2025-08-14 16:07:35.000000000 +0000
@@ -14,6 +14,8 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
 from __future__ import unicode_literals
 
 import distutils.errors
@@ -25,8 +27,7 @@ import re
 import subprocess
 import time
 
-import pkg_resources
-
+import pbr._compat.packaging
 from pbr import options
 from pbr import version
 
@@ -43,14 +44,14 @@ def _run_shell_command(cmd, throw_on_err
     if env:
         newenv.update(env)
 
-    output = subprocess.Popen(cmd,
-                              stdout=out_location,
-                              stderr=err_location,
-                              env=newenv)
+    output = subprocess.Popen(
+        cmd, stdout=out_location, stderr=err_location, env=newenv
+    )
     out = output.communicate()
     if output.returncode and throw_on_error:
         raise distutils.errors.DistutilsError(
-            "%s returned %d" % (cmd, output.returncode))
+            "%s returned %d" % (cmd, output.returncode)
+        )
     if len(out) == 0 or not out[0] or not out[0].strip():
         return ''
     # Since we don't control the history, and forcing users to rebase arbitrary
@@ -62,7 +63,8 @@ def _run_git_command(cmd, git_dir, **kwa
     if not isinstance(cmd, (list, tuple)):
         cmd = [cmd]
     return _run_shell_command(
-        ['git', '--git-dir=%s' % git_dir] + cmd, **kwargs)
+        ['git', '--git-dir=%s' % git_dir] + cmd, **kwargs
+    )
 
 
 def _get_git_directory():
@@ -90,9 +92,9 @@ def _get_highest_tag(tags):
     """Find the highest tag from a list.
 
     Pass in a list of tag strings and this will return the highest
-    (latest) as sorted by the pkg_resources version parser.
+    (latest) as sorted by the (Python) version parsing algorithm.
     """
-    return max(tags, key=pkg_resources.parse_version)
+    return max(tags, key=pbr._compat.packaging.parse_version)
 
 
 def _find_git_files(dirname='', git_dir=None):
@@ -138,8 +140,7 @@ def get_git_short_sha(git_dir=None):
     if not git_dir:
         git_dir = _run_git_functions()
     if git_dir:
-        return _run_git_command(
-            ['log', '-n1', '--pretty=format:%h'], git_dir)
+        return _run_git_command(['log', '-n1', '--pretty=format:%h'], git_dir)
     return None
 
 
@@ -180,14 +181,15 @@ def _iter_changelog(changelog):
             if not first_line:
                 yield current_release, '\n'
             yield current_release, (
-                "%(tag)s\n%(underline)s\n\n" %
-                dict(tag=current_release, underline=underline))
+                "%(tag)s\n%(underline)s\n\n"
+                % {'tag': current_release, 'underline': underline}
+            )
 
         if not msg.startswith("Merge "):
             if msg.endswith("."):
                 msg = msg[:-1]
             msg = _clean_changelog_message(msg)
-            yield current_release, "* %(msg)s\n" % dict(msg=msg)
+            yield current_release, "* %(msg)s\n" % {'msg': msg}
         first_line = False
 
 
@@ -260,16 +262,20 @@ def _iter_log_inner(git_dir):
         yield sha, tags, msg
 
 
-def write_git_changelog(git_dir=None, dest_dir=os.path.curdir,
-                        option_dict=None, changelog=None):
+def write_git_changelog(
+    git_dir=None, dest_dir=os.path.curdir, option_dict=None, changelog=None
+):
     """Write a changelog based on the git changelog."""
-    start = time.time()
-    if not option_dict:
+    if option_dict is None:
         option_dict = {}
-    should_skip = options.get_boolean_option(option_dict, 'skip_changelog',
-                                             'SKIP_WRITE_GIT_CHANGELOG')
+
+    should_skip = options.get_boolean_option(
+        option_dict, 'skip_changelog', 'SKIP_WRITE_GIT_CHANGELOG'
+    )
     if should_skip:
         return
+
+    start = time.time()
     if not changelog:
         changelog = _iter_log_oneline(git_dir=git_dir)
         if changelog:
@@ -280,8 +286,10 @@ def write_git_changelog(git_dir=None, de
     new_changelog = os.path.join(dest_dir, 'ChangeLog')
     if os.path.exists(new_changelog) and not os.access(new_changelog, os.W_OK):
         # If there's already a ChangeLog and it's not writable, just use it
-        log.info('[pbr] ChangeLog not written (file already'
-                 ' exists and it is not writeable)')
+        log.info(
+            '[pbr] ChangeLog not written (file already'
+            ' exists and it is not writeable)'
+        )
         return
 
     log.info('[pbr] Writing ChangeLog')
@@ -292,10 +300,14 @@ def write_git_changelog(git_dir=None, de
     log.info('[pbr] ChangeLog complete (%0.1fs)' % (stop - start))
 
 
-def generate_authors(git_dir=None, dest_dir='.', option_dict=dict()):
+def generate_authors(git_dir=None, dest_dir='.', option_dict=None):
     """Create AUTHORS file using git commits."""
-    should_skip = options.get_boolean_option(option_dict, 'skip_authors',
-                                             'SKIP_GENERATE_AUTHORS')
+    if option_dict is None:
+        option_dict = {}
+
+    should_skip = options.get_boolean_option(
+        option_dict, 'skip_authors', 'SKIP_GENERATE_AUTHORS'
+    )
     if should_skip:
         return
 
@@ -320,10 +332,12 @@ def generate_authors(git_dir=None, dest_
 
         # get all co-authors from commit messages
         co_authors_out = _run_git_command('log', git_dir)
-        co_authors = re.findall('Co-authored-by:.+', co_authors_out,
-                                re.MULTILINE)
-        co_authors = [signed.split(":", 1)[1].strip()
-                      for signed in co_authors if signed]
+        co_authors = re.findall(
+            'Co-authored-by:.+', co_authors_out, re.MULTILINE
+        )
+        co_authors = [
+            signed.split(":", 1)[1].strip() for signed in co_authors if signed
+        ]
 
         authors += co_authors
         authors = sorted(set(authors))
@@ -332,7 +346,6 @@ def generate_authors(git_dir=None, dest_
             if os.path.exists(old_authors):
                 with open(old_authors, "rb") as old_authors_fh:
                     new_authors_fh.write(old_authors_fh.read())
-            new_authors_fh.write(('\n'.join(authors) + '\n')
-                                 .encode('utf-8'))
+            new_authors_fh.write(('\n'.join(authors) + '\n').encode('utf-8'))
     stop = time.time()
     log.info('[pbr] AUTHORS complete (%0.1fs)' % (stop - start))
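Tag selection in git.py now goes through the compat version parser rather than pkg_resources. A small sketch of the ordering rule _get_highest_tag relies on (assumes pbr 7.x):

    from pbr._compat import packaging as compat

    # Tags are compared as versions, so '7.0.0' beats both '6.9.0' and its own
    # release candidate; a plain string sort would pick the rc tag instead.
    tags = ['6.1.1', '6.9.0', '7.0.0.0rc1', '7.0.0']
    print(max(tags, key=compat.parse_version))  # 7.0.0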
diff -pruN 6.1.1-2/pbr/hooks/__init__.py 7.0.1-2/pbr/hooks/__init__.py
--- 6.1.1-2/pbr/hooks/__init__.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/hooks/__init__.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,8 +13,11 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
+from pbr._compat import command_hooks as commands
 from pbr.hooks import backwards
-from pbr.hooks import commands
 from pbr.hooks import files
 from pbr.hooks import metadata
 
diff -pruN 6.1.1-2/pbr/hooks/backwards.py 7.0.1-2/pbr/hooks/backwards.py
--- 6.1.1-2/pbr/hooks/backwards.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/hooks/backwards.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,9 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 from pbr.hooks import base
 from pbr import packaging
 
@@ -24,10 +27,12 @@ class BackwardsCompatConfig(base.BaseCon
     def hook(self):
         self.config['include_package_data'] = 'True'
         packaging.append_text_list(
-            self.config, 'dependency_links',
-            packaging.parse_dependency_links())
+            self.config, 'dependency_links', packaging.parse_dependency_links()
+        )
         packaging.append_text_list(
-            self.config, 'tests_require',
+            self.config,
+            'tests_require',
             packaging.parse_requirements(
-                packaging.TEST_REQUIREMENTS_FILES,
-                strip_markers=True))
+                packaging.TEST_REQUIREMENTS_FILES, strip_markers=True
+            ),
+        )
diff -pruN 6.1.1-2/pbr/hooks/base.py 7.0.1-2/pbr/hooks/base.py
--- 6.1.1-2/pbr/hooks/base.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/hooks/base.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,9 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 
 class BaseConfig(object):
 
@@ -20,8 +23,8 @@ class BaseConfig(object):
 
     def __init__(self, config):
         self._global_config = config
-        self.config = self._global_config.get(self.section, dict())
-        self.pbr_config = config.get('pbr', dict())
+        self.config = self._global_config.get(self.section, {})
+        self.pbr_config = config.get('pbr', {})
 
     def run(self):
         self.hook()
diff -pruN 6.1.1-2/pbr/hooks/commands.py 7.0.1-2/pbr/hooks/commands.py
--- 6.1.1-2/pbr/hooks/commands.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/hooks/commands.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,63 +0,0 @@
-# Copyright 2013 Hewlett-Packard Development Company, L.P.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-
-from setuptools.command import easy_install
-
-from pbr.hooks import base
-from pbr import options
-from pbr import packaging
-
-
-class CommandsConfig(base.BaseConfig):
-
-    section = 'global'
-
-    def __init__(self, config):
-        super(CommandsConfig, self).__init__(config)
-        self.commands = self.config.get('commands', "")
-
-    def save(self):
-        self.config['commands'] = self.commands
-        super(CommandsConfig, self).save()
-
-    def add_command(self, command):
-        self.commands = "%s\n%s" % (self.commands, command)
-
-    def hook(self):
-        self.add_command('pbr.packaging.LocalEggInfo')
-        self.add_command('pbr.packaging.LocalSDist')
-        self.add_command('pbr.packaging.LocalInstallScripts')
-        self.add_command('pbr.packaging.LocalDevelop')
-        self.add_command('pbr.packaging.LocalRPMVersion')
-        self.add_command('pbr.packaging.LocalDebVersion')
-        if os.name != 'nt':
-            easy_install.get_script_args = packaging.override_get_script_args
-
-        if os.path.exists('.testr.conf') and packaging.have_testr():
-            # There is a .testr.conf file. We want to use it.
-            self.add_command('pbr.packaging.TestrTest')
-        elif self.config.get('nosetests', False) and packaging.have_nose():
-            # We seem to still have nose configured
-            self.add_command('pbr.packaging.NoseTest')
-
-        use_egg = options.get_boolean_option(
-            self.pbr_config, 'use-egg', 'PBR_USE_EGG')
-        # We always want non-egg install unless explicitly requested
-        if 'manpages' in self.pbr_config or not use_egg:
-            self.add_command('pbr.packaging.LocalInstall')
-        else:
-            self.add_command('pbr.packaging.InstallWithGit')
diff -pruN 6.1.1-2/pbr/hooks/files.py 7.0.1-2/pbr/hooks/files.py
--- 6.1.1-2/pbr/hooks/files.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/hooks/files.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,9 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import os
 import shlex
 import sys
@@ -75,16 +78,18 @@ class FilesConfig(base.BaseConfig):
                     target += os.path.sep
                 unquoted_prefix = unquote_path(source_prefix)
                 unquoted_target = unquote_path(target)
-                for (dirpath, dirnames, fnames) in os.walk(unquoted_prefix):
+                for dirpath, dirnames, fnames in os.walk(unquoted_prefix):
                     # As source_prefix is always matched, using replace with a
                     # limit of one is always going to replace the path prefix
                     # and not accidentally replace some text in the middle of
                     # the path
-                    new_prefix = dirpath.replace(unquoted_prefix,
-                                                 unquoted_target, 1)
+                    new_prefix = dirpath.replace(
+                        unquoted_prefix, unquoted_target, 1
+                    )
                     finished.append("'%s' = " % new_prefix)
                     finished.extend(
-                        [" '%s'" % os.path.join(dirpath, f) for f in fnames])
+                        [" '%s'" % os.path.join(dirpath, f) for f in fnames]
+                    )
             else:
                 finished.append(line)
 
@@ -97,7 +102,7 @@ class FilesConfig(base.BaseConfig):
         self.data_files = "%s\n  '%s'" % (self.data_files, man_page)
 
     def get_man_sections(self):
-        man_sections = dict()
+        man_sections = {}
         manpages = self.pbr_config['manpages']
         for manpage in manpages.split():
             section_number = manpage.strip()[-1]
@@ -119,7 +124,7 @@ class FilesConfig(base.BaseConfig):
 
         if 'manpages' in self.pbr_config:
             man_sections = self.get_man_sections()
-            for (section, pages) in man_sections.items():
+            for section, pages in man_sections.items():
                 manpath = get_man_section(section)
                 self.add_man_path(manpath)
                 for page in pages:
diff -pruN 6.1.1-2/pbr/hooks/metadata.py 7.0.1-2/pbr/hooks/metadata.py
--- 6.1.1-2/pbr/hooks/metadata.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/hooks/metadata.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,9 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 from pbr.hooks import base
 from pbr import packaging
 
@@ -23,10 +26,11 @@ class MetadataConfig(base.BaseConfig):
 
     def hook(self):
         self.config['version'] = packaging.get_version(
-            self.config['name'], self.config.get('version', None))
+            self.config['name'], self.config.get('version', None)
+        )
         packaging.append_text_list(
-            self.config, 'requires_dist',
-            packaging.parse_requirements())
+            self.config, 'requires_dist', packaging.parse_requirements()
+        )
 
     def get_name(self):
         return self.config['name']
diff -pruN 6.1.1-2/pbr/options.py 7.0.1-2/pbr/options.py
--- 6.1.1-2/pbr/options.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/options.py	2025-08-14 16:07:35.000000000 +0000
@@ -41,6 +41,9 @@
 # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
 # DAMAGE.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import os
 
 
@@ -48,6 +51,7 @@ TRUE_VALUES = ('true', '1', 'yes')
 
 
 def get_boolean_option(option_dict, option_name, env_name):
-    return ((option_name in option_dict and
-             option_dict[option_name][1].lower() in TRUE_VALUES) or
-            str(os.getenv(env_name)).lower() in TRUE_VALUES)
+    return (
+        option_name in option_dict
+        and option_dict[option_name][1].lower() in TRUE_VALUES
+    ) or str(os.getenv(env_name)).lower() in TRUE_VALUES
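get_boolean_option is reformatted but unchanged in behaviour: the option wins if either the setup.cfg value or the environment variable is a truthy string. A sketch, assuming the distutils (source_filename, value) tuple shape for option_dict entries:

    import os

    from pbr import options

    # setup.cfg path: distutils stores each option as a (source, value) pair.
    option_dict = {'skip_changelog': ('setup.cfg', 'True')}
    print(options.get_boolean_option(
        option_dict, 'skip_changelog', 'SKIP_WRITE_GIT_CHANGELOG'))  # True

    # environment path: any of 'true', '1', 'yes' (case-insensitive) counts.
    os.environ['SKIP_GENERATE_AUTHORS'] = '1'
    print(options.get_boolean_option(
        {}, 'skip_authors', 'SKIP_GENERATE_AUTHORS'))                # True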
diff -pruN 6.1.1-2/pbr/packaging.py 7.0.1-2/pbr/packaging.py
--- 6.1.1-2/pbr/packaging.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/packaging.py	2025-08-14 16:07:35.000000000 +0000
@@ -18,20 +18,10 @@
 Utilities with minimum-depends for use in setup.py
 """
 
+from __future__ import absolute_import
+from __future__ import print_function
 from __future__ import unicode_literals
 
-from distutils.command import install as du_install
-from distutils import log
-
-# (hberaud) do not use six here to import urlparse
-# to keep this module free from external dependencies
-# to avoid cross dependencies errors on minimal system
-# free from dependencies.
-try:
-    from urllib.parse import urlparse
-except ImportError:
-    from urlparse import urlparse
-
 import email
 import email.errors
 import os
@@ -39,25 +29,19 @@ import re
 import sys
 import warnings
 
-import pkg_resources
-import setuptools
-from setuptools.command import develop
-from setuptools.command import easy_install
-from setuptools.command import egg_info
-from setuptools.command import install
-from setuptools.command import install_scripts
-from setuptools.command import sdist
+from distutils import log
 
-from pbr import extra_files
+from pbr._compat.five import urlparse
+import pbr._compat.packaging
 from pbr import git
-from pbr import options
 import pbr.pbr_json
-from pbr import testr_command
 from pbr import version
 
 REQUIREMENTS_FILES = ('requirements.txt', 'tools/pip-requires')
-PY_REQUIREMENTS_FILES = [x % sys.version_info[0] for x in (
-    'requirements-py%d.txt', 'tools/pip-requires-py%d')]
+PY_REQUIREMENTS_FILES = [
+    x % sys.version_info[0]
+    for x in ('requirements-py%d.txt', 'tools/pip-requires-py%d')
+]
 TEST_REQUIREMENTS_FILES = ('test-requirements.txt', 'tools/test-requires')
 
 
@@ -94,11 +78,13 @@ def get_reqs_from_files(requirements_fil
     # TODO(stephenfin): Remove this in pbr 6.0+
     deprecated = [f for f in existing if f in PY_REQUIREMENTS_FILES]
     if deprecated:
-        warnings.warn('Support for \'-pyN\'-suffixed requirements files is '
-                      'removed in pbr 5.0 and these files are now ignored. '
-                      'Use environment markers instead. Conflicting files: '
-                      '%r' % deprecated,
-                      DeprecationWarning)
+        warnings.warn(
+            'Support for \'-pyN\'-suffixed requirements files is '
+            'removed in pbr 5.0 and these files are now ignored. '
+            'Use environment markers instead. Conflicting files: '
+            '%r' % deprecated,
+            DeprecationWarning,
+        )
 
     existing = [f for f in existing if f not in PY_REQUIREMENTS_FILES]
     for requirements_file in existing:
@@ -109,27 +95,28 @@ def get_reqs_from_files(requirements_fil
 
 
 def egg_fragment(match):
-    return re.sub(r'(?P<PackageName>[\w.-]+)-'
-                  r'(?P<GlobalVersion>'
-                  r'(?P<VersionTripple>'
-                  r'(?P<Major>0|[1-9][0-9]*)\.'
-                  r'(?P<Minor>0|[1-9][0-9]*)\.'
-                  r'(?P<Patch>0|[1-9][0-9]*)){1}'
-                  r'(?P<Tags>(?:\-'
-                  r'(?P<Prerelease>(?:(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|'
-                  r'(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|'
-                  r'(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)(?:[0-9A-Za-z-]+)){1}'
-                  r'(?:\.(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|'
-                  r'\.(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|'
-                  r'\.(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)'
-                  r'(?:[0-9A-Za-z-]+))*){1}){0,1}(?:\+'
-                  r'(?P<Meta>(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))){0,1}))',
-                  r'\g<PackageName>>=\g<GlobalVersion>',
-                  match.groups()[-1])
+    return re.sub(
+        r'(?P<PackageName>[\w.-]+)-'
+        r'(?P<GlobalVersion>'
+        r'(?P<VersionTripple>'
+        r'(?P<Major>0|[1-9][0-9]*)\.'
+        r'(?P<Minor>0|[1-9][0-9]*)\.'
+        r'(?P<Patch>0|[1-9][0-9]*)){1}'
+        r'(?P<Tags>(?:\-'
+        r'(?P<Prerelease>(?:(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|'
+        r'(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|'
+        r'(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)(?:[0-9A-Za-z-]+)){1}'
+        r'(?:\.(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|'
+        r'\.(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|'
+        r'\.(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)'
+        r'(?:[0-9A-Za-z-]+))*){1}){0,1}(?:\+'
+        r'(?P<Meta>(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))){0,1}))',
+        r'\g<PackageName>>=\g<GlobalVersion>',
+        match.groups()[-1],
+    )
 
 
 def parse_requirements(requirements_files=None, strip_markers=False):
-
     if requirements_files is None:
         requirements_files = get_requirements_files()
 
@@ -140,8 +127,9 @@ def parse_requirements(requirements_file
             continue
 
         # Ignore index URL lines
-        if re.match(r'^\s*(-i|--index-url|--extra-index-url|--find-links).*',
-                    line):
+        if re.match(
+            r'^\s*(-i|--index-url|--extra-index-url|--find-links).*', line
+        ):
             continue
 
         # Handle nested requirements files such as:
@@ -149,13 +137,11 @@ def parse_requirements(requirements_file
         if line.startswith('-r'):
             req_file = line.partition(' ')[2]
             requirements += parse_requirements(
-                [req_file], strip_markers=strip_markers)
+                [req_file], strip_markers=strip_markers
+            )
             continue
 
-        try:
-            project_name = pkg_resources.Requirement.parse(line).project_name
-        except ValueError:
-            project_name = None
+        project_name = pbr._compat.packaging.extract_project_name(line)
 
         # For the requirements list, we need to inject only the portion
         # after egg= so that distutils knows the package it's looking for
@@ -189,8 +175,7 @@ def parse_requirements(requirements_file
                 line = line[:semi_pos]
             requirements.append(line)
         else:
-            log.info(
-                '[pbr] Excluding %s: %s' % (project_name, reason))
+            log.info('[pbr] Excluding %s: %s' % (project_name, reason))
 
     return requirements
 
@@ -198,6 +183,7 @@ def parse_requirements(requirements_file
 def parse_dependency_links(requirements_files=None):
     if requirements_files is None:
         requirements_files = get_requirements_files()
+
     dependency_links = []
     # dependency_links inject alternate locations to find packages listed
     # in requirements
@@ -214,458 +200,6 @@ def parse_dependency_links(requirements_
     return dependency_links
 
 
-class InstallWithGit(install.install):
-    """Extracts ChangeLog and AUTHORS from git then installs.
-
-    This is useful for e.g. readthedocs where the package is
-    installed and then docs built.
-    """
-
-    command_name = 'install'
-
-    def run(self):
-        _from_git(self.distribution)
-        return install.install.run(self)
-
-
-class LocalInstall(install.install):
-    """Runs python setup.py install in a sensible manner.
-
-    Force a non-egg installed in the manner of
-    single-version-externally-managed, which allows us to install manpages
-    and config files.
-    """
-
-    command_name = 'install'
-
-    def run(self):
-        _from_git(self.distribution)
-        return du_install.install.run(self)
-
-
-class TestrTest(testr_command.Testr):
-    """Make setup.py test do the right thing."""
-
-    command_name = 'test'
-    description = 'DEPRECATED: Run unit tests using testr'
-
-    def run(self):
-        warnings.warn('testr integration is deprecated in pbr 4.2 and will '
-                      'be removed in a future release. Please call your test '
-                      'runner directly',
-                      DeprecationWarning)
-
-        # Can't use super - base class old-style class
-        testr_command.Testr.run(self)
-
-
-class LocalRPMVersion(setuptools.Command):
-    __doc__ = """Output the rpm *compatible* version string of this package"""
-    description = __doc__
-
-    user_options = []
-    command_name = "rpm_version"
-
-    def run(self):
-        log.info("[pbr] Extracting rpm version")
-        name = self.distribution.get_name()
-        print(version.VersionInfo(name).semantic_version().rpm_string())
-
-    def initialize_options(self):
-        pass
-
-    def finalize_options(self):
-        pass
-
-
-class LocalDebVersion(setuptools.Command):
-    __doc__ = """Output the deb *compatible* version string of this package"""
-    description = __doc__
-
-    user_options = []
-    command_name = "deb_version"
-
-    def run(self):
-        log.info("[pbr] Extracting deb version")
-        name = self.distribution.get_name()
-        print(version.VersionInfo(name).semantic_version().debian_string())
-
-    def initialize_options(self):
-        pass
-
-    def finalize_options(self):
-        pass
-
-
-def have_testr():
-    return testr_command.have_testr
-
-
-try:
-    from nose import commands
-
-    class NoseTest(commands.nosetests):
-        """Fallback test runner if testr is a no-go."""
-
-        command_name = 'test'
-        description = 'DEPRECATED: Run unit tests using nose'
-
-        def run(self):
-            warnings.warn('nose integration in pbr is deprecated. Please use '
-                          'the native nose setuptools configuration or call '
-                          'nose directly',
-                          DeprecationWarning)
-
-            # Can't use super - base class old-style class
-            commands.nosetests.run(self)
-
-    _have_nose = True
-
-except ImportError:
-    _have_nose = False
-
-
-def have_nose():
-    return _have_nose
-
-
-_wsgi_text = """#PBR Generated from %(group)r
-
-import threading
-
-from %(module_name)s import %(import_target)s
-
-if __name__ == "__main__":
-    import argparse
-    import socket
-    import sys
-    import wsgiref.simple_server as wss
-
-    parser = argparse.ArgumentParser(
-        description=%(import_target)s.__doc__,
-        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
-        usage='%%(prog)s [-h] [--port PORT] [--host IP] -- [passed options]')
-    parser.add_argument('--port', '-p', type=int, default=8000,
-                        help='TCP port to listen on')
-    parser.add_argument('--host', '-b', default='',
-                        help='IP to bind the server to')
-    parser.add_argument('args',
-                        nargs=argparse.REMAINDER,
-                        metavar='-- [passed options]',
-                        help="'--' is the separator of the arguments used "
-                        "to start the WSGI server and the arguments passed "
-                        "to the WSGI application.")
-    args = parser.parse_args()
-    if args.args:
-        if args.args[0] == '--':
-            args.args.pop(0)
-        else:
-            parser.error("unrecognized arguments: %%s" %% ' '.join(args.args))
-    sys.argv[1:] = args.args
-    server = wss.make_server(args.host, args.port, %(invoke_target)s())
-
-    print("*" * 80)
-    print("STARTING test server %(module_name)s.%(invoke_target)s")
-    url = "http://%%s:%%d/" %% (server.server_name, server.server_port)
-    print("Available at %%s" %% url)
-    print("DANGER! For testing only, do not use in production")
-    print("*" * 80)
-    sys.stdout.flush()
-
-    server.serve_forever()
-else:
-    application = None
-    app_lock = threading.Lock()
-
-    with app_lock:
-        if application is None:
-            application = %(invoke_target)s()
-
-"""
-
-_script_text = """# PBR Generated from %(group)r
-
-import sys
-
-from %(module_name)s import %(import_target)s
-
-
-if __name__ == "__main__":
-    sys.exit(%(invoke_target)s())
-"""
-
-
-# the following allows us to specify different templates per entry
-# point group when generating pbr scripts.
-ENTRY_POINTS_MAP = {
-    'console_scripts': _script_text,
-    'gui_scripts': _script_text,
-    'wsgi_scripts': _wsgi_text
-}
-
-
-def generate_script(group, entry_point, header, template):
-    """Generate the script based on the template.
-
-    :param str group:
-        The entry-point group name, e.g., "console_scripts".
-    :param str header:
-        The first line of the script, e.g., "!#/usr/bin/env python".
-    :param str template:
-        The script template.
-    :returns:
-        The templated script content
-    :rtype:
-        str
-    """
-    if not entry_point.attrs or len(entry_point.attrs) > 2:
-        raise ValueError("Script targets must be of the form "
-                         "'func' or 'Class.class_method'.")
-    script_text = template % dict(
-        group=group,
-        module_name=entry_point.module_name,
-        import_target=entry_point.attrs[0],
-        invoke_target='.'.join(entry_point.attrs),
-    )
-    return header + script_text
-
-
-def override_get_script_args(
-        dist, executable=os.path.normpath(sys.executable)):
-    """Override entrypoints console_script."""
-    # get_script_header() is deprecated since Setuptools 12.0
-    try:
-        header = easy_install.ScriptWriter.get_header("", executable)
-    except AttributeError:
-        header = easy_install.get_script_header("", executable)
-    for group, template in ENTRY_POINTS_MAP.items():
-        for name, ep in dist.get_entry_map(group).items():
-            yield (name, generate_script(group, ep, header, template))
-
-
-class LocalDevelop(develop.develop):
-
-    command_name = 'develop'
-
-    def install_wrapper_scripts(self, dist):
-        if sys.platform == 'win32':
-            return develop.develop.install_wrapper_scripts(self, dist)
-        if not self.exclude_scripts:
-            for args in override_get_script_args(dist):
-                self.write_script(*args)
-
-
-class LocalInstallScripts(install_scripts.install_scripts):
-    """Intercepts console scripts entry_points."""
-    command_name = 'install_scripts'
-
-    def _make_wsgi_scripts_only(self, dist, executable):
-        # get_script_header() is deprecated since Setuptools 12.0
-        try:
-            header = easy_install.ScriptWriter.get_header("", executable)
-        except AttributeError:
-            header = easy_install.get_script_header("", executable)
-        wsgi_script_template = ENTRY_POINTS_MAP['wsgi_scripts']
-        for name, ep in dist.get_entry_map('wsgi_scripts').items():
-            content = generate_script(
-                'wsgi_scripts', ep, header, wsgi_script_template)
-            self.write_script(name, content)
-
-    def run(self):
-        import distutils.command.install_scripts
-
-        self.run_command("egg_info")
-        if self.distribution.scripts:
-            # run first to set up self.outfiles
-            distutils.command.install_scripts.install_scripts.run(self)
-        else:
-            self.outfiles = []
-
-        ei_cmd = self.get_finalized_command("egg_info")
-        dist = pkg_resources.Distribution(
-            ei_cmd.egg_base,
-            pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
-            ei_cmd.egg_name, ei_cmd.egg_version,
-        )
-        bs_cmd = self.get_finalized_command('build_scripts')
-        executable = getattr(
-            bs_cmd, 'executable', easy_install.sys_executable)
-        if 'bdist_wheel' in self.distribution.have_run:
-            # We're building a wheel which has no way of generating mod_wsgi
-            # scripts for us. Let's build them.
-            # NOTE(sigmavirus24): This needs to happen here because, as the
-            # comment below indicates, no_ep is True when building a wheel.
-            self._make_wsgi_scripts_only(dist, executable)
-
-        if self.no_ep:
-            # no_ep is True if we're installing into an .egg file or building
-            # a .whl file, in those cases, we do not want to build all of the
-            # entry-points listed for this package.
-            return
-
-        if os.name != 'nt':
-            get_script_args = override_get_script_args
-        else:
-            get_script_args = easy_install.get_script_args
-            executable = '"%s"' % executable
-
-        for args in get_script_args(dist, executable):
-            self.write_script(*args)
-
-
-class LocalManifestMaker(egg_info.manifest_maker):
-    """Add any files that are in git and some standard sensible files."""
-
-    def _add_pbr_defaults(self):
-        for template_line in [
-            'include AUTHORS',
-            'include ChangeLog',
-            'exclude .gitignore',
-            'exclude .gitreview',
-            'global-exclude *.pyc'
-        ]:
-            self.filelist.process_template_line(template_line)
-
-    def add_defaults(self):
-        """Add all the default files to self.filelist:
-
-        Extends the functionality provided by distutils to also included
-        additional sane defaults, such as the ``AUTHORS`` and ``ChangeLog``
-        files generated by *pbr*.
-
-        Warns if (``README`` or ``README.txt``) or ``setup.py`` are missing;
-        everything else is optional.
-        """
-        option_dict = self.distribution.get_option_dict('pbr')
-
-        sdist.sdist.add_defaults(self)
-        self.filelist.append(self.template)
-        self.filelist.append(self.manifest)
-        self.filelist.extend(extra_files.get_extra_files())
-        should_skip = options.get_boolean_option(option_dict, 'skip_git_sdist',
-                                                 'SKIP_GIT_SDIST')
-        if not should_skip:
-            rcfiles = git._find_git_files()
-            if rcfiles:
-                self.filelist.extend(rcfiles)
-        elif os.path.exists(self.manifest):
-            self.read_manifest()
-        ei_cmd = self.get_finalized_command('egg_info')
-        self._add_pbr_defaults()
-        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
-
-
-class LocalEggInfo(egg_info.egg_info):
-    """Override the egg_info command to regenerate SOURCES.txt sensibly."""
-
-    command_name = 'egg_info'
-
-    def find_sources(self):
-        """Generate SOURCES.txt only if there isn't one already.
-
-        If we are in an sdist command, then we always want to update
-        SOURCES.txt. If we are not in an sdist command, then it doesn't
-        matter one flip, and is actually destructive.
-        However, if we're in a git context, it's always the right thing to do
-        to recreate SOURCES.txt
-        """
-        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
-        if (not os.path.exists(manifest_filename) or
-                os.path.exists('.git') or
-                'sdist' in sys.argv):
-            log.info("[pbr] Processing SOURCES.txt")
-            mm = LocalManifestMaker(self.distribution)
-            mm.manifest = manifest_filename
-            mm.run()
-            self.filelist = mm.filelist
-        else:
-            log.info("[pbr] Reusing existing SOURCES.txt")
-            self.filelist = egg_info.FileList()
-            with open(manifest_filename, 'r') as fil:
-                for entry in fil.read().split('\n'):
-                    self.filelist.append(entry)
-
-
-def _from_git(distribution):
-    option_dict = distribution.get_option_dict('pbr')
-    changelog = git._iter_log_oneline()
-    if changelog:
-        changelog = git._iter_changelog(changelog)
-    git.write_git_changelog(option_dict=option_dict, changelog=changelog)
-    git.generate_authors(option_dict=option_dict)
-
-
-class LocalSDist(sdist.sdist):
-    """Builds the ChangeLog and Authors files from VC first."""
-
-    command_name = 'sdist'
-
-    def checking_reno(self):
-        """Ensure reno is installed and configured.
-
-        We can't run reno-based commands if reno isn't installed/available, and
-        don't want to if the user isn't using it.
-        """
-        if hasattr(self, '_has_reno'):
-            return self._has_reno
-
-        option_dict = self.distribution.get_option_dict('pbr')
-        should_skip = options.get_boolean_option(option_dict, 'skip_reno',
-                                                 'SKIP_GENERATE_RENO')
-        if should_skip:
-            self._has_reno = False
-            return False
-
-        try:
-            # versions of reno witout this module will not have the required
-            # feature, hence the import
-            from reno import setup_command  # noqa
-        except ImportError:
-            log.info('[pbr] reno was not found or is too old. Skipping '
-                     'release notes')
-            self._has_reno = False
-            return False
-
-        conf, output_file, cache_file = setup_command.load_config(
-            self.distribution)
-
-        if not os.path.exists(os.path.join(conf.reporoot, conf.notespath)):
-            log.info('[pbr] reno does not appear to be configured. Skipping '
-                     'release notes')
-            self._has_reno = False
-            return False
-
-        self._files = [output_file, cache_file]
-
-        log.info('[pbr] Generating release notes')
-        self._has_reno = True
-
-        return True
-
-    sub_commands = [('build_reno', checking_reno)] + sdist.sdist.sub_commands
-
-    def run(self):
-        _from_git(self.distribution)
-        # sdist.sdist is an old style class, can't use super()
-        sdist.sdist.run(self)
-
-    def make_distribution(self):
-        # This is included in make_distribution because setuptools doesn't use
-        # 'get_file_list'. As such, this is the only hook point that runs after
-        # the commands in 'sub_commands'
-        if self.checking_reno():
-            self.filelist.extend(self._files)
-            self.filelist.sort()
-        sdist.sdist.make_distribution(self)
-
-
-LocalBuildDoc = None
-
-
-def have_sphinx():
-    return False
-
-
 def _get_increment_kwargs(git_dir, tag):
     """Calculate the sort of semver increment needed from git history.
 
@@ -679,24 +213,27 @@ def _get_increment_kwargs(git_dir, tag):
         version_spec = tag + "..HEAD"
     else:
         version_spec = "HEAD"
+
     # Get the raw body of the commit messages so that we don't have to
     # parse out any formatting whitespace and to avoid user settings on
     # git log output affecting our ability to have working sem ver headers.
-    changelog = git._run_git_command(['log', '--pretty=%B', version_spec],
-                                     git_dir)
+    changelog = git._run_git_command(
+        ['log', '--pretty=%B', version_spec], git_dir
+    )
     symbols = set()
     header = 'sem-ver:'
     for line in changelog.split("\n"):
         line = line.lower().strip()
         if not line.lower().strip().startswith(header):
             continue
-        new_symbols = line[len(header):].strip().split(",")
+        new_symbols = line[len(header) :].strip().split(",")
         symbols.update([symbol.strip() for symbol in new_symbols])
 
     def _handle_symbol(symbol, symbols, impact):
         if symbol in symbols:
             result[impact] = True
             symbols.discard(symbol)
+
     _handle_symbol('bugfix', symbols, 'patch')
     _handle_symbol('feature', symbols, 'minor')
     _handle_symbol('deprecation', symbols, 'minor')
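
As a rough illustration of the Sem-Ver footer handling above (the commit
message here is hypothetical), the parsing loop reduces to:

    # Minimal sketch of the sem-ver footer parsing shown above.
    changelog = "Add frobnicator support\n\nSem-Ver: feature, deprecation\n"
    header = 'sem-ver:'
    symbols = set()
    for line in changelog.split("\n"):
        line = line.lower().strip()
        if line.startswith(header):
            symbols.update(s.strip() for s in line[len(header):].split(","))
    print(symbols)  # {'feature', 'deprecation'} -> a 'minor' increment
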
@@ -720,7 +257,7 @@ def _get_revno_and_last_tag(git_dir):
     row_count = 0
     for row_count, (ignored, tag_set, ignored) in enumerate(changelog):
         version_tags = set()
-        semver_to_tag = dict()
+        semver_to_tag = {}
         for tag in list(tag_set):
             try:
                 semver = version.SemanticVersion.from_pip_string(tag)
@@ -728,8 +265,10 @@ def _get_revno_and_last_tag(git_dir):
                 version_tags.add(semver)
             except Exception:
                 pass
+
         if version_tags:
             return semver_to_tag[max(version_tags)], row_count
+
     return "", row_count
 
 
@@ -754,12 +293,14 @@ def _get_version_from_git_target(git_dir
         new_version = last_semver
     else:
         new_version = last_semver.increment(
-            **_get_increment_kwargs(git_dir, tag))
+            **_get_increment_kwargs(git_dir, tag)
+        )
     if target_version is not None and new_version > target_version:
         raise ValueError(
             "git history requires a target version of %(new)s, but target "
-            "version is %(target)s" %
-            dict(new=new_version, target=target_version))
+            "version is %(target)s"
+            % {'new': new_version, 'target': target_version}
+        )
     if distance == 0:
         return last_semver
     new_dev = new_version.to_dev(distance)
@@ -786,14 +327,15 @@ def _get_version_from_git(pre_version=No
     if git_dir:
         try:
             tagged = git._run_git_command(
-                ['describe', '--exact-match'], git_dir,
-                throw_on_error=True).replace('-', '.')
+                ['describe', '--exact-match'], git_dir, throw_on_error=True
+            ).replace('-', '.')
             target_version = version.SemanticVersion.from_pip_string(tagged)
         except Exception:
             if pre_version:
                 # not released yet - use pre_version as the target
                 target_version = version.SemanticVersion.from_pip_string(
-                    pre_version)
+                    pre_version
+                )
             else:
                 # not released yet - just calculate from git history
                 target_version = None
@@ -847,8 +389,8 @@ def get_version(package_name, pre_versio
         version will be the next release.
     """
     version = os.environ.get(
-        "PBR_VERSION",
-        os.environ.get("OSLO_PACKAGE_VERSION", None))
+        "PBR_VERSION", os.environ.get("OSLO_PACKAGE_VERSION", None)
+    )
     if version:
         return version
     version = _get_version_from_pkg_metadata(package_name)
@@ -863,13 +405,14 @@ def get_version(package_name, pre_versio
         version = version.encode('utf-8')
     if version:
         return version
-    raise Exception("Versioning for this project requires either an sdist"
-                    " tarball, or access to an upstream git repository."
-                    " It's also possible that there is a mismatch between"
-                    " the package name in setup.cfg and the argument given"
-                    " to pbr.version.VersionInfo. Project name {name} was"
-                    " given, but was not able to be found.".format(
-                        name=package_name))
+    raise Exception(
+        "Versioning for this project requires either an sdist "
+        "tarball, or access to an upstream git repository. "
+        "It's also possible that there is a mismatch between "
+        "the package name in setup.cfg and the argument given "
+        "to pbr.version.VersionInfo. Project name {name} was "
+        "given, but was not able to be found.".format(name=package_name)
+    )
 
 
 # This is added because pbr uses pbr to install itself. That means that
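
A minimal sketch of the environment override read at the top of get_version()
(the version string is arbitrary):

    # With PBR_VERSION (or the older OSLO_PACKAGE_VERSION) set, get_version()
    # returns it directly and never consults git or sdist metadata.
    import os

    os.environ['PBR_VERSION'] = '1.2.3'
    version = os.environ.get(
        "PBR_VERSION", os.environ.get("OSLO_PACKAGE_VERSION", None)
    )
    print(version)  # 1.2.3
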
diff -pruN 6.1.1-2/pbr/pbr_json.py 7.0.1-2/pbr/pbr_json.py
--- 6.1.1-2/pbr/pbr_json.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/pbr_json.py	2025-08-14 16:07:35.000000000 +0000
@@ -14,6 +14,9 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import json
 
 from pbr import git
@@ -25,7 +28,7 @@ def write_pbr_json(cmd, basename, filena
     git_dir = git._run_git_functions()
     if not git_dir:
         return
-    values = dict()
+    values = {}
     git_version = git.get_git_short_sha(git_dir)
     is_release = git.get_is_release(git_dir)
     if git_version is not None:
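
For orientation, a sketch of the kind of payload write_pbr_json() ends up
serializing, assuming the short SHA and release flag are stored under
like-named keys (the values here are made up):

    import json

    values = {'git_version': '0123abc', 'is_release': False}
    print(json.dumps(values))  # {"git_version": "0123abc", "is_release": false}
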
diff -pruN 6.1.1-2/pbr/sphinxext.py 7.0.1-2/pbr/sphinxext.py
--- 6.1.1-2/pbr/sphinxext.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/sphinxext.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,18 +13,14 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-# (hberaud) do not use six here to import configparser
-# to keep this module free from external dependencies
-# to avoid cross dependencies errors on minimal system
-# free from dependencies.
-try:
-    import configparser
-except ImportError:
-    import ConfigParser as configparser
+from __future__ import absolute_import
+from __future__ import print_function
+
 import os.path
 
 from sphinx.util import logging
 
+from pbr._compat.five import configparser
 import pbr.version
 
 _project = None
@@ -41,8 +37,9 @@ def _find_setup_cfg(srcdir):
     # an sdist or wheel? Perhaps we should check for 'PKG-INFO' or
     # 'METADATA' files, a la 'pbr.packaging._get_version_from_pkg_metadata'
     for path in [
-            os.path.join(srcdir, os.pardir, 'setup.cfg'),
-            os.path.join(srcdir, os.pardir, os.pardir, 'setup.cfg')]:
+        os.path.join(srcdir, os.pardir, 'setup.cfg'),
+        os.path.join(srcdir, os.pardir, os.pardir, 'setup.cfg'),
+    ]:
         if os.path.exists(path):
             return path
 
@@ -63,8 +60,9 @@ def _get_project_name(srcdir):
 
         path = _find_setup_cfg(srcdir)
         if not path or not parser.read(path):
-            logger.info('Could not find a setup.cfg to extract project name '
-                        'from')
+            logger.info(
+                'Could not find a setup.cfg to extract project name from'
+            )
             return None
 
         try:
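
For context, a sketch of how a project's Sphinx conf.py would enable this
extension so that name and version are resolved from the setup.cfg located by
_find_setup_cfg():

    # docs/source/conf.py (illustrative)
    extensions = [
        'pbr.sphinxext',
    ]
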
diff -pruN 6.1.1-2/pbr/testr_command.py 7.0.1-2/pbr/testr_command.py
--- 6.1.1-2/pbr/testr_command.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/testr_command.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,167 +0,0 @@
-# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Copyright (c) 2013 Testrepository Contributors
-#
-# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
-# license at the users choice. A copy of both licenses are available in the
-# project source as Apache-2.0 and BSD. You may not use this file except in
-# compliance with one of these two licences.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-# license you chose for the specific language governing permissions and
-# limitations under that license.
-
-"""setuptools/distutils command to run testr via setup.py
-
-PBR will hook in the Testr class to provide "setup.py test" when
-.testr.conf is present in the repository (see pbr/hooks/commands.py).
-
-If we are activated but testrepository is not installed, we provide a
-sensible error.
-
-You can pass --coverage which will also export PYTHON='coverage run
---source <your package>' and automatically combine the coverage from
-each testr backend test runner after the run completes.
-
-"""
-
-from distutils import cmd
-import distutils.errors
-import logging
-import os
-import sys
-import warnings
-
-logger = logging.getLogger(__name__)
-
-
-class TestrReal(cmd.Command):
-
-    description = "DEPRECATED: Run unit tests using testr"
-
-    user_options = [
-        ('coverage', None, "Replace PYTHON with coverage and merge coverage "
-         "from each testr worker."),
-        ('testr-args=', 't', "Run 'testr' with these args"),
-        ('omit=', 'o', "Files to omit from coverage calculations"),
-        ('coverage-package-name=', None, "Use this name to select packages "
-                                         "for coverage (one or more, "
-                                         "comma-separated)"),
-        ('slowest', None, "Show slowest test times after tests complete."),
-        ('no-parallel', None, "Run testr serially"),
-        ('log-level=', 'l', "Log level (default: info)"),
-    ]
-
-    boolean_options = ['coverage', 'slowest', 'no_parallel']
-
-    def _run_testr(self, *args):
-        logger.debug("_run_testr called with args = %r", args)
-        return commands.run_argv([sys.argv[0]] + list(args),
-                                 sys.stdin, sys.stdout, sys.stderr)
-
-    def initialize_options(self):
-        self.testr_args = None
-        self.coverage = None
-        self.omit = ""
-        self.slowest = None
-        self.coverage_package_name = None
-        self.no_parallel = None
-        self.log_level = 'info'
-
-    def finalize_options(self):
-        self.log_level = getattr(
-            logging,
-            self.log_level.upper(),
-            logging.INFO)
-        logging.basicConfig(level=self.log_level)
-        logger.debug("finalize_options called")
-        if self.testr_args is None:
-            self.testr_args = []
-        else:
-            self.testr_args = self.testr_args.split()
-        if self.omit:
-            self.omit = "--omit=%s" % self.omit
-        logger.debug("finalize_options: self.__dict__ = %r", self.__dict__)
-
-    def run(self):
-        """Set up testr repo, then run testr."""
-        logger.debug("run called")
-
-        warnings.warn('testr integration in pbr is deprecated. Please use '
-                      'the \'testr\' setup command or call testr directly',
-                      DeprecationWarning)
-
-        if not os.path.isdir(".testrepository"):
-            self._run_testr("init")
-
-        if self.coverage:
-            self._coverage_before()
-        if not self.no_parallel:
-            testr_ret = self._run_testr("run", "--parallel", *self.testr_args)
-        else:
-            testr_ret = self._run_testr("run", *self.testr_args)
-        if testr_ret:
-            raise distutils.errors.DistutilsError(
-                "testr failed (%d)" % testr_ret)
-        if self.slowest:
-            print("Slowest Tests")
-            self._run_testr("slowest")
-        if self.coverage:
-            self._coverage_after()
-
-    def _coverage_before(self):
-        logger.debug("_coverage_before called")
-        package = self.distribution.get_name()
-        if package.startswith('python-'):
-            package = package[7:]
-
-        # Use this as coverage package name
-        if self.coverage_package_name:
-            package = self.coverage_package_name
-        options = "--source %s --parallel-mode" % package
-        os.environ['PYTHON'] = ("coverage run %s" % options)
-        logger.debug("os.environ['PYTHON'] = %r", os.environ['PYTHON'])
-
-    def _coverage_after(self):
-        logger.debug("_coverage_after called")
-        os.system("coverage combine")
-        os.system("coverage html -d ./cover %s" % self.omit)
-        os.system("coverage xml -o ./cover/coverage.xml %s" % self.omit)
-
-
-class TestrFake(cmd.Command):
-    description = "Run unit tests using testr"
-    user_options = []
-
-    def initialize_options(self):
-        pass
-
-    def finalize_options(self):
-        pass
-
-    def run(self):
-        print("Install testrepository to run 'testr' command properly.")
-
-
-try:
-    from testrepository import commands
-    have_testr = True
-    Testr = TestrReal
-except ImportError:
-    have_testr = False
-    Testr = TestrFake
diff -pruN 6.1.1-2/pbr/tests/__init__.py 7.0.1-2/pbr/tests/__init__.py
--- 6.1.1-2/pbr/tests/__init__.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/__init__.py	2025-08-14 16:07:35.000000000 +0000
@@ -11,6 +11,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import os
 
 import testscenarios
diff -pruN 6.1.1-2/pbr/tests/_compat/test_commands.py 7.0.1-2/pbr/tests/_compat/test_commands.py
--- 6.1.1-2/pbr/tests/_compat/test_commands.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/_compat/test_commands.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,101 @@
+# Copyright (c) 2013 New Dream Network, LLC (DreamHost)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+import pkg_resources
+import testtools
+
+from pbr._compat import commands
+
+
+class TestPackagingHelpers(testtools.TestCase):
+
+    def test_generate_script(self):
+        group = 'console_scripts'
+        entry_point = pkg_resources.EntryPoint(
+            name='test-ep',
+            module_name='pbr.packaging',
+            attrs=('LocalInstallScripts',),
+        )
+        header = '#!/usr/bin/env fake-header\n'
+        template = (
+            '%(group)s %(module_name)s %(import_target)s %(invoke_target)s'
+        )
+
+        generated_script = commands.generate_script(
+            group, entry_point, header, template
+        )
+
+        expected_script = (
+            '#!/usr/bin/env fake-header\nconsole_scripts pbr.packaging '
+            'LocalInstallScripts LocalInstallScripts'
+        )
+        self.assertEqual(expected_script, generated_script)
+
+    def test_generate_script_validates_expectations(self):
+        group = 'console_scripts'
+        entry_point = pkg_resources.EntryPoint(
+            name='test-ep', module_name='pbr.packaging'
+        )
+        header = '#!/usr/bin/env fake-header\n'
+        template = (
+            '%(group)s %(module_name)s %(import_target)s %(invoke_target)s'
+        )
+        self.assertRaises(
+            ValueError,
+            commands.generate_script,
+            group,
+            entry_point,
+            header,
+            template,
+        )
+
+        entry_point = pkg_resources.EntryPoint(
+            name='test-ep',
+            module_name='pbr.packaging',
+            attrs=('attr1', 'attr2', 'attr3'),
+        )
+        self.assertRaises(
+            ValueError,
+            commands.generate_script,
+            group,
+            entry_point,
+            header,
+            template,
+        )
diff -pruN 6.1.1-2/pbr/tests/base.py 7.0.1-2/pbr/tests/base.py
--- 6.1.1-2/pbr/tests/base.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/base.py	2025-08-14 16:07:35.000000000 +0000
@@ -39,34 +39,20 @@
 
 """Common utilities used in testing"""
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import os
 import shutil
-import subprocess
 import sys
 
 import fixtures
 import testresources
 import testtools
-from testtools import content
 
 from pbr import options
 
 
-class DiveDir(fixtures.Fixture):
-    """Dive into given directory and return back on cleanup.
-
-    :ivar path: The target directory.
-    """
-
-    def __init__(self, path):
-        self.path = path
-
-    def setUp(self):
-        super(DiveDir, self).setUp()
-        self.addCleanup(os.chdir, os.getcwd())
-        os.chdir(self.path)
-
-
 class BaseTestCase(testtools.TestCase, testresources.ResourcedTestCase):
 
     def setUp(self):
@@ -76,8 +62,10 @@ class BaseTestCase(testtools.TestCase, t
             test_timeout = int(test_timeout)
         except ValueError:
             # If timeout value is invalid, fail hard.
-            print("OS_TEST_TIMEOUT set to invalid value"
-                  " defaulting to no timeout")
+            print(
+                "OS_TEST_TIMEOUT set to invalid value"
+                " defaulting to no timeout"
+            )
             test_timeout = 0
         if test_timeout > 0:
             self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
@@ -88,8 +76,7 @@ class BaseTestCase(testtools.TestCase, t
         if os.environ.get('OS_STDERR_CAPTURE') in options.TRUE_VALUES:
             stderr = self.useFixture(fixtures.StringStream('stderr')).stream
             self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
-        self.log_fixture = self.useFixture(
-            fixtures.FakeLogger('pbr'))
+        self.log_fixture = self.useFixture(fixtures.FakeLogger('pbr'))
 
         # Older git does not have config --local, so create a temporary home
         # directory to permit using git config --global without stepping on
@@ -104,8 +91,10 @@ class BaseTestCase(testtools.TestCase, t
 
         self.temp_dir = self.useFixture(fixtures.TempDir()).path
         self.package_dir = os.path.join(self.temp_dir, 'testpackage')
-        shutil.copytree(os.path.join(os.path.dirname(__file__), 'testpackage'),
-                        self.package_dir)
+        shutil.copytree(
+            os.path.join(os.path.dirname(__file__), 'testpackage'),
+            self.package_dir,
+        )
         self.addCleanup(os.chdir, os.getcwd())
         os.chdir(self.package_dir)
         self.addCleanup(self._discard_testpackage)
@@ -124,103 +113,5 @@ class BaseTestCase(testtools.TestCase, t
         # Remove pbr.testpackage from sys.modules so that it can be freshly
         # re-imported by the next test
         for k in list(sys.modules):
-            if (k == 'pbr_testpackage' or
-                    k.startswith('pbr_testpackage.')):
+            if k == 'pbr_testpackage' or k.startswith('pbr_testpackage.'):
                 del sys.modules[k]
-
-    def run_pbr(self, *args, **kwargs):
-        return self._run_cmd('pbr', args, **kwargs)
-
-    def run_setup(self, *args, **kwargs):
-        return self._run_cmd(sys.executable, ('setup.py',) + args, **kwargs)
-
-    def _run_cmd(self, cmd, args=[], allow_fail=True, cwd=None):
-        """Run a command in the root of the test working copy.
-
-        Runs a command, with the given argument list, in the root of the test
-        working copy--returns the stdout and stderr streams and the exit code
-        from the subprocess.
-
-        :param cwd: If falsy run within the test package dir, otherwise run
-            within the named path.
-        """
-        cwd = cwd or self.package_dir
-        result = _run_cmd([cmd] + list(args), cwd=cwd)
-        if result[2] and not allow_fail:
-            raise Exception("Command failed retcode=%s" % result[2])
-        return result
-
-
-class CapturedSubprocess(fixtures.Fixture):
-    """Run a process and capture its output.
-
-    :attr stdout: The output (a string).
-    :attr stderr: The standard error (a string).
-    :attr returncode: The return code of the process.
-
-    Note that stdout and stderr are decoded from the bytestrings subprocess
-    returns using error=replace.
-    """
-
-    def __init__(self, label, *args, **kwargs):
-        """Create a CapturedSubprocess.
-
-        :param label: A label for the subprocess in the test log. E.g. 'foo'.
-        :param *args: The *args to pass to Popen.
-        :param **kwargs: The **kwargs to pass to Popen.
-        """
-        super(CapturedSubprocess, self).__init__()
-        self.label = label
-        self.args = args
-        self.kwargs = kwargs
-        self.kwargs['stderr'] = subprocess.PIPE
-        self.kwargs['stdin'] = subprocess.PIPE
-        self.kwargs['stdout'] = subprocess.PIPE
-
-    def setUp(self):
-        super(CapturedSubprocess, self).setUp()
-        proc = subprocess.Popen(*self.args, **self.kwargs)
-        out, err = proc.communicate()
-        self.out = out.decode('utf-8', 'replace')
-        self.err = err.decode('utf-8', 'replace')
-        self.addDetail(self.label + '-stdout', content.text_content(self.out))
-        self.addDetail(self.label + '-stderr', content.text_content(self.err))
-        self.returncode = proc.returncode
-        if proc.returncode:
-            raise AssertionError(
-                'Failed process args=%r, kwargs=%r, returncode=%s' % (
-                    self.args, self.kwargs, proc.returncode))
-        self.addCleanup(delattr, self, 'out')
-        self.addCleanup(delattr, self, 'err')
-        self.addCleanup(delattr, self, 'returncode')
-
-
-def _run_cmd(args, cwd):
-    """Run the command args in cwd.
-
-    :param args: The command to run e.g. ['git', 'status']
-    :param cwd: The directory to run the comamnd in.
-    :return: ((stdout, stderr), returncode)
-    """
-    print('Running %s' % ' '.join(args))
-    p = subprocess.Popen(
-        args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE, cwd=cwd)
-    streams = tuple(s.decode('latin1').strip() for s in p.communicate())
-    print('STDOUT:')
-    print(streams[0])
-    print('STDERR:')
-    print(streams[1])
-    return (streams) + (p.returncode,)
-
-
-def _config_git():
-    _run_cmd(
-        ['git', 'config', '--global', 'user.email', 'example@example.com'],
-        None)
-    _run_cmd(
-        ['git', 'config', '--global', 'user.name', 'OpenStack Developer'],
-        None)
-    _run_cmd(
-        ['git', 'config', '--global', 'user.signingkey',
-         'example@example.com'], None)
diff -pruN 6.1.1-2/pbr/tests/fixtures.py 7.0.1-2/pbr/tests/fixtures.py
--- 6.1.1-2/pbr/tests/fixtures.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/fixtures.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,331 @@
+# Copyright (c) 2013 New Dream Network, LLC (DreamHost)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+from __future__ import absolute_import
+
+import os
+import re
+import subprocess
+import textwrap
+
+import fixtures
+from testtools import content
+import virtualenv
+
+from pbr.tests import util
+
+PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
+
+
+class Chdir(fixtures.Fixture):
+    """Dive into given directory and return back on cleanup.
+
+    :ivar path: The target directory.
+    """
+
+    def __init__(self, path):
+        self.path = path
+
+    def setUp(self):
+        super(Chdir, self).setUp()
+        self.addCleanup(os.chdir, os.getcwd())
+        os.chdir(self.path)
+
+
+class CapturedSubprocess(fixtures.Fixture):
+    """Run a process and capture its output.
+
+    :attr stdout: The output (a string). Only set if the process fails.
+    :attr stderr: The standard error (a string). Only set if the process fails.
+    :attr returncode: The return code of the process.
+
+    Note that stdout and stderr are decoded from the bytestrings subprocess
+    returns using error=replace.
+    """
+
+    def __init__(self, label, *args, **kwargs):
+        """Create a CapturedSubprocess.
+
+        :param label: A label for the subprocess in the test log. E.g. 'foo'.
+        :param *args: The *args to pass to Popen.
+        :param **kwargs: The **kwargs to pass to Popen.
+        """
+        super(CapturedSubprocess, self).__init__()
+        self.label = label
+        self.args = args
+        self.kwargs = kwargs
+        self.kwargs['stderr'] = subprocess.PIPE
+        self.kwargs['stdin'] = subprocess.PIPE
+        self.kwargs['stdout'] = subprocess.PIPE
+
+    def setUp(self):
+        super(CapturedSubprocess, self).setUp()
+        # setuptools can be very shouty
+        env = os.environ.copy()
+        env['PYTHONWARNINGS'] = 'ignore'
+        self.kwargs['env'] = env
+        proc = subprocess.Popen(*self.args, **self.kwargs)
+        out, err = proc.communicate()
+        self.out = out.decode('utf-8', 'replace')
+        self.err = err.decode('utf-8', 'replace')
+        self.addDetail(self.label + '-stdout', content.text_content(self.out))
+        self.addDetail(self.label + '-stderr', content.text_content(self.err))
+        self.returncode = proc.returncode
+        if proc.returncode:
+            raise AssertionError(
+                'Failed process args=%r, kwargs=%r, returncode=%s'
+                % (self.args, self.kwargs, proc.returncode)
+            )
+        self.addCleanup(delattr, self, 'out')
+        self.addCleanup(delattr, self, 'err')
+        self.addCleanup(delattr, self, 'returncode')
+
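
A minimal usage sketch for this fixture inside a test case (the label, command
and pkg_dir are hypothetical):

    # Run a command, attach its output to the test log, and inspect it.
    proc = self.useFixture(
        CapturedSubprocess('sdist', ['python', 'setup.py', 'sdist'], cwd=pkg_dir)
    )
    self.assertIn('running sdist', proc.out)
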
+
+class GitRepo(fixtures.Fixture):
+    """A git repo for testing with.
+
+    Use of TempHomeDir with this fixture is strongly recommended: due to the
+    lack of config --local in older gits, it will otherwise write to the
+    user's global configuration.
+    """
+
+    def __init__(self, basedir):
+        super(GitRepo, self).__init__()
+        self._basedir = basedir
+
+    def setUp(self):
+        super(GitRepo, self).setUp()
+        util.run_cmd(['git', 'init', '.'], self._basedir)
+        util.config_git()
+        util.run_cmd(['git', 'add', '.'], self._basedir)
+
+    def commit(self, message_content='test commit'):
+        files = len(os.listdir(self._basedir))
+        path = self._basedir + '/%d' % files
+        open(path, 'wt').close()
+        util.run_cmd(['git', 'add', path], self._basedir)
+        util.run_cmd(['git', 'commit', '-m', message_content], self._basedir)
+
+    def uncommit(self):
+        util.run_cmd(['git', 'reset', '--hard', 'HEAD^'], self._basedir)
+
+    def tag(self, version):
+        util.run_cmd(['git', 'tag', '-sm', 'test tag', version], self._basedir)
+
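
A usage sketch; note that tag() creates signed tags, so tests normally combine
this with a temporary HOME and the GPGKey fixture below:

    # Build a small tagged history inside the test package directory.
    repo = self.useFixture(GitRepo(self.package_dir))
    repo.commit('initial commit')
    repo.tag('1.2.3')
    repo.commit('post-release change')
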
+
+class GPGKey(fixtures.Fixture):
+    """Creates a GPG key for testing.
+
+    It's recommended that this be used in concert with a unique home
+    directory.
+    """
+
+    def setUp(self):
+        super(GPGKey, self).setUp()
+        # If a temporary home dir is in use (and it should be), ensure gpg is
+        # aware of it. This seems to be necessary on Fedora.
+        self.useFixture(
+            fixtures.EnvironmentVariable('GNUPGHOME', os.getenv('HOME'))
+        )
+        tempdir = self.useFixture(fixtures.TempDir())
+        gnupg_version_re = re.compile(r'^gpg\s.*\s([\d+])\.([\d+])\.([\d+])')
+        gnupg_version = util.run_cmd(['gpg', '--version'], tempdir.path)
+        for line in gnupg_version[0].split('\n'):
+            gnupg_version = gnupg_version_re.match(line)
+            if gnupg_version:
+                gnupg_version = (
+                    int(gnupg_version.group(1)),
+                    int(gnupg_version.group(2)),
+                    int(gnupg_version.group(3)),
+                )
+                break
+        else:
+            if gnupg_version is None:
+                gnupg_version = (0, 0, 0)
+
+        config_file = os.path.join(tempdir.path, 'key-config')
+        with open(config_file, 'wt') as f:
+            if gnupg_version[0] == 2 and gnupg_version[1] >= 1:
+                f.write(
+                    """
+                %no-protection
+                %transient-key
+                """
+                )
+            f.write(
+                """
+            %no-ask-passphrase
+            Key-Type: RSA
+            Name-Real: Example Key
+            Name-Comment: N/A
+            Name-Email: example@example.com
+            Expire-Date: 2d
+            %commit
+            """
+            )
+
+        # Note that --quick-random (--debug-quick-random in GnuPG 2.x)
+        # does not have a corresponding preferences file setting and
+        # must be passed explicitly on the command line instead
+        if gnupg_version[0] == 1:
+            gnupg_random = '--quick-random'
+        elif gnupg_version[0] >= 2:
+            gnupg_random = '--debug-quick-random'
+        else:
+            gnupg_random = ''
+
+        _, _, retcode = util.run_cmd(
+            ['gpg', '--gen-key', '--batch', gnupg_random, config_file],
+            tempdir.path,
+        )
+        assert retcode == 0, 'gpg key generation failed!'
+
+
+class Venv(fixtures.Fixture):
+    """Create a virtual environment for testing with.
+
+    :attr path: The path to the environment root.
+    :attr python: The path to the python binary in the environment.
+    """
+
+    def __init__(self, reason, modules=(), pip_cmd=None):
+        """Create a Venv fixture.
+
+        :param reason: A human readable string to bake into the venv
+            file path to aid diagnostics in the case of failures.
+        :param modules: A list of modules to install, defaults to the latest
+            pip, wheel, build, setuptools, and the working copy of PBR.
+        :param pip_cmd: A list to override the default pip command passed to
+            python for installing base packages.
+        """
+        self._reason = reason
+        if modules == ():
+            modules = ['pip', 'wheel', 'build', 'setuptools', PBR_ROOT]
+        self.modules = modules
+        if pip_cmd is None:
+            self.pip_cmd = ['-m', 'pip', '-v', 'install']
+        else:
+            self.pip_cmd = pip_cmd
+
+    def _setUp(self):
+        path = self.useFixture(fixtures.TempDir()).path
+        virtualenv.cli_run([path])
+
+        python = os.path.join(path, 'bin', 'python')
+        command = [python] + self.pip_cmd + ['-U']
+        if self.modules and len(self.modules) > 0:
+            command.extend(self.modules)
+            self.useFixture(
+                CapturedSubprocess('mkvenv-' + self._reason, command)
+            )
+        self.addCleanup(delattr, self, 'path')
+        self.addCleanup(delattr, self, 'python')
+        self.path = path
+        self.python = python
+        return path, python
+
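
A usage sketch, assuming the default module set (pip, wheel, build, setuptools
and the working-copy pbr):

    # Create an isolated interpreter with pbr installed, then exercise it.
    venv = self.useFixture(Venv('smoke'))
    out, err, code = util.run_cmd([venv.python, '-c', 'import pbr'], venv.path)
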
+
+class Packages(fixtures.Fixture):
+    """Creates packages from dict with defaults
+
+    :ivar package_dirs: A dict mapping package names to directory strings, e.g.
+    {'pkg_a': '/tmp/path/to/tmp/pkg_a', 'pkg_b': '/tmp/path/to/tmp/pkg_b'}
+    """
+
+    defaults = {
+        'setup.py': textwrap.dedent(
+            u"""\
+            #!/usr/bin/env python
+            import setuptools
+            setuptools.setup(
+                setup_requires=['pbr'],
+                pbr=True,
+            )
+        """
+        ),
+        'setup.cfg': textwrap.dedent(
+            u"""\
+            [metadata]
+            name = {pkg_name}
+        """
+        ),
+    }
+
+    def __init__(self, packages):
+        """Creates packages from dict with defaults
+
+        :param packages: a dict mapping each package name to a second dict,
+        which may be empty, mapping filenames to string file contents, e.g.
+        {'package-a': {'requirements.txt': 'string', 'setup.cfg': 'string'}}
+        """
+        self.packages = packages
+
+    def _writeFile(self, directory, file_name, contents):
+        path = os.path.abspath(os.path.join(directory, file_name))
+        path_dir = os.path.dirname(path)
+        if not os.path.exists(path_dir):
+            if path_dir.startswith(directory):
+                os.makedirs(path_dir)
+            else:
+                raise ValueError
+        with open(path, 'wt') as f:
+            f.write(contents)
+
+    def _setUp(self):
+        tmpdir = self.useFixture(fixtures.TempDir()).path
+        package_dirs = {}
+        for pkg_name in self.packages:
+            pkg_path = os.path.join(tmpdir, pkg_name)
+            package_dirs[pkg_name] = pkg_path
+            os.mkdir(pkg_path)
+            for cf in ['setup.py', 'setup.cfg']:
+                if cf in self.packages[pkg_name]:
+                    contents = self.packages[pkg_name].pop(cf)
+                else:
+                    contents = self.defaults[cf].format(pkg_name=pkg_name)
+                self._writeFile(pkg_path, cf, contents)
+
+            for cf in self.packages[pkg_name]:
+                self._writeFile(pkg_path, cf, self.packages[pkg_name][cf])
+            self.useFixture(GitRepo(pkg_path)).commit()
+        self.addCleanup(delattr, self, 'package_dirs')
+        self.package_dirs = package_dirs
+        return package_dirs
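
A usage sketch: two packages, one relying entirely on the defaults and one
adding an extra file (names and contents are arbitrary):

    # Each package gets a default setup.py/setup.cfg, a git repo and one commit.
    pkgs = self.useFixture(Packages({
        'pkg_a': {},
        'pkg_b': {'requirements.txt': 'pbr\n'},
    }))
    pkg_a_dir = pkgs.package_dirs['pkg_a']
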
diff -pruN 6.1.1-2/pbr/tests/functional/base.py 7.0.1-2/pbr/tests/functional/base.py
--- 6.1.1-2/pbr/tests/functional/base.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/base.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,107 @@
+# Copyright 2010-2011 OpenStack Foundation
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+"""Common utilities used in functional testing"""
+
+import os
+import sys
+
+from wheel import wheelfile
+
+from pbr.tests import base
+from pbr.tests import fixtures as pbr_fixtures
+from pbr.tests import util
+
+
+class BaseTestCase(base.BaseTestCase):
+
+    def _run_cmd(self, cmd, args=[], allow_fail=True, cwd=None):
+        """Run a command in the root of the test working copy.
+
+        Runs a command, with the given argument list, in the root of the test
+        working copy--returns the stdout and stderr streams and the exit code
+        from the subprocess.
+
+        :param cwd: If falsy run within the test package dir, otherwise run
+            within the named path.
+        """
+        cwd = cwd or self.package_dir
+        result = util.run_cmd([cmd] + list(args), cwd=cwd)
+        if result[2] and not allow_fail:
+            raise Exception("Command failed retcode=%s" % result[2])
+        return result
+
+    def get_setuptools_version(self):
+        # we rely on this to determine whether to skip tests, so we can't allow
+        # this to fail silently
+        stdout, _, _ = self._run_cmd(
+            sys.executable,
+            ('-c', 'import setuptools; print(setuptools.__version__)'),
+            allow_fail=False,
+        )
+        return tuple(int(x) for x in stdout.strip().split('.')[:3])
+
+    def run_pbr(self, *args, **kwargs):
+        return self._run_cmd('pbr', args, **kwargs)
+
+    def run_setup(self, *args, **kwargs):
+        return self._run_cmd(sys.executable, ('setup.py',) + args, **kwargs)
+
+
+class BaseWheelTestCase(BaseTestCase):
+    """Base test case for tests that build wheels."""
+
+    def setUp(self):
+        super(BaseWheelTestCase, self).setUp()
+        self.useFixture(pbr_fixtures.GitRepo(self.package_dir))
+        # Build the wheel
+        self.run_setup('bdist_wheel', allow_fail=False)
+        # Slowly construct the path to the generated whl
+        dist_dir = os.path.join(self.package_dir, 'dist')
+        relative_wheel_filename = os.listdir(dist_dir)[0]
+        absolute_wheel_filename = os.path.join(
+            dist_dir, relative_wheel_filename
+        )
+        wheel_file = wheelfile.WheelFile(absolute_wheel_filename)
+        wheel_name = wheel_file.parsed_filename.group('namever')
+        # Create a directory path to unpack the wheel to
+        self.extracted_wheel_dir = os.path.join(dist_dir, wheel_name)
+        # Extract the wheel contents to the directory we just created
+        wheel_file.extractall(self.extracted_wheel_dir)
+        wheel_file.close()
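
A sketch of a test built on this base class; setUp() has already built and
unpacked the wheel, so assertions can go straight to the extracted tree (the
checked path is illustrative):

    class TestWheelContents(BaseWheelTestCase):

        def test_package_module_present(self):
            path = os.path.join(
                self.extracted_wheel_dir, 'pbr_testpackage', '__init__.py'
            )
            self.assertTrue(os.path.exists(path))
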
diff -pruN 6.1.1-2/pbr/tests/functional/test_c_extension.py 7.0.1-2/pbr/tests/functional/test_c_extension.py
--- 6.1.1-2/pbr/tests/functional/test_c_extension.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_c_extension.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,100 @@
+# Copyright (c) 2013 New Dream Network, LLC (DreamHost)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+import os
+import sysconfig
+
+from pbr.tests.functional import base
+
+try:
+    import importlib.machinery
+
+    get_suffixes = importlib.machinery.all_suffixes
+# NOTE(JayF): ModuleNotFoundError only exists in Python 3.6+, not in 2.7
+except ImportError:
+    import imp
+
+    # NOTE(JayF) imp.get_suffixes returns a list of three-tuples;
+    # we need the first value from each tuple.
+
+    def get_suffixes():
+        return [x[0] for x in imp.get_suffixes()]
+
+
+def get_soabi():
+    soabi = None
+    try:
+        soabi = sysconfig.get_config_var('SOABI')
+        arch = sysconfig.get_config_var('MULTIARCH')
+    except IOError:
+        pass
+    if soabi and arch and 'pypy' in sysconfig.get_scheme_names():
+        soabi = '%s-%s' % (soabi, arch)
+    if soabi is None and 'pypy' in sysconfig.get_scheme_names():
+        # NOTE(sigmavirus24): PyPy only added support for the SOABI config var
+        # to sysconfig in 2015. That was well after 2.2.1 was published in the
+        # Ubuntu 14.04 archive.
+        for suffix in get_suffixes():
+            if suffix.startswith('.pypy') and suffix.endswith('.so'):
+                soabi = suffix.split('.')[1]
+                break
+    return soabi
+
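
Illustrative only: on a recent CPython Linux build the suffix queried in the
test below looks like '.cpython-312-x86_64-linux-gnu.so', so get_soabi() is
needed only on interpreters that do not expose EXT_SUFFIX:

    import sysconfig

    print(sysconfig.get_config_var('EXT_SUFFIX'))
    # e.g. '.cpython-312-x86_64-linux-gnu.so'
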
+
+class TestCExtension(base.BaseWheelTestCase):
+
+    def test_generates_c_extensions(self):
+        built_package_dir = os.path.join(
+            self.extracted_wheel_dir, 'pbr_testpackage'
+        )
+        static_object_filename = 'testext.so'
+        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
+        if ext_suffix is not None:
+            static_object_filename = 'testext' + ext_suffix
+        else:
+            soabi = get_soabi()
+            if soabi:
+                static_object_filename = 'testext.{0}.so'.format(soabi)
+        static_object_path = os.path.join(
+            built_package_dir, static_object_filename
+        )
+
+        self.assertTrue(os.path.exists(built_package_dir))
+        self.assertTrue(os.path.exists(static_object_path))
diff -pruN 6.1.1-2/pbr/tests/functional/test_changelog_author.py 7.0.1-2/pbr/tests/functional/test_changelog_author.py
--- 6.1.1-2/pbr/tests/functional/test_changelog_author.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_changelog_author.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,159 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+import os
+
+from testtools import matchers
+
+from pbr.tests import fixtures as pbr_fixtures
+from pbr.tests.functional import base
+
+
+class TestPackagingInPlainDirectory(base.BaseTestCase):
+
+    def test_authors(self):
+        self.run_setup('sdist', allow_fail=False)
+        # Not a git repo, no AUTHORS file created
+        filename = os.path.join(self.package_dir, 'AUTHORS')
+        self.assertFalse(os.path.exists(filename))
+
+    def test_changelog(self):
+        self.run_setup('sdist', allow_fail=False)
+        # Not a git repo, no ChangeLog created
+        filename = os.path.join(self.package_dir, 'ChangeLog')
+        self.assertFalse(os.path.exists(filename))
+
+    def test_install_no_ChangeLog(self):
+        stdout, _, _ = self.run_setup(
+            'install', '--root', self.temp_dir + 'installed', allow_fail=False
+        )
+        self.expectThat(
+            stdout, matchers.Not(matchers.Contains('Generating ChangeLog'))
+        )
+
+
+class TestPackagingInGitRepoWithoutCommit(base.BaseTestCase):
+
+    def setUp(self):
+        super(TestPackagingInGitRepoWithoutCommit, self).setUp()
+        self.useFixture(pbr_fixtures.GitRepo(self.package_dir))
+        self.run_setup('sdist', allow_fail=False)
+
+    def test_authors(self):
+        # No commits, no authors in list
+        with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f:
+            body = f.read()
+        self.assertEqual('\n', body)
+
+    def test_changelog(self):
+        # No commits, nothing should be in the ChangeLog list
+        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
+            body = f.read()
+        self.assertEqual('CHANGES\n=======\n\n', body)
+
+
+class TestPackagingInGitRepoWithCommit(base.BaseTestCase):
+
+    scenarios = [
+        ('preversioned', {'preversioned': True}),
+        ('postversioned', {'preversioned': False}),
+    ]
+
+    def setUp(self):
+        super(TestPackagingInGitRepoWithCommit, self).setUp()
+        self.repo = self.useFixture(pbr_fixtures.GitRepo(self.package_dir))
+        self.repo.commit()
+
+    def test_authors(self):
+        self.run_setup('sdist', allow_fail=False)
+        # One commit, something should be in the authors list
+        with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f:
+            body = f.read()
+        self.assertNotEqual(body, '')
+
+    def test_changelog(self):
+        self.run_setup('sdist', allow_fail=False)
+        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
+            body = f.read()
+        # One commit, something should be in the ChangeLog list
+        self.assertNotEqual(body, '')
+
+    def test_changelog_handles_astrisk(self):
+        self.repo.commit(message_content="Allow *.openstack.org to work")
+        self.run_setup('sdist', allow_fail=False)
+        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
+            body = f.read()
+        self.assertIn(r'\*', body)
+
+    def test_changelog_handles_dead_links_in_commit(self):
+        self.repo.commit(message_content="See os_ for to_do about qemu_.")
+        self.run_setup('sdist', allow_fail=False)
+        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
+            body = f.read()
+        self.assertIn(r'os\_', body)
+        self.assertIn(r'to\_do', body)
+        self.assertIn(r'qemu\_', body)
+
+    def test_changelog_handles_backticks(self):
+        self.repo.commit(message_content="Allow `openstack.org` to `work")
+        self.run_setup('sdist', allow_fail=False)
+        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
+            body = f.read()
+        self.assertIn(r'\`', body)
+
+    def test_manifest_exclude_honoured(self):
+        self.run_setup('sdist', allow_fail=False)
+        with open(
+            os.path.join(
+                self.package_dir, 'pbr_testpackage.egg-info/SOURCES.txt'
+            ),
+            'r',
+        ) as f:
+            body = f.read()
+        self.assertThat(
+            body, matchers.Not(matchers.Contains('pbr_testpackage/extra.py'))
+        )
+        self.assertThat(body, matchers.Contains('pbr_testpackage/__init__.py'))
+
+    def test_install_writes_changelog(self):
+        stdout, _, _ = self.run_setup(
+            'install', '--root', self.temp_dir + 'installed', allow_fail=False
+        )
+        self.expectThat(stdout, matchers.Contains('Generating ChangeLog'))
diff -pruN 6.1.1-2/pbr/tests/functional/test_commands.py 7.0.1-2/pbr/tests/functional/test_commands.py
--- 6.1.1-2/pbr/tests/functional/test_commands.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_commands.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,97 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+from testtools import content
+
+from pbr.tests.functional import base
+
+
+class TestCommands(base.BaseTestCase):
+    def test_setup_py_keywords(self):
+        """setup.py --keywords.
+
+        Test that the `./setup.py --keywords` command returns the correct
+        value without balking.
+        """
+        self.run_setup('egg_info')
+        stdout, _, _ = self.run_setup('--keywords')
+        assert stdout == 'packaging, distutils, setuptools'
+
+    def test_custom_build_py_command(self):
+        """Test custom build_py command.
+
+        Test that a custom subclass of the build_py command runs when listed in
+        the commands [global] option, rather than the normal build command.
+        """
+
+        stdout, stderr, return_code = self.run_setup('build_py')
+        self.addDetail('stdout', content.text_content(stdout))
+        self.addDetail('stderr', content.text_content(stderr))
+        self.assertIn('Running custom build_py command.', stdout)
+        self.assertEqual(0, return_code)
+
+    def test_custom_deb_version_py_command(self):
+        """Test custom deb_version command."""
+        stdout, stderr, return_code = self.run_setup('deb_version')
+        self.addDetail('stdout', content.text_content(stdout))
+        self.addDetail('stderr', content.text_content(stderr))
+        self.assertIn('Extracting deb version', stdout)
+        self.assertEqual(0, return_code)
+
+    def test_custom_rpm_version_py_command(self):
+        """Test custom rpm_version command."""
+        stdout, stderr, return_code = self.run_setup('rpm_version')
+        self.addDetail('stdout', content.text_content(stdout))
+        self.addDetail('stderr', content.text_content(stderr))
+        self.assertIn('Extracting rpm version', stdout)
+        self.assertEqual(0, return_code)
+
+    def test_freeze_command(self):
+        """Test that freeze output is sorted in a case-insensitive manner."""
+        stdout, stderr, return_code = self.run_pbr('freeze')
+        self.assertEqual(0, return_code)
+        pkgs = []
+        for line in stdout.split('\n'):
+            pkgs.append(line.split('==')[0].lower())
+        pkgs_sort = sorted(pkgs[:])
+        self.assertEqual(pkgs_sort, pkgs)
diff -pruN 6.1.1-2/pbr/tests/functional/test_console_scripts.py 7.0.1-2/pbr/tests/functional/test_console_scripts.py
--- 6.1.1-2/pbr/tests/functional/test_console_scripts.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_console_scripts.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,121 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import os
+import sys
+
+import fixtures
+
+from pbr.tests.functional import base
+
+
+class TestConsoleScripts(base.BaseTestCase):
+    """Test generation of custom console scripts.
+
+    We generate custom console scripts that do not rely on pkg_resources to
+    handle imports. This is no longer a concern starting with pip 19.0, since
+    pip handles script generation for wheels and, as of that release, builds
+    an intermediate wheel during installation.
+    """
+
+    cmd_names = ('pbr_test_cmd', 'pbr_test_cmd_with_class')
+
+    def check_script_install(self, install_stdout):
+        for cmd_name in self.cmd_names:
+            install_txt = 'Installing %s script to %s' % (
+                cmd_name,
+                self.temp_dir,
+            )
+            self.assertIn(install_txt, install_stdout)
+
+            cmd_filename = os.path.join(self.temp_dir, cmd_name)
+
+            script_txt = open(cmd_filename, 'r').read()
+            self.assertNotIn('pkg_resources', script_txt)
+
+            stdout, _, return_code = self._run_cmd(cmd_filename)
+            self.assertIn("PBR", stdout)
+
+    def test_console_script_install(self):
+        """Test that we install a non-pkg-resources console script."""
+
+        if os.name == 'nt':
+            self.skipTest('Windows support is passthrough')
+
+        stdout, _, return_code = self.run_setup(
+            'install_scripts', '--install-dir=%s' % self.temp_dir
+        )
+
+        self.useFixture(fixtures.EnvironmentVariable('PYTHONPATH', '.'))
+
+        self.check_script_install(stdout)
+
+    def test_console_script_develop(self):
+        """Test that we develop a non-pkg-resources console script."""
+
+        if sys.version_info < (3, 0):
+            self.skipTest(
+                'Fails with recent virtualenv due to '
+                'https://github.com/pypa/virtualenv/issues/1638'
+            )
+
+        if os.name == 'nt':
+            self.skipTest('Windows support is passthrough')
+
+        # setuptools v80.0.0 switched to using pip for the 'develop' command,
+        # which means easy_install is no longer invoked
+        #
+        # https://github.com/pypa/setuptools/commit/98e6b4cac625c6c13b718eeccea42d00d75f2577
+        # https://setuptools.pypa.io/en/stable/history.html#v80-0-0
+        if self.get_setuptools_version() >= (80, 0):
+            self.skipTest('setuptools is too new')
+
+        self.useFixture(
+            fixtures.EnvironmentVariable('PYTHONPATH', ".:%s" % self.temp_dir)
+        )
+
+        stdout, _, return_code = self.run_setup(
+            'develop', '--install-dir=%s' % self.temp_dir
+        )
+
+        self.check_script_install(stdout)
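For context on the console-script behaviour exercised above: the scripts are
declared as ordinary [entry_points] console_scripts entries in the test
package's setup.cfg, and pbr emits launcher scripts that import the target
module directly, which is why the tests assert that 'pkg_resources' never
appears in the generated script. A minimal sketch of such a declaration,
embedded in a dedented string the way these tests embed config elsewhere (the
right-hand module paths are illustrative, not the real pbr_testpackage
layout):

    import textwrap

    # Illustrative setup.cfg fragment. pbr_test_cmd and pbr_test_cmd_with_class
    # are the entry points the tests above look for; the targets here are made
    # up for the example.
    entry_points_cfg = textwrap.dedent(
        """\
        [entry_points]
        console_scripts =
            pbr_test_cmd = pbr_testpackage.cmd:main
            pbr_test_cmd_with_class = pbr_testpackage.cmd:Main.run
        """
    )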
diff -pruN 6.1.1-2/pbr/tests/functional/test_extra_files.py 7.0.1-2/pbr/tests/functional/test_extra_files.py
--- 6.1.1-2/pbr/tests/functional/test_extra_files.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_extra_files.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,102 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+import glob
+import os
+import tarfile
+
+from testtools import matchers
+
+from pbr.tests.functional import base
+
+
+class TestExtraFiles(base.BaseTestCase):
+
+    def test_sdist_extra_files(self):
+        """Test that the extra files are correctly added."""
+
+        stdout, _, return_code = self.run_setup('sdist', '--formats=gztar')
+
+        # There can be only one
+        try:
+            tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
+        except IndexError:
+            assert False, 'source dist not found'
+
+        tf = tarfile.open(tf_path)
+        names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]
+
+        self.assertIn('extra-file.txt', names)
+
+
+class TestDataFiles(base.BaseTestCase):
+
+    def test_install_glob(self):
+        stdout, _, _ = self.run_setup(
+            'install', '--root', self.temp_dir + 'installed', allow_fail=False
+        )
+        self.expectThat(stdout, matchers.Contains('copying data_files/a.txt'))
+        self.expectThat(stdout, matchers.Contains('copying data_files/b.txt'))
+
+
+class TestExtraFilesWithGit(base.BaseTestCase):
+
+    def setUp(self):
+        super(TestExtraFilesWithGit, self).setUp()
+
+        stdout, _, return_code = self._run_cmd('git', ('init',))
+        if return_code:
+            self.skipTest("git not installed")
+
+        stdout, _, return_code = self._run_cmd('git', ('add', '.'))
+        stdout, _, return_code = self._run_cmd(
+            'git', ('commit', '-m', 'Turn this into a git repo')
+        )
+
+    def test_sdist_git_extra_files(self):
+        """Test that extra files found in git are correctly added."""
+        stdout, _, return_code = self.run_setup('sdist', '--formats=gztar')
+
+        # There can be only one
+        tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
+        tf = tarfile.open(tf_path)
+        names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]
+
+        self.assertIn('git-extra-file.txt', names)
diff -pruN 6.1.1-2/pbr/tests/functional/test_hooks.py 7.0.1-2/pbr/tests/functional/test_hooks.py
--- 6.1.1-2/pbr/tests/functional/test_hooks.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_hooks.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,72 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import os
+
+from pbr.tests.functional import base
+from pbr.tests import util
+
+
+class TestHooks(base.BaseTestCase):
+    def setUp(self):
+        super(TestHooks, self).setUp()
+        with util.open_config(
+            os.path.join(self.package_dir, 'setup.cfg')
+        ) as cfg:
+            cfg.set(
+                'global',
+                'setup-hooks',
+                'pbr_testpackage._setup_hooks.test_hook_1\n'
+                'pbr_testpackage._setup_hooks.test_hook_2',
+            )
+
+    def test_global_setup_hooks(self):
+        """Test setup_hooks.
+
+        Test that setup_hooks listed in the [global] section of setup.cfg are
+        executed in order.
+        """
+
+        stdout, _, return_code = self.run_setup('egg_info')
+        assert 'test_hook_1\ntest_hook_2' in stdout
+        assert return_code == 0
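For context on the hook mechanism exercised above: each entry listed under
setup-hooks in the [global] section is a dotted path to a plain callable that
pbr imports and runs during setup, passing it the parsed setup.cfg contents.
A minimal sketch of what a module like pbr_testpackage._setup_hooks could
contain, assuming the single-argument hook signature pbr inherited from d2to1
(the real test package may differ):

    # Illustrative hooks module; each hook receives the parsed config dict.
    # The printed output is what test_global_setup_hooks asserts on.
    def test_hook_1(config):
        print('test_hook_1')


    def test_hook_2(config):
        print('test_hook_2')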
diff -pruN 6.1.1-2/pbr/tests/functional/test_integration.py 7.0.1-2/pbr/tests/functional/test_integration.py
--- 6.1.1-2/pbr/tests/functional/test_integration.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_integration.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,419 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import os.path
+import pkg_resources
+import shlex
+import sys
+
+import fixtures
+import testtools
+import textwrap
+
+from pbr._compat.five import ConfigParser
+from pbr.tests import fixtures as pbr_fixtures
+from pbr.tests.functional import base
+from pbr.tests import util
+
+PIPFLAGS = shlex.split(os.environ.get('PIPFLAGS', ''))
+PIPVERSION = os.environ.get('PIPVERSION', 'pip')
+PBRVERSION = os.environ.get('PBRVERSION', 'pbr')
+REPODIR = os.environ.get('REPODIR', '')
+WHEELHOUSE = os.environ.get('WHEELHOUSE', '')
+PIP_CMD = ['-m', 'pip'] + PIPFLAGS + ['install', '-f', WHEELHOUSE]
+PROJECTS = shlex.split(os.environ.get('PROJECTS', ''))
+
+
+def all_projects():
+    if os.environ.get('PBR_INTEGRATION', None) != '1':
+        return
+
+    if not REPODIR:
+        return
+
+    yielded = 0
+
+    # Future: make this path parameterisable.
+    excludes = set(['tempest', 'requirements'])
+    for name in PROJECTS:
+        name = name.strip()
+        short_name = name.split('/')[-1]
+        try:
+            with open(
+                os.path.join(REPODIR, short_name, 'setup.py'), 'rt'
+            ) as f:
+                if 'pbr' not in f.read():
+                    continue
+        except IOError:
+            continue
+        if short_name in excludes:
+            continue
+        yielded += 1
+        yield (short_name, {'name': name, 'short_name': short_name})
+
+    if not yielded:
+        raise Exception(
+            'no projects found: is PROJECTS set, and do the paths exist'
+        )
+
+
+class TestIntegration(base.BaseTestCase):
+
+    scenarios = list(all_projects())
+
+    def setUp(self):
+        # Integration tests need a higher default - big repos can be slow to
+        # clone, particularly under guest load.
+        env = fixtures.EnvironmentVariable(
+            'OS_TEST_TIMEOUT', os.environ.get('OS_TEST_TIMEOUT', '600')
+        )
+        with env:
+            super(TestIntegration, self).setUp()
+        util.config_git()
+
+    @testtools.skipUnless(
+        os.environ.get('PBR_INTEGRATION', None) == '1',
+        'integration tests not enabled',
+    )
+    def test_integration(self):
+        # Test that we can:
+        # - run sdist from the repo in a venv
+        # - install the resulting tarball in a new venv
+        # - pip install the repo
+        # - pip install -e the repo
+        # We don't break these into separate tests because we'd need separate
+        # source dirs to isolate from side effects of running pip, and the
+        # overheads of setup would start to beat the benefits of parallelism.
+        path = os.path.join(REPODIR, self.short_name)
+        setup_cfg = os.path.join(path, 'setup.cfg')
+        project_name = pkg_resources.safe_name(self.short_name).lower()
+        # These projects should all have setup.cfg files but we'll be careful
+        if os.path.exists(setup_cfg):
+            config = ConfigParser()
+            config.read(setup_cfg)
+            if config.has_section('metadata'):
+                raw_name = config.get(
+                    'metadata', 'name', fallback='notapackagename'
+                )
+                # Technically we should really only need to use the raw
+                # name because all our projects should be good and use
+                # normalized names but they don't...
+                project_name = pkg_resources.safe_name(raw_name).lower()
+        constraints = os.path.join(
+            REPODIR, 'requirements', 'upper-constraints.txt'
+        )
+        tmp_constraints = os.path.join(
+            self.useFixture(fixtures.TempDir()).path, 'upper-constraints.txt'
+        )
+        # We need to filter out the package we are installing to avoid
+        # conflicts with the constraints.
+        with open(constraints, 'r') as src:
+            with open(tmp_constraints, 'w') as dest:
+                for line in src:
+                    constraint = line.split('===')[0]
+                    constraint = pkg_resources.safe_name(constraint).lower()
+                    if project_name != constraint:
+                        dest.write(line)
+        pip_cmd = PIP_CMD + ['-c', tmp_constraints]
+
+        venv = self.useFixture(
+            pbr_fixtures.Venv(
+                'sdist',
+                modules=['pip', 'wheel', 'setuptools<80', PBRVERSION],
+                pip_cmd=PIP_CMD,
+            )
+        )
+        python = venv.python
+        self.useFixture(
+            pbr_fixtures.CapturedSubprocess(
+                'sdist', [python, 'setup.py', 'sdist'], cwd=path
+            )
+        )
+
+        venv = self.useFixture(
+            pbr_fixtures.Venv(
+                'tarball',
+                modules=['pip', 'wheel', 'setuptools<80', PBRVERSION],
+                pip_cmd=PIP_CMD,
+            )
+        )
+        python = venv.python
+        filename = os.path.join(
+            path, 'dist', os.listdir(os.path.join(path, 'dist'))[0]
+        )
+        self.useFixture(
+            pbr_fixtures.CapturedSubprocess(
+                'tarball', [python] + pip_cmd + [filename]
+            )
+        )
+
+        venv = self.useFixture(
+            pbr_fixtures.Venv(
+                'install-git',
+                modules=['pip', 'wheel', 'setuptools<80', PBRVERSION],
+                pip_cmd=PIP_CMD,
+            )
+        )
+        root = venv.path
+        python = venv.python
+        self.useFixture(
+            pbr_fixtures.CapturedSubprocess(
+                'install-git', [python] + pip_cmd + ['git+file://' + path]
+            )
+        )
+        if self.short_name == 'nova':
+            found = False
+            for _, _, filenames in os.walk(root):
+                if 'alembic.ini' in filenames:
+                    found = True
+            self.assertTrue(found)
+
+        venv = self.useFixture(
+            pbr_fixtures.Venv(
+                'install-editable',
+                modules=['pip', 'wheel', 'setuptools<80', PBRVERSION],
+                pip_cmd=PIP_CMD,
+            )
+        )
+        root = venv.path
+        python = venv.python
+        self.useFixture(
+            pbr_fixtures.CapturedSubprocess(
+                'install-editable', [python] + pip_cmd + ['-e', path]
+            )
+        )
+
+
+class TestInstallWithoutPbr(base.BaseTestCase):
+
+    # TODO(clarkb) This test should be reimagined with modern packaging tools
+    # and expectations.
+    @testtools.skipUnless(
+        os.environ.get('PBR_INTEGRATION', None) == '1',
+        'integration tests not enabled',
+    )
+    def test_install_without_pbr(self):
+        # Test easy-install of a package that depends on a package using pbr
+        tempdir = self.useFixture(fixtures.TempDir()).path
+        # A directory containing sdists of the things we're going to depend on
+        # in using-package.
+        dist_dir = os.path.join(tempdir, 'distdir')
+        os.mkdir(dist_dir)
+        self._run_cmd(
+            sys.executable,
+            ('setup.py', 'sdist', '-d', dist_dir),
+            allow_fail=False,
+            cwd=pbr_fixtures.PBR_ROOT,
+        )
+        # testpkg - this requires a pbr-using package
+        test_pkg_dir = os.path.join(tempdir, 'testpkg')
+        os.mkdir(test_pkg_dir)
+        pkgs = {
+            'pkgTest': {
+                'setup.py': textwrap.dedent(
+                    """\
+                    #!/usr/bin/env python
+                    import setuptools
+                    setuptools.setup(
+                        name = 'pkgTest',
+                        # TODO(clarkb) should we use a random prefix to
+                        # avoid collisions?
+                        install_requires = ['pkgReq'],
+                    )
+                """
+                ),
+                'setup.cfg': textwrap.dedent(
+                    """\
+                    [easy_install]
+                    find_links = %s
+                """
+                    % dist_dir
+                ),
+            },
+            # We don't need to use PBRVERSION here because we precreate the
+            # pbr sdist and point to it with find_links.
+            'pkgReq': {
+                'requirements.txt': textwrap.dedent(
+                    """\
+                    pbr
+                """
+                ),
+                'pkgReq/__init__.py': "",
+                'pkgReq/__main__.py': textwrap.dedent(
+                    """\
+                    print("FakeTest loaded and ran")
+                """
+                ),
+            },
+        }
+        pkg_dirs = self.useFixture(pbr_fixtures.Packages(pkgs)).package_dirs
+        test_pkg_dir = pkg_dirs['pkgTest']
+        req_pkg_dir = pkg_dirs['pkgReq']
+
+        self._run_cmd(
+            sys.executable,
+            ('setup.py', 'sdist', '-d', dist_dir),
+            allow_fail=False,
+            cwd=req_pkg_dir,
+        )
+        # A venv to test within
+        # We install setuptools because we rely on setup.py below.
+        # FIXME(stephenfin): We should not need to pin setuptools
+        # https://github.com/pypa/setuptools/commit/ef4cd2960d75f2d49f40f5495347523be62d20e5
+        venv = self.useFixture(
+            pbr_fixtures.Venv('nopbr', ['pip', 'wheel', 'setuptools<80'])
+        )
+        python = venv.python
+        # Install both packages
+        self.useFixture(
+            pbr_fixtures.CapturedSubprocess(
+                'nopbr', [python] + ['setup.py', 'install'], cwd=test_pkg_dir
+            )
+        )
+        # Execute code that should only be present if the install worked.
+        self.useFixture(
+            pbr_fixtures.CapturedSubprocess(
+                'nopbr', [python] + ['-m', 'pkgReq'], cwd=test_pkg_dir
+            )
+        )
+        pbr_cmd = os.path.join(venv.path, 'bin', 'pbr')
+        self.useFixture(
+            pbr_fixtures.CapturedSubprocess(
+                'nopbr', [pbr_cmd] + ['freeze'], cwd=test_pkg_dir
+            )
+        )
+
+
+# Handle various compatibility issues with pip and setuptools versions against
+# python3 versions. Unfortunately python3.12 in particular isn't very backward
+# compatible with older pip and setuptools releases.
+# TODO(clarkb) add other distros like EL9 and EL10
+if sys.version_info[0:3] < (3, 10, 0):
+    lts_scenarios = [
+        ('Bionic', {'modules': ['pip==9.0.1', 'setuptools==39.0.1']}),
+        ('Stretch', {'modules': ['pip==9.0.1', 'setuptools==33.1.1']}),
+        ('EL8', {'modules': ['pip==9.0.3', 'setuptools==39.2.0']}),
+        ('Buster', {'modules': ['pip==18.1', 'setuptools==40.8.0']}),
+        ('Focal', {'modules': ['pip==20.0.2', 'setuptools==45.2.0']}),
+    ]
+elif sys.version_info[0:3] < (3, 12, 0):
+    lts_scenarios = [
+        ('Bullseye', {'modules': ['pip==20.3.4', 'setuptools==52.0.0']}),
+        ('Bookworm', {'modules': ['pip==23.0.1', 'setuptools==66.1.1']}),
+        ('Focal', {'modules': ['pip==20.0.2', 'setuptools==45.2.0']}),
+        ('Jammy', {'modules': ['pip==22.0.2', 'setuptools==59.6.0']}),
+    ]
+else:
+    lts_scenarios = [
+        ('Noble', {'modules': ['pip==24.0.0', 'setuptools==68.1.2']}),
+    ]
+
+
+class TestMarkersPip(base.BaseTestCase):
+
+    scenarios = [
+        ('pip-latest', {'modules': ['pip', 'setuptools']})
+    ] + lts_scenarios
+
+    @testtools.skipUnless(
+        os.environ.get('PBR_INTEGRATION', None) == '1',
+        'integration tests not enabled',
+    )
+    def test_pip_versions(self):
+        pkgs = {
+            'test_markers': {
+                'requirements.txt': textwrap.dedent(
+                    """\
+                    pkg_a; python_version=='1.2'
+                    pkg_b; python_version!='1.2'
+                """
+                )
+            },
+            'pkg_a': {},
+            'pkg_b': {},
+        }
+        pkg_dirs = self.useFixture(pbr_fixtures.Packages(pkgs)).package_dirs
+        temp_dir = self.useFixture(fixtures.TempDir()).path
+        repo_dir = os.path.join(temp_dir, 'repo')
+        venv = self.useFixture(pbr_fixtures.Venv('markers'))
+        bin_python = venv.python
+        os.mkdir(repo_dir)
+        for module in self.modules:
+            self.useFixture(
+                pbr_fixtures.CapturedSubprocess(
+                    'pip-version',
+                    [bin_python, '-m', 'pip', 'install', '--upgrade', module],
+                    cwd=venv.path,
+                )
+            )
+        # TODO(clarkb) do we need to install PBR from source here to avoid
+        # using the latest release?
+        for pkg in pkg_dirs:
+            self._run_cmd(
+                bin_python,
+                ['setup.py', 'sdist', '-d', repo_dir],
+                cwd=pkg_dirs[pkg],
+                allow_fail=False,
+            )
+        self._run_cmd(
+            bin_python,
+            [
+                '-m',
+                'pip',
+                'install',
+                '--no-index',
+                '-f',
+                repo_dir,
+                'test_markers',
+            ],
+            cwd=venv.path,
+            allow_fail=False,
+        )
+        pkgs = self._run_cmd(
+            bin_python,
+            ['-m', 'pip', 'freeze'],
+            cwd=venv.path,
+            allow_fail=False,
+        )[0]
+        # Depending on the version of pip/setuptools etc. the name of the
+        # installed package may be normalized to 'pkg-b'. As of March 2024
+        # 'pkg_b' is what we get; previously 'pkg-b' was the result.
+        self.assertTrue('pkg_b' in pkgs or 'pkg-b' in pkgs)
+
+
+class TestLTSSupport(base.BaseTestCase):
+
+    scenarios = lts_scenarios
+
+    @testtools.skipUnless(
+        os.environ.get('PBR_INTEGRATION', None) == '1',
+        'integration tests not enabled',
+    )
+    def test_lts_venv_default_versions(self):
+        venv = self.useFixture(
+            pbr_fixtures.Venv('setuptools', modules=self.modules)
+        )
+        bin_python = venv.python
+        pbr = 'file://%s#egg=pbr' % pbr_fixtures.PBR_ROOT
+        # Installing PBR is a reasonable indication that we are not broken on
+        # this particular combination of setuptools and pip.
+        self.useFixture(
+            pbr_fixtures.CapturedSubprocess(
+                'lts-support',
+                [bin_python, '-m', 'pip', 'install', pbr],
+                cwd=venv.path,
+            )
+        )
diff -pruN 6.1.1-2/pbr/tests/functional/test_pbr_json.py 7.0.1-2/pbr/tests/functional/test_pbr_json.py
--- 6.1.1-2/pbr/tests/functional/test_pbr_json.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_pbr_json.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,53 @@
+# Copyright (c) 2013 New Dream Network, LLC (DreamHost)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+import os
+
+from pbr.tests.functional import base
+
+
+class TestPbrJson(base.BaseWheelTestCase):
+
+    def test_metadata_directory_has_pbr_json(self):
+        # Build the path to the scripts directory
+        pbr_json = os.path.join(
+            self.extracted_wheel_dir, 'pbr_testpackage-0.0.dist-info/pbr.json'
+        )
+        self.assertTrue(os.path.exists(pbr_json))
diff -pruN 6.1.1-2/pbr/tests/functional/test_pep517.py 7.0.1-2/pbr/tests/functional/test_pep517.py
--- 6.1.1-2/pbr/tests/functional/test_pep517.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_pep517.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,117 @@
+# Copyright (c) 2013 New Dream Network, LLC (DreamHost)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+import textwrap
+
+from pbr.tests import fixtures as pbr_fixtures
+from pbr.tests.functional import base
+
+
+class TestPEP517Support(base.BaseTestCase):
+    def test_pep_517_support(self):
+        # Note that the current PBR PEP517 entrypoints rely on a valid
+        # PBR setup.py existing.
+        pkgs = {
+            'test_pep517': {
+                'requirements.txt': textwrap.dedent(
+                    """\
+                        sphinx
+                        iso8601
+                    """
+                ),
+                # Override default setup.py to remove setup_requires.
+                'setup.py': textwrap.dedent(
+                    """\
+                        #!/usr/bin/env python
+                        import setuptools
+                        setuptools.setup(pbr=True)
+                    """
+                ),
+                'setup.cfg': textwrap.dedent(
+                    """\
+                        [metadata]
+                        name = test_pep517
+                        summary = A tiny test project
+                        author = PBR Team
+                        author_email = foo@example.com
+                        home_page = https://example.com/
+                        classifier =
+                            Intended Audience :: Information Technology
+                            Intended Audience :: System Administrators
+                            License :: OSI Approved :: Apache Software License
+                            Operating System :: POSIX :: Linux
+                            Programming Language :: Python
+                            Programming Language :: Python :: 2
+                            Programming Language :: Python :: 2.7
+                            Programming Language :: Python :: 3
+                            Programming Language :: Python :: 3.6
+                            Programming Language :: Python :: 3.7
+                            Programming Language :: Python :: 3.8
+                    """
+                ),
+                # note that we use 36.6.0 rather than 64.0.0 since the
+                # latter doesn't support Python < 3.8 and we run our tests
+                # against Python 2.7 still. That's okay since we're not
+                # testing PEP-660 functionality here (which requires the
+                # newer setuptools)
+                'pyproject.toml': textwrap.dedent(
+                    """\
+                        [build-system]
+                        requires = ["pbr", "setuptools>=36.6.0", "wheel"]
+                        build-backend = "pbr.build"
+                    """
+                ),
+            },
+        }
+        pkg_dirs = self.useFixture(pbr_fixtures.Packages(pkgs)).package_dirs
+        pkg_dir = pkg_dirs['test_pep517']
+        venv = self.useFixture(pbr_fixtures.Venv('PEP517'))
+
+        # Test that building sdists and wheels works. Note we do not use pip
+        # here because pip will forcefully install the latest version of PBR
+        # from PyPI to satisfy the build-system requires. This means we can't
+        # self-test changes using pip. Building with --no-isolation appears to
+        # avoid this problem.
+        self._run_cmd(
+            venv.python,
+            ('-m', 'build', '--no-isolation', '.'),
+            allow_fail=False,
+            cwd=pkg_dir,
+        )
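For context on the build invocation above: pbr.build is a PEP 517 backend, so
besides running python -m build the standard backend hooks can be called
directly, which is roughly what the frontend does once the build requirements
are available. A hedged sketch, assuming the standard PEP 517 hook names that
a build backend must expose (the project path is illustrative):

    import os

    import pbr.build  # the build-backend named in the pyproject.toml above

    # The backend reads setup.cfg/setup.py from the current directory.
    os.chdir('/path/to/test_pep517')  # illustrative path to the test project
    sdist_name = pbr.build.build_sdist('dist')
    wheel_name = pbr.build.build_wheel('dist')
    print(sdist_name, wheel_name)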
diff -pruN 6.1.1-2/pbr/tests/functional/test_requirements.py 7.0.1-2/pbr/tests/functional/test_requirements.py
--- 6.1.1-2/pbr/tests/functional/test_requirements.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_requirements.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,121 @@
+# Copyright (c) 2013 New Dream Network, LLC (DreamHost)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+#                    (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+import os
+import textwrap
+
+import pkg_resources
+
+from pbr.tests import fixtures as pbr_fixtures
+from pbr.tests.functional import base
+
+
+class TestRequirementParsing(base.BaseTestCase):
+
+    def test_requirement_parsing(self):
+        pkgs = {
+            'test_reqparse': {
+                'requirements.txt': textwrap.dedent(
+                    """\
+                        bar
+                        quux<1.0; python_version=='2.6'
+                        requests-aws>=0.1.4    # BSD License (3 clause)
+                        Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7'
+                        requests-kerberos>=0.6;python_version=='2.7' # MIT
+                    """
+                ),
+                'setup.cfg': textwrap.dedent(
+                    """\
+                        [metadata]
+                        name = test_reqparse
+
+                        [extras]
+                        test =
+                            foo
+                            baz>3.2 :python_version=='2.7' # MIT
+                            bar>3.3 :python_version=='2.7' # MIT # Apache
+                    """
+                ),
+            },
+        }
+        pkg_dirs = self.useFixture(pbr_fixtures.Packages(pkgs)).package_dirs
+        pkg_dir = pkg_dirs['test_reqparse']
+        # pkg_resources.split_sections uses None as the title of an
+        # anonymous section instead of the empty string. Weird.
+        expected_requirements = {
+            None: ['bar', 'requests-aws>=0.1.4'],
+            ":(python_version=='2.6')": ['quux<1.0'],
+            ":(python_version=='2.7')": [
+                'Routes!=2.0,!=2.1,>=1.12.3',
+                'requests-kerberos>=0.6',
+            ],
+            'test': ['foo'],
+            "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3'],
+        }
+        venv = self.useFixture(pbr_fixtures.Venv('reqParse'))
+        bin_python = venv.python
+        # Two things are tested by this
+        # 1) pbr properly parses markers from requirements.txt and setup.cfg
+        # 2) bdist_wheel causes pbr to not evaluate markers
+        self._run_cmd(
+            bin_python,
+            ('setup.py', 'bdist_wheel'),
+            allow_fail=False,
+            cwd=pkg_dir,
+        )
+        egg_info = os.path.join(pkg_dir, 'test_reqparse.egg-info')
+
+        requires_txt = os.path.join(egg_info, 'requires.txt')
+        with open(requires_txt, 'rt') as requires:
+            generated_requirements = dict(
+                pkg_resources.split_sections(requires)
+            )
+
+        # NOTE(dhellmann): We have to spell out the comparison because
+        # the rendering for version specifiers in a range is not
+        # consistent across versions of setuptools.
+
+        for section, expected in expected_requirements.items():
+            exp_parsed = [pkg_resources.Requirement.parse(s) for s in expected]
+            gen_parsed = [
+                pkg_resources.Requirement.parse(s)
+                for s in generated_requirements[section]
+            ]
+            self.assertEqual(exp_parsed, gen_parsed)
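For context on the requires.txt comparison above: setuptools records extras
and environment markers as bracketed section headers, and
pkg_resources.split_sections reports the unnamed leading block with a section
name of None, which is why the expected dictionary uses None as a key. A small
illustration, assuming a file shaped like the expected output:

    import pkg_resources

    requires_txt = """\
    bar
    requests-aws>=0.1.4

    [:(python_version=='2.6')]
    quux<1.0

    [test]
    foo
    """
    # split_sections yields (section, lines) pairs; surrounding whitespace is
    # stripped and the unnamed leading block is keyed as None rather than ''.
    sections = dict(pkg_resources.split_sections(requires_txt.splitlines()))
    print(sections)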
diff -pruN 6.1.1-2/pbr/tests/functional/test_wsgi_scripts.py 7.0.1-2/pbr/tests/functional/test_wsgi_scripts.py
--- 6.1.1-2/pbr/tests/functional/test_wsgi_scripts.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/functional/test_wsgi_scripts.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,189 @@
+# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP)
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import os
+import re
+import subprocess
+import sys
+import sysconfig
+
+from pbr._compat.five import urlopen
+from pbr.tests.functional import base
+
+
+class TestPackaging(base.BaseWheelTestCase):
+
+    def test_data_directory_has_wsgi_scripts(self):
+        # Build the path to the scripts directory
+        scripts_dir = os.path.join(
+            self.extracted_wheel_dir, 'pbr_testpackage-0.0.data/scripts'
+        )
+        self.assertTrue(os.path.exists(scripts_dir))
+        scripts = os.listdir(scripts_dir)
+
+        self.assertIn('pbr_test_wsgi', scripts)
+        self.assertIn('pbr_test_wsgi_with_class', scripts)
+        self.assertNotIn('pbr_test_cmd', scripts)
+        self.assertNotIn('pbr_test_cmd_with_class', scripts)
+
+
+class TestWsgiScripts(base.BaseTestCase):
+
+    cmd_names = ('pbr_test_wsgi', 'pbr_test_wsgi_with_class')
+
+    def _get_path(self):
+        if sys.version_info[0] > 3:
+            return sysconfig.get_path("platlib", vars={"base": self.temp_dir})
+        if os.path.isdir("%s/lib64" % self.temp_dir):
+            path = "%s/lib64" % self.temp_dir
+        elif os.path.isdir("%s/lib" % self.temp_dir):
+            path = "%s/lib" % self.temp_dir
+        elif os.path.isdir("%s/site-packages" % self.temp_dir):
+            return ".:%s/site-packages" % self.temp_dir
+        else:
+            raise Exception("Could not determine path for test")
+        return ".:%s/python%s.%s/site-packages" % (
+            path,
+            sys.version_info[0],
+            sys.version_info[1],
+        )
+
+    def test_wsgi_script_install(self):
+        """Test that we install a non-pkg-resources wsgi script."""
+        if os.name == 'nt':
+            self.skipTest('Windows support is passthrough')
+
+        stdout, _, return_code = self.run_setup(
+            'install', '--prefix=%s' % self.temp_dir
+        )
+
+        self._check_wsgi_install_content(stdout)
+
+    def test_wsgi_script_run(self):
+        """Test that we install a runnable wsgi script.
+
+        This test actually attempts to start and interact with the wsgi
+        script in question to demonstrate that it is a working wsgi script
+        served via wsgiref's simple server.
+
+        """
+        if os.name == 'nt':
+            self.skipTest('Windows support is passthrough')
+
+        stdout, _, return_code = self.run_setup(
+            'install', '--prefix=%s' % self.temp_dir
+        )
+
+        self._check_wsgi_install_content(stdout)
+
+        # Live test run the scripts and see that they respond to wsgi
+        # requests.
+        for cmd_name in self.cmd_names:
+            self._test_wsgi(cmd_name, b'Hello World')
+
+    def _test_wsgi(self, cmd_name, output, extra_args=None):
+        cmd = os.path.join(self.temp_dir, 'bin', cmd_name)
+        print("Running %s -p 0 -b 127.0.0.1" % cmd)
+        popen_cmd = [cmd, '-p', '0', '-b', '127.0.0.1']
+        if extra_args:
+            popen_cmd.extend(extra_args)
+
+        env = {'PYTHONPATH': self._get_path()}
+
+        p = subprocess.Popen(
+            popen_cmd,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            cwd=self.temp_dir,
+            env=env,
+        )
+        self.addCleanup(p.kill)
+
+        stdoutdata = p.stdout.readline()  # ****...
+
+        stdoutdata = p.stdout.readline()  # STARTING test server...
+        self.assertIn(b"STARTING test server pbr_testpackage.wsgi", stdoutdata)
+
+        stdoutdata = p.stdout.readline()  # Available at ...
+        print(stdoutdata)
+        m = re.search(br'(http://[^:]+:\d+)/', stdoutdata)
+        self.assertIsNotNone(m, "Regex failed to match on %s" % stdoutdata)
+
+        stdoutdata = p.stdout.readline()  # DANGER! ...
+        self.assertIn(
+            b"DANGER! For testing only, do not use in production", stdoutdata
+        )
+
+        stdoutdata = p.stdout.readline()  # ***...
+
+        f = urlopen(m.group(1).decode('utf-8'))
+        self.assertEqual(output, f.read())
+
+        # Request again so that the application can force stderr.flush(),
+        # otherwise the log is buffered and the next readline() will hang.
+        urlopen(m.group(1).decode('utf-8'))
+
+        stdoutdata = p.stderr.readline()
+        # we should have logged an HTTP request, return code 200, that
+        # returned the right amount of bytes
+        status = '"GET / HTTP/1.1" 200 %d' % len(output)
+        self.assertIn(status.encode('utf-8'), stdoutdata)
+
+    def _check_wsgi_install_content(self, install_stdout):
+        for cmd_name in self.cmd_names:
+            install_txt = 'Installing %s script to %s' % (
+                cmd_name,
+                self.temp_dir,
+            )
+            self.assertIn(install_txt, install_stdout)
+
+            cmd_filename = os.path.join(self.temp_dir, 'bin', cmd_name)
+
+            script_txt = open(cmd_filename, 'r').read()
+            self.assertNotIn('pkg_resources', script_txt)
+
+            main_block = """if __name__ == "__main__":
+    import argparse
+    import socket
+    import sys
+    import wsgiref.simple_server as wss"""
+
+            if cmd_name == 'pbr_test_wsgi':
+                app_name = "main"
+            else:
+                app_name = "WSGI.app"
+
+            starting_block = (
+                "STARTING test server pbr_testpackage.wsgi.%s" % app_name
+            )
+
+            else_block = """else:
+    application = None"""
+
+            self.assertIn(main_block, script_txt)
+            self.assertIn(starting_block, script_txt)
+            self.assertIn(else_block, script_txt)
+
+    def test_with_argument(self):
+        if os.name == 'nt':
+            self.skipTest('Windows support is passthrough')
+
+        stdout, _, return_code = self.run_setup(
+            'install', '--prefix=%s' % self.temp_dir
+        )
+
+        self._test_wsgi('pbr_test_wsgi', b'Foo Bar', ["--", "-c", "Foo Bar"])
diff -pruN 6.1.1-2/pbr/tests/test_commands.py 7.0.1-2/pbr/tests/test_commands.py
--- 6.1.1-2/pbr/tests/test_commands.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_commands.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,84 +0,0 @@
-# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Copyright (C) 2013 Association of Universities for Research in Astronomy
-#                    (AURA)
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-#     1. Redistributions of source code must retain the above copyright
-#        notice, this list of conditions and the following disclaimer.
-#
-#     2. Redistributions in binary form must reproduce the above
-#        copyright notice, this list of conditions and the following
-#        disclaimer in the documentation and/or other materials provided
-#        with the distribution.
-#
-#     3. The name of AURA and its representatives may not be used to
-#        endorse or promote products derived from this software without
-#        specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
-# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-
-from testtools import content
-
-from pbr.tests import base
-
-
-class TestCommands(base.BaseTestCase):
-    def test_custom_build_py_command(self):
-        """Test custom build_py command.
-
-        Test that a custom subclass of the build_py command runs when listed in
-        the commands [global] option, rather than the normal build command.
-        """
-
-        stdout, stderr, return_code = self.run_setup('build_py')
-        self.addDetail('stdout', content.text_content(stdout))
-        self.addDetail('stderr', content.text_content(stderr))
-        self.assertIn('Running custom build_py command.', stdout)
-        self.assertEqual(0, return_code)
-
-    def test_custom_deb_version_py_command(self):
-        """Test custom deb_version command."""
-        stdout, stderr, return_code = self.run_setup('deb_version')
-        self.addDetail('stdout', content.text_content(stdout))
-        self.addDetail('stderr', content.text_content(stderr))
-        self.assertIn('Extracting deb version', stdout)
-        self.assertEqual(0, return_code)
-
-    def test_custom_rpm_version_py_command(self):
-        """Test custom rpm_version command."""
-        stdout, stderr, return_code = self.run_setup('rpm_version')
-        self.addDetail('stdout', content.text_content(stdout))
-        self.addDetail('stderr', content.text_content(stderr))
-        self.assertIn('Extracting rpm version', stdout)
-        self.assertEqual(0, return_code)
-
-    def test_freeze_command(self):
-        """Test that freeze output is sorted in a case-insensitive manner."""
-        stdout, stderr, return_code = self.run_pbr('freeze')
-        self.assertEqual(0, return_code)
-        pkgs = []
-        for line in stdout.split('\n'):
-            pkgs.append(line.split('==')[0].lower())
-        pkgs_sort = sorted(pkgs[:])
-        self.assertEqual(pkgs_sort, pkgs)
diff -pruN 6.1.1-2/pbr/tests/test_core.py 7.0.1-2/pbr/tests/test_core.py
--- 6.1.1-2/pbr/tests/test_core.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_core.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,154 +0,0 @@
-# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Copyright (C) 2013 Association of Universities for Research in Astronomy
-#                    (AURA)
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-#     1. Redistributions of source code must retain the above copyright
-#        notice, this list of conditions and the following disclaimer.
-#
-#     2. Redistributions in binary form must reproduce the above
-#        copyright notice, this list of conditions and the following
-#        disclaimer in the documentation and/or other materials provided
-#        with the distribution.
-#
-#     3. The name of AURA and its representatives may not be used to
-#        endorse or promote products derived from this software without
-#        specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
-# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-
-import glob
-import os
-import sys
-import tarfile
-
-import fixtures
-
-from pbr.tests import base
-
-
-class TestCore(base.BaseTestCase):
-
-    cmd_names = ('pbr_test_cmd', 'pbr_test_cmd_with_class')
-
-    def check_script_install(self, install_stdout):
-        for cmd_name in self.cmd_names:
-            install_txt = 'Installing %s script to %s' % (cmd_name,
-                                                          self.temp_dir)
-            self.assertIn(install_txt, install_stdout)
-
-            cmd_filename = os.path.join(self.temp_dir, cmd_name)
-
-            script_txt = open(cmd_filename, 'r').read()
-            self.assertNotIn('pkg_resources', script_txt)
-
-            stdout, _, return_code = self._run_cmd(cmd_filename)
-            self.assertIn("PBR", stdout)
-
-    def test_setup_py_keywords(self):
-        """setup.py --keywords.
-
-        Test that the `./setup.py --keywords` command returns the correct
-        value without balking.
-        """
-
-        self.run_setup('egg_info')
-        stdout, _, _ = self.run_setup('--keywords')
-        assert stdout == 'packaging, distutils, setuptools'
-
-    def test_sdist_extra_files(self):
-        """Test that the extra files are correctly added."""
-
-        stdout, _, return_code = self.run_setup('sdist', '--formats=gztar')
-
-        # There can be only one
-        try:
-            tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
-        except IndexError:
-            assert False, 'source dist not found'
-
-        tf = tarfile.open(tf_path)
-        names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]
-
-        self.assertIn('extra-file.txt', names)
-
-    def test_console_script_install(self):
-        """Test that we install a non-pkg-resources console script."""
-
-        if os.name == 'nt':
-            self.skipTest('Windows support is passthrough')
-
-        stdout, _, return_code = self.run_setup(
-            'install_scripts', '--install-dir=%s' % self.temp_dir)
-
-        self.useFixture(
-            fixtures.EnvironmentVariable('PYTHONPATH', '.'))
-
-        self.check_script_install(stdout)
-
-    def test_console_script_develop(self):
-        """Test that we develop a non-pkg-resources console script."""
-
-        if sys.version_info < (3, 0):
-            self.skipTest(
-                'Fails with recent virtualenv due to '
-                'https://github.com/pypa/virtualenv/issues/1638'
-            )
-
-        if os.name == 'nt':
-            self.skipTest('Windows support is passthrough')
-
-        self.useFixture(
-            fixtures.EnvironmentVariable(
-                'PYTHONPATH', ".:%s" % self.temp_dir))
-
-        stdout, _, return_code = self.run_setup(
-            'develop', '--install-dir=%s' % self.temp_dir)
-
-        self.check_script_install(stdout)
-
-
-class TestGitSDist(base.BaseTestCase):
-
-    def setUp(self):
-        super(TestGitSDist, self).setUp()
-
-        stdout, _, return_code = self._run_cmd('git', ('init',))
-        if return_code:
-            self.skipTest("git not installed")
-
-        stdout, _, return_code = self._run_cmd('git', ('add', '.'))
-        stdout, _, return_code = self._run_cmd(
-            'git', ('commit', '-m', 'Turn this into a git repo'))
-
-        stdout, _, return_code = self.run_setup('sdist', '--formats=gztar')
-
-    def test_sdist_git_extra_files(self):
-        """Test that extra files found in git are correctly added."""
-        # There can be only one
-        tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
-        tf = tarfile.open(tf_path)
-        names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]
-
-        self.assertIn('git-extra-file.txt', names)
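
The removed TestCore and TestGitSDist cases above exercised pbr's sdist behaviour: extra files (and, in a git checkout, any git-tracked file) end up in the generated tarball. A minimal stand-alone sketch of the same check, assuming a single freshly built tarball under dist/, could look like:

    import glob
    import os
    import tarfile

    # Assumes `python setup.py sdist` (or an equivalent build) already
    # produced exactly one dist/*.tar.gz for the package under test.
    tarball = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
    with tarfile.open(tarball) as tf:
        # Strip the leading "<name>-<version>/" component, as the tests do.
        names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]
    assert 'extra-file.txt' in names
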
diff -pruN 6.1.1-2/pbr/tests/test_files.py 7.0.1-2/pbr/tests/test_files.py
--- 6.1.1-2/pbr/tests/test_files.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_files.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+from __future__ import absolute_import
 from __future__ import print_function
 
 import os
@@ -21,6 +22,7 @@ import fixtures
 
 from pbr.hooks import files
 from pbr.tests import base
+from pbr.tests import fixtures as pbr_fixtures
 
 
 class FilesConfigTest(base.BaseTestCase):
@@ -29,20 +31,25 @@ class FilesConfigTest(base.BaseTestCase)
         super(FilesConfigTest, self).setUp()
 
         pkg_fixture = fixtures.PythonPackage(
-            "fake_package", [
+            "fake_package",
+            [
                 ("fake_module.py", b""),
                 ("other_fake_module.py", b""),
-            ])
+            ],
+        )
         self.useFixture(pkg_fixture)
         pkg_etc = os.path.join(pkg_fixture.base, 'etc')
-        pkg_ansible = os.path.join(pkg_fixture.base, 'ansible',
-                                   'kolla-ansible', 'test')
+        pkg_ansible = os.path.join(
+            pkg_fixture.base, 'ansible', 'kolla-ansible', 'test'
+        )
         dir_spcs = os.path.join(pkg_fixture.base, 'dir with space')
-        dir_subdir_spc = os.path.join(pkg_fixture.base, 'multi space',
-                                      'more spaces')
+        dir_subdir_spc = os.path.join(
+            pkg_fixture.base, 'multi space', 'more spaces'
+        )
         pkg_sub = os.path.join(pkg_etc, 'sub')
         subpackage = os.path.join(
-            pkg_fixture.base, 'fake_package', 'subpackage')
+            pkg_fixture.base, 'fake_package', 'subpackage'
+        )
         os.makedirs(pkg_sub)
         os.makedirs(subpackage)
         os.makedirs(pkg_ansible)
@@ -61,58 +68,44 @@ class FilesConfigTest(base.BaseTestCase)
         with open(os.path.join(dir_subdir_spc, "file with spc"), 'w') as file_:
             file_.write("# empty")
 
-        self.useFixture(base.DiveDir(pkg_fixture.base))
+        self.useFixture(pbr_fixtures.Chdir(pkg_fixture.base))
 
     def test_implicit_auto_package(self):
-        config = dict(
-            files=dict(
-            )
-        )
+        config = {'files': {}}
         files.FilesConfig(config, 'fake_package').run()
         self.assertIn('subpackage', config['files']['packages'])
 
     def test_auto_package(self):
-        config = dict(
-            files=dict(
-                packages='fake_package',
-            )
-        )
+        config = {'files': {'packages': 'fake_package'}}
         files.FilesConfig(config, 'fake_package').run()
         self.assertIn('subpackage', config['files']['packages'])
 
     def test_data_files_globbing(self):
-        config = dict(
-            files=dict(
-                data_files="\n  etc/pbr = etc/*"
-            )
-        )
+        config = {'files': {'data_files': '\n  etc/pbr = etc/*'}}
         files.FilesConfig(config, 'fake_package').run()
         self.assertIn(
             "\n'etc/pbr/' = \n 'etc/foo'\n'etc/pbr/sub' = \n 'etc/sub/bar'",
-            config['files']['data_files'])
+            config['files']['data_files'],
+        )
 
     def test_data_files_with_spaces(self):
-        config = dict(
-            files=dict(
-                data_files="\n  'i like spaces' = 'dir with space'/*"
-            )
-        )
+        config = {
+            'files': {'data_files': "\n  'i like spaces' = 'dir with space'/*"}
+        }
         files.FilesConfig(config, 'fake_package').run()
         self.assertIn(
             "\n'i like spaces/' = \n 'dir with space/file with spc'",
-            config['files']['data_files'])
+            config['files']['data_files'],
+        )
 
     def test_data_files_with_spaces_subdirectories(self):
         # test that we can handle whitespace in subdirectories
         data_files = "\n 'one space/two space' = 'multi space/more spaces'/*"
         expected = (
             "\n'one space/two space/' = "
-            "\n 'multi space/more spaces/file with spc'")
-        config = dict(
-            files=dict(
-                data_files=data_files
-            )
+            "\n 'multi space/more spaces/file with spc'"
         )
+        config = {'files': {'data_files': data_files}}
         files.FilesConfig(config, 'fake_package').run()
         self.assertIn(expected, config['files']['data_files'])
 
@@ -121,28 +114,23 @@ class FilesConfigTest(base.BaseTestCase)
         data_files = (
             "\n'one space'/'two space' = 'multi space'/'more spaces'/*"
         )
-        expected = ("\n'one space/two space/' = "
-                    "\n 'multi space/more spaces/file with spc'")
-        config = dict(
-            files=dict(
-                data_files=data_files
-            )
+        expected = (
+            "\n'one space/two space/' = "
+            "\n 'multi space/more spaces/file with spc'"
         )
+        config = {'files': {'data_files': data_files}}
         files.FilesConfig(config, 'fake_package').run()
         self.assertIn(expected, config['files']['data_files'])
 
     def test_data_files_globbing_source_prefix_in_directory_name(self):
         # We want to test that the string, "docs", is not replaced in a
         # subdirectory name, "sub-docs"
-        config = dict(
-            files=dict(
-                data_files="\n  share/ansible = ansible/*"
-            )
-        )
+        config = {'files': {'data_files': "\n  share/ansible = ansible/*"}}
         files.FilesConfig(config, 'fake_package').run()
         self.assertIn(
             "\n'share/ansible/' = "
             "\n'share/ansible/kolla-ansible' = "
             "\n'share/ansible/kolla-ansible/test' = "
             "\n 'ansible/kolla-ansible/test/baz'",
-            config['files']['data_files'])
+            config['files']['data_files'],
+        )
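
The FilesConfig tests above drive pbr.hooks.files.FilesConfig with a setup.cfg-style [files] mapping and assert on the expanded data_files value. A minimal sketch of that expansion, assuming it is run from a package root that actually contains an etc/ tree, could look like:

    from pbr.hooks import files

    # 'etc/pbr = etc/*' asks pbr to glob everything under etc/ into the
    # etc/pbr install target; run() rewrites data_files in place with the
    # per-file mapping the assertions above check for.
    config = {'files': {'data_files': '\n  etc/pbr = etc/*'}}
    files.FilesConfig(config, 'fake_package').run()
    print(config['files']['data_files'])
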
diff -pruN 6.1.1-2/pbr/tests/test_git.py 7.0.1-2/pbr/tests/test_git.py
--- 6.1.1-2/pbr/tests/test_git.py	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_git.py	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,310 @@
+# Copyright (c) 2011 OpenStack Foundation
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import os
+import sys
+
+import fixtures
+
+from pbr._compat.five import BytesIO
+from pbr import git
+from pbr import options
+from pbr.tests import base
+
+if sys.version_info >= (3, 3):
+    from unittest import mock
+else:
+    import mock  # noqa
+
+
+class TestGitIsInstalled(base.BaseTestCase):
+
+    def testGitIsInstalled(self):
+        with mock.patch.object(git, '_run_shell_command') as _command:
+            _command.return_value = 'git version 1.8.4.1'
+            self.assertEqual(True, git._git_is_installed())
+
+    def testGitIsNotInstalled(self):
+        with mock.patch.object(git, '_run_shell_command') as _command:
+            _command.side_effect = OSError
+            self.assertEqual(False, git._git_is_installed())
+
+
+class SkipFileWrites(base.BaseTestCase):
+
+    scenarios = [
+        (
+            'changelog_option_true',
+            {
+                'option_key': 'skip_changelog',
+                'option_value': 'True',
+                'env_key': 'SKIP_WRITE_GIT_CHANGELOG',
+                'env_value': None,
+                'pkg_func': git.write_git_changelog,
+                'filename': 'ChangeLog',
+            },
+        ),
+        (
+            'changelog_option_false',
+            {
+                'option_key': 'skip_changelog',
+                'option_value': 'False',
+                'env_key': 'SKIP_WRITE_GIT_CHANGELOG',
+                'env_value': None,
+                'pkg_func': git.write_git_changelog,
+                'filename': 'ChangeLog',
+            },
+        ),
+        (
+            'changelog_env_true',
+            {
+                'option_key': 'skip_changelog',
+                'option_value': 'False',
+                'env_key': 'SKIP_WRITE_GIT_CHANGELOG',
+                'env_value': 'True',
+                'pkg_func': git.write_git_changelog,
+                'filename': 'ChangeLog',
+            },
+        ),
+        (
+            'changelog_both_true',
+            {
+                'option_key': 'skip_changelog',
+                'option_value': 'True',
+                'env_key': 'SKIP_WRITE_GIT_CHANGELOG',
+                'env_value': 'True',
+                'pkg_func': git.write_git_changelog,
+                'filename': 'ChangeLog',
+            },
+        ),
+        (
+            'authors_option_true',
+            {
+                'option_key': 'skip_authors',
+                'option_value': 'True',
+                'env_key': 'SKIP_GENERATE_AUTHORS',
+                'env_value': None,
+                'pkg_func': git.generate_authors,
+                'filename': 'AUTHORS',
+            },
+        ),
+        (
+            'authors_option_false',
+            {
+                'option_key': 'skip_authors',
+                'option_value': 'False',
+                'env_key': 'SKIP_GENERATE_AUTHORS',
+                'env_value': None,
+                'pkg_func': git.generate_authors,
+                'filename': 'AUTHORS',
+            },
+        ),
+        (
+            'authors_env_true',
+            {
+                'option_key': 'skip_authors',
+                'option_value': 'False',
+                'env_key': 'SKIP_GENERATE_AUTHORS',
+                'env_value': 'True',
+                'pkg_func': git.generate_authors,
+                'filename': 'AUTHORS',
+            },
+        ),
+        (
+            'authors_both_true',
+            {
+                'option_key': 'skip_authors',
+                'option_value': 'True',
+                'env_key': 'SKIP_GENERATE_AUTHORS',
+                'env_value': 'True',
+                'pkg_func': git.generate_authors,
+                'filename': 'AUTHORS',
+            },
+        ),
+    ]
+
+    def setUp(self):
+        super(SkipFileWrites, self).setUp()
+        self.temp_path = self.useFixture(fixtures.TempDir()).path
+        self.root_dir = os.path.abspath(os.path.curdir)
+        self.git_dir = os.path.join(self.root_dir, ".git")
+        if not os.path.exists(self.git_dir):
+            self.skipTest(
+                "%s is missing; skipping git-related checks" % self.git_dir
+            )
+            return
+        self.filename = os.path.join(self.temp_path, self.filename)
+        self.option_dict = {}
+        if self.option_key is not None:
+            self.option_dict[self.option_key] = (
+                'setup.cfg',
+                self.option_value,
+            )
+        self.useFixture(
+            fixtures.EnvironmentVariable(self.env_key, self.env_value)
+        )
+
+    def test_skip(self):
+        self.pkg_func(
+            git_dir=self.git_dir,
+            dest_dir=self.temp_path,
+            option_dict=self.option_dict,
+        )
+        self.assertEqual(
+            not os.path.exists(self.filename),
+            (
+                self.option_value.lower() in options.TRUE_VALUES
+                or self.env_value is not None
+            ),
+        )
+
+
+_changelog_content = """7780758\x00Break parser\x00 (tag: refs/tags/1_foo.1)
+04316fe\x00Make python\x00 (refs/heads/review/monty_taylor/27519)
+378261a\x00Add an integration test script.\x00
+3c373ac\x00Merge "Lib\x00 (HEAD, tag: refs/tags/2013.2.rc2, tag: refs/tags/2013.2, refs/heads/mile-proposed)
+182feb3\x00Fix pip invocation for old versions of pip.\x00 (tag: refs/tags/0.5.17)
+fa4f46e\x00Remove explicit depend on distribute.\x00 (tag: refs/tags/0.5.16)
+d1c53dd\x00Use pip instead of easy_install for installation.\x00
+a793ea1\x00Merge "Skip git-checkout related tests when .git is missing"\x00
+6c27ce7\x00Skip git-checkout related tests when .git is missing\x00
+451e513\x00Bug fix: create_stack() fails when waiting\x00
+4c8cfe4\x00Improve test coverage: network delete API\x00 (tag: refs/tags/(evil))
+d7e6167\x00Bug fix: Fix pass thru filtering in list_networks\x00 (tag: refs/tags/ev()il)
+c47ec15\x00Consider 'in-use' a non-pending volume for caching\x00 (tag: refs/tags/ev)il)
+8696fbd\x00Improve test coverage: private extension API\x00 (tag: refs/tags/ev(il)
+f0440f8\x00Improve test coverage: hypervisor list\x00 (tag: refs/tags/e(vi)l)
+04984a5\x00Refactor hooks file.\x00 (HEAD, tag: 0.6.7,b, tag: refs/tags/(12), refs/heads/master)
+a65e8ee\x00Remove jinja pin.\x00 (tag: refs/tags/0.5.14, tag: refs/tags/0.5.13)
+"""  # noqa
+
+
+def _make_old_git_changelog_format(line):
+    """Convert post-1.8.1 git log format to pre-1.8.1 git log format"""
+
+    if not line.strip():
+        return line
+    sha, msg, refname = line.split('\x00')
+    refname = refname.replace('tag: ', '')
+    return '\x00'.join((sha, msg, refname))
+
+
+_old_git_changelog_content = '\n'.join(
+    _make_old_git_changelog_format(line)
+    for line in _changelog_content.split('\n')
+)
+
+
+class GitLogsTest(base.BaseTestCase):
+
+    scenarios = [
+        ('pre1.8.3', {'changelog': _old_git_changelog_content}),
+        ('post1.8.3', {'changelog': _changelog_content}),
+    ]
+
+    def setUp(self):
+        super(GitLogsTest, self).setUp()
+        self.temp_path = self.useFixture(fixtures.TempDir()).path
+        self.root_dir = os.path.abspath(os.path.curdir)
+        self.git_dir = os.path.join(self.root_dir, ".git")
+        self.useFixture(fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS'))
+        self.useFixture(
+            fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG')
+        )
+
+    def test_write_git_changelog(self):
+        self.useFixture(
+            fixtures.FakePopen(
+                lambda _: {"stdout": BytesIO(self.changelog.encode('utf-8'))}
+            )
+        )
+
+        git.write_git_changelog(git_dir=self.git_dir, dest_dir=self.temp_path)
+
+        with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh:
+            changelog_contents = ch_fh.read()
+            self.assertIn("2013.2", changelog_contents)
+            self.assertIn("0.5.17", changelog_contents)
+            self.assertIn("------", changelog_contents)
+            self.assertIn("Refactor hooks file", changelog_contents)
+            self.assertIn(
+                r"Bug fix: create\_stack() fails when waiting",
+                changelog_contents,
+            )
+            self.assertNotIn("Refactor hooks file.", changelog_contents)
+            self.assertNotIn("182feb3", changelog_contents)
+            self.assertNotIn("review/monty_taylor/27519", changelog_contents)
+            self.assertNotIn("0.5.13", changelog_contents)
+            self.assertNotIn("0.6.7", changelog_contents)
+            self.assertNotIn("12", changelog_contents)
+            self.assertNotIn("(evil)", changelog_contents)
+            self.assertNotIn("ev()il", changelog_contents)
+            self.assertNotIn("ev(il", changelog_contents)
+            self.assertNotIn("ev)il", changelog_contents)
+            self.assertNotIn("e(vi)l", changelog_contents)
+            self.assertNotIn('Merge "', changelog_contents)
+            self.assertNotIn(r'1\_foo.1', changelog_contents)
+
+    def test_generate_authors(self):
+        author_old = u"Foo Foo <email@foo.com>"
+        author_new = u"Bar Bar <email@bar.com>"
+        co_author = u"Foo Bar <foo@bar.com>"
+        co_author_by = u"Co-authored-by: " + co_author
+
+        git_log_cmd = (
+            "git --git-dir=%s log --format=%%aN <%%aE>" % self.git_dir
+        )
+        git_co_log_cmd = "git --git-dir=%s log" % self.git_dir
+        git_top_level = "git rev-parse --show-toplevel"
+        cmd_map = {
+            git_log_cmd: author_new,
+            git_co_log_cmd: co_author_by,
+            git_top_level: self.root_dir,
+        }
+
+        exist_files = [
+            self.git_dir,
+            os.path.join(self.temp_path, "AUTHORS.in"),
+        ]
+        self.useFixture(
+            fixtures.MonkeyPatch(
+                "os.path.exists",
+                lambda path: os.path.abspath(path) in exist_files,
+            )
+        )
+
+        def _fake_run_shell_command(cmd, **kwargs):
+            return cmd_map[" ".join(cmd)]
+
+        self.useFixture(
+            fixtures.MonkeyPatch(
+                "pbr.git._run_shell_command", _fake_run_shell_command
+            )
+        )
+
+        with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh:
+            auth_fh.write("%s\n" % author_old)
+
+        git.generate_authors(git_dir=self.git_dir, dest_dir=self.temp_path)
+
+        with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh:
+            authors = auth_fh.read()
+            self.assertIn(author_old, authors)
+            self.assertIn(author_new, authors)
+            self.assertIn(co_author, authors)
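
The SkipFileWrites scenarios above cover both ways of suppressing the generated ChangeLog and AUTHORS files: an environment variable or a setup.cfg-derived option. A minimal sketch, assuming a git checkout and a writable destination directory, could look like:

    import os

    from pbr import git

    # Either mechanism on its own is enough; the scenarios above test each
    # combination. The option_dict value mirrors the (source, value) tuple
    # form used in the test setUp above.
    os.environ['SKIP_WRITE_GIT_CHANGELOG'] = 'True'
    git.write_git_changelog(
        git_dir='.git',
        dest_dir='.',
        option_dict={'skip_changelog': ('setup.cfg', 'True')},
    )
    # With either the variable set or the option true, no ChangeLog is written.
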
diff -pruN 6.1.1-2/pbr/tests/test_hooks.py 7.0.1-2/pbr/tests/test_hooks.py
--- 6.1.1-2/pbr/tests/test_hooks.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_hooks.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,75 +0,0 @@
-# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Copyright (C) 2013 Association of Universities for Research in Astronomy
-#                    (AURA)
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-#     1. Redistributions of source code must retain the above copyright
-#        notice, this list of conditions and the following disclaimer.
-#
-#     2. Redistributions in binary form must reproduce the above
-#        copyright notice, this list of conditions and the following
-#        disclaimer in the documentation and/or other materials provided
-#        with the distribution.
-#
-#     3. The name of AURA and its representatives may not be used to
-#        endorse or promote products derived from this software without
-#        specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
-# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-
-import os
-
-from testtools import matchers
-from testtools import skipUnless
-
-from pbr import testr_command
-from pbr.tests import base
-from pbr.tests import util
-
-
-class TestHooks(base.BaseTestCase):
-    def setUp(self):
-        super(TestHooks, self).setUp()
-        with util.open_config(
-                os.path.join(self.package_dir, 'setup.cfg')) as cfg:
-            cfg.set('global', 'setup-hooks',
-                    'pbr_testpackage._setup_hooks.test_hook_1\n'
-                    'pbr_testpackage._setup_hooks.test_hook_2')
-
-    def test_global_setup_hooks(self):
-        """Test setup_hooks.
-
-        Test that setup_hooks listed in the [global] section of setup.cfg are
-        executed in order.
-        """
-
-        stdout, _, return_code = self.run_setup('egg_info')
-        assert 'test_hook_1\ntest_hook_2' in stdout
-        assert return_code == 0
-
-    @skipUnless(testr_command.have_testr, "testrepository not available")
-    def test_custom_commands_known(self):
-        stdout, _, return_code = self.run_setup('--help-commands')
-        self.assertFalse(return_code)
-        self.assertThat(stdout, matchers.Contains(" testr "))
diff -pruN 6.1.1-2/pbr/tests/test_integration.py 7.0.1-2/pbr/tests/test_integration.py
--- 6.1.1-2/pbr/tests/test_integration.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_integration.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,320 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-try:
-    import configparser
-except ImportError:
-    import ConfigParser as configparser
-import os.path
-import pkg_resources
-import shlex
-import sys
-
-import fixtures
-import testtools
-import textwrap
-
-from pbr.tests import base
-from pbr.tests import test_packaging
-
-PIPFLAGS = shlex.split(os.environ.get('PIPFLAGS', ''))
-PIPVERSION = os.environ.get('PIPVERSION', 'pip')
-PBRVERSION = os.environ.get('PBRVERSION', 'pbr')
-REPODIR = os.environ.get('REPODIR', '')
-WHEELHOUSE = os.environ.get('WHEELHOUSE', '')
-PIP_CMD = ['-m', 'pip'] + PIPFLAGS + ['install', '-f', WHEELHOUSE]
-PROJECTS = shlex.split(os.environ.get('PROJECTS', ''))
-PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
-
-
-def all_projects():
-    if not REPODIR:
-        return
-    # Future: make this path parameterisable.
-    excludes = set(['tempest', 'requirements'])
-    for name in PROJECTS:
-        name = name.strip()
-        short_name = name.split('/')[-1]
-        try:
-            with open(os.path.join(
-                    REPODIR, short_name, 'setup.py'), 'rt') as f:
-                if 'pbr' not in f.read():
-                    continue
-        except IOError:
-            continue
-        if short_name in excludes:
-            continue
-        yield (short_name, dict(name=name, short_name=short_name))
-
-
-class TestIntegration(base.BaseTestCase):
-
-    scenarios = list(all_projects())
-
-    def setUp(self):
-        # Integration tests need a higher default - big repos can be slow to
-        # clone, particularly under guest load.
-        env = fixtures.EnvironmentVariable(
-            'OS_TEST_TIMEOUT', os.environ.get('OS_TEST_TIMEOUT', '600'))
-        with env:
-            super(TestIntegration, self).setUp()
-        base._config_git()
-
-    @testtools.skipUnless(
-        os.environ.get('PBR_INTEGRATION', None) == '1',
-        'integration tests not enabled')
-    def test_integration(self):
-        # Test that we can:
-        # - run sdist from the repo in a venv
-        # - install the resulting tarball in a new venv
-        # - pip install the repo
-        # - pip install -e the repo
-        # We don't break these into separate tests because we'd need separate
-        # source dirs to isolate from side effects of running pip, and the
-        # overheads of setup would start to beat the benefits of parallelism.
-        path = os.path.join(REPODIR, self.short_name)
-        setup_cfg = os.path.join(path, 'setup.cfg')
-        project_name = pkg_resources.safe_name(self.short_name).lower()
-        # These projects should all have setup.cfg files but we'll be careful
-        if os.path.exists(setup_cfg):
-            config = configparser.ConfigParser()
-            config.read(setup_cfg)
-            if config.has_section('metadata'):
-                raw_name = config.get('metadata', 'name',
-                                      fallback='notapackagename')
-                # Technically we should really only need to use the raw
-                # name because all our projects should be good and use
-                # normalized names but they don't...
-                project_name = pkg_resources.safe_name(raw_name).lower()
-        constraints = os.path.join(REPODIR, 'requirements',
-                                   'upper-constraints.txt')
-        tmp_constraints = os.path.join(
-            self.useFixture(fixtures.TempDir()).path,
-            'upper-constraints.txt')
-        # We need to filter out the package we are installing to avoid
-        # conflicts with the constraints.
-        with open(constraints, 'r') as src:
-            with open(tmp_constraints, 'w') as dest:
-                for line in src:
-                    constraint = line.split('===')[0]
-                    if project_name != constraint:
-                        dest.write(line)
-        pip_cmd = PIP_CMD + ['-c', tmp_constraints]
-
-        venv = self.useFixture(
-            test_packaging.Venv('sdist',
-                                modules=['pip', 'wheel', PBRVERSION],
-                                pip_cmd=PIP_CMD))
-        python = venv.python
-        self.useFixture(base.CapturedSubprocess(
-            'sdist', [python, 'setup.py', 'sdist'], cwd=path))
-        venv = self.useFixture(
-            test_packaging.Venv('tarball',
-                                modules=['pip', 'wheel', PBRVERSION],
-                                pip_cmd=PIP_CMD))
-        python = venv.python
-        filename = os.path.join(
-            path, 'dist', os.listdir(os.path.join(path, 'dist'))[0])
-        self.useFixture(base.CapturedSubprocess(
-            'tarball', [python] + pip_cmd + [filename]))
-        venv = self.useFixture(
-            test_packaging.Venv('install-git',
-                                modules=['pip', 'wheel', PBRVERSION],
-                                pip_cmd=PIP_CMD))
-        root = venv.path
-        python = venv.python
-        self.useFixture(base.CapturedSubprocess(
-            'install-git', [python] + pip_cmd + ['git+file://' + path]))
-        if self.short_name == 'nova':
-            found = False
-            for _, _, filenames in os.walk(root):
-                if 'alembic.ini' in filenames:
-                    found = True
-            self.assertTrue(found)
-        venv = self.useFixture(
-            test_packaging.Venv('install-e',
-                                modules=['pip', 'wheel', PBRVERSION],
-                                pip_cmd=PIP_CMD))
-        root = venv.path
-        python = venv.python
-        self.useFixture(base.CapturedSubprocess(
-            'install-e', [python] + pip_cmd + ['-e', path]))
-
-
-class TestInstallWithoutPbr(base.BaseTestCase):
-
-    # TODO(clarkb) This test should be reimagined with modern packaging tools
-    # and expectations.
-    @testtools.skipUnless(
-        os.environ.get('PBR_INTEGRATION', None) == '1',
-        'integration tests not enabled')
-    def test_install_without_pbr(self):
-        # Test easy-install of a thing that depends on a thing using pbr
-        tempdir = self.useFixture(fixtures.TempDir()).path
-        # A directory containing sdists of the things we're going to depend on
-        # in using-package.
-        dist_dir = os.path.join(tempdir, 'distdir')
-        os.mkdir(dist_dir)
-        self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir),
-                      allow_fail=False, cwd=PBR_ROOT)
-        # testpkg - this requires a pbr-using package
-        test_pkg_dir = os.path.join(tempdir, 'testpkg')
-        os.mkdir(test_pkg_dir)
-        pkgs = {
-            'pkgTest': {
-                'setup.py': textwrap.dedent("""\
-                    #!/usr/bin/env python
-                    import setuptools
-                    setuptools.setup(
-                        name = 'pkgTest',
-                        # TODO(clarkb) should we use a random prefix to
-                        # avoid collisions?
-                        install_requires = ['pkgReq'],
-                    )
-                """),
-                'setup.cfg': textwrap.dedent("""\
-                    [easy_install]
-                    find_links = %s
-                """ % dist_dir)},
-            # We don't need to use PBRVERSION here because we precreate the
-            # pbr sdist and point to it with find_links.
-            'pkgReq': {
-                'requirements.txt': textwrap.dedent("""\
-                    pbr
-                """),
-                'pkgReq/__init__.py': "",
-                'pkgReq/__main__.py': textwrap.dedent("""\
-                    print("FakeTest loaded and ran")
-                """)},
-        }
-        pkg_dirs = self.useFixture(
-            test_packaging.CreatePackages(pkgs)).package_dirs
-        test_pkg_dir = pkg_dirs['pkgTest']
-        req_pkg_dir = pkg_dirs['pkgReq']
-
-        self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir),
-                      allow_fail=False, cwd=req_pkg_dir)
-        # A venv to test within
-        # We install setuptools because we rely on setup.py below.
-        venv = self.useFixture(test_packaging.Venv('nopbr',
-                                                   ['pip', 'wheel',
-                                                    'setuptools']))
-        python = venv.python
-        # Install both packages
-        self.useFixture(base.CapturedSubprocess(
-            'nopbr', [python] + ['setup.py', 'install'], cwd=test_pkg_dir))
-        # Execute code that should only be present if the install worked.
-        self.useFixture(base.CapturedSubprocess(
-            'nopbr', [python] + ['-m', 'pkgReq'], cwd=test_pkg_dir))
-        pbr_cmd = os.path.join(venv.path, 'bin', 'pbr')
-        self.useFixture(base.CapturedSubprocess(
-            'nopbr', [pbr_cmd] + ['freeze'], cwd=test_pkg_dir))
-
-
-# Handle various compatibility issues with pip and setuptools versions against
-# python3 versions. Unfortunately python3.12 in particular isn't very backward
-# compatible with pip and setuptools.
-# TODO(clarkb) add other distros like EL9 and EL10
-if sys.version_info[0:3] < (3, 10, 0):
-    lts_scenarios = [
-        ('Bionic', {'modules': ['pip==9.0.1', 'setuptools==39.0.1']}),
-        ('Stretch', {'modules': ['pip==9.0.1', 'setuptools==33.1.1']}),
-        ('EL8', {'modules': ['pip==9.0.3', 'setuptools==39.2.0']}),
-        ('Buster', {'modules': ['pip==18.1', 'setuptools==40.8.0']}),
-        ('Focal', {'modules': ['pip==20.0.2', 'setuptools==45.2.0']}),
-    ]
-elif sys.version_info[0:3] < (3, 12, 0):
-    lts_scenarios = [
-        ('Bullseye', {'modules': ['pip==20.3.4', 'setuptools==52.0.0']}),
-        ('Bookworm', {'modules': ['pip==23.0.1', 'setuptools==66.1.1']}),
-        ('Focal', {'modules': ['pip==20.0.2', 'setuptools==45.2.0']}),
-        ('Jammy', {'modules': ['pip==22.0.2', 'setuptools==59.6.0']}),
-    ]
-else:
-    lts_scenarios = [
-        ('Noble', {'modules': ['pip==24.0.0', 'setuptools==68.1.2']}),
-    ]
-
-
-class TestMarkersPip(base.BaseTestCase):
-
-    scenarios = [
-        ('pip-latest', {'modules': ['pip', 'setuptools']})
-    ] + lts_scenarios
-
-    @testtools.skipUnless(
-        os.environ.get('PBR_INTEGRATION', None) == '1',
-        'integration tests not enabled',
-    )
-    def test_pip_versions(self):
-        pkgs = {
-            'test_markers':
-                {'requirements.txt': textwrap.dedent("""\
-                    pkg_a; python_version=='1.2'
-                    pkg_b; python_version!='1.2'
-                """)},
-            'pkg_a': {},
-            'pkg_b': {},
-        }
-        pkg_dirs = self.useFixture(
-            test_packaging.CreatePackages(pkgs)).package_dirs
-        temp_dir = self.useFixture(fixtures.TempDir()).path
-        repo_dir = os.path.join(temp_dir, 'repo')
-        venv = self.useFixture(test_packaging.Venv('markers'))
-        bin_python = venv.python
-        os.mkdir(repo_dir)
-        for module in self.modules:
-            self.useFixture(base.CapturedSubprocess(
-                'pip-version',
-                [bin_python, '-m', 'pip', 'install', '--upgrade', module],
-                cwd=venv.path))
-        # TODO(clarkb) do we need to install PBR from source here to avoid
-        # using the latest release?
-        for pkg in pkg_dirs:
-            self._run_cmd(
-                bin_python, ['setup.py', 'sdist', '-d', repo_dir],
-                cwd=pkg_dirs[pkg], allow_fail=False)
-        self._run_cmd(
-            bin_python,
-            ['-m', 'pip', 'install', '--no-index', '-f', repo_dir,
-             'test_markers'],
-            cwd=venv.path, allow_fail=False)
-        pkgs = self._run_cmd(
-            bin_python,
-            ['-m', 'pip', 'freeze'],
-            cwd=venv.path, allow_fail=False)[0]
-        # Depending on the version of pip/setuptools etc the name of the
-        # installed package may be normalized to 'pkg-b'. As of March 2024
-        # 'pkg_b' is what we get and previously 'pkg-b' was the result.
-        self.assertTrue('pkg_b' in pkgs or 'pkg-b' in pkgs)
-
-
-class TestLTSSupport(base.BaseTestCase):
-
-    scenarios = lts_scenarios
-
-    @testtools.skipUnless(
-        os.environ.get('PBR_INTEGRATION', None) == '1',
-        'integration tests not enabled',
-    )
-    def test_lts_venv_default_versions(self):
-        venv = self.useFixture(
-            test_packaging.Venv('setuptools', modules=self.modules))
-        bin_python = venv.python
-        pbr = 'file://%s#egg=pbr' % PBR_ROOT
-        # Installing PBR is a reasonable indication that we are not broken on
-        # this particular combination of setuptools and pip.
-        self.useFixture(base.CapturedSubprocess(
-            'lts-support',
-            [bin_python, '-m', 'pip', 'install', pbr],
-            cwd=venv.path))
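
Both integration test classes above are parameterised through testscenarios-style scenarios lists (and are further gated on PBR_INTEGRATION=1). A minimal stand-alone sketch of that pattern, with illustrative names and pins, could look like:

    import testscenarios

    class PipVersionScenarios(testscenarios.TestWithScenarios):
        # Each (name, attrs) pair becomes one concrete test case with the
        # attrs dict copied onto the instance, which is how the pip/setuptools
        # matrices above fan out across distro baselines.
        scenarios = [
            ('pip-latest', {'modules': ['pip', 'setuptools']}),
            ('Focal', {'modules': ['pip==20.0.2', 'setuptools==45.2.0']}),
        ]

        def test_has_modules(self):
            self.assertTrue(self.modules)
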
diff -pruN 6.1.1-2/pbr/tests/test_packaging.py 7.0.1-2/pbr/tests/test_packaging.py
--- 6.1.1-2/pbr/tests/test_packaging.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_packaging.py	2025-08-14 16:07:35.000000000 +0000
@@ -38,471 +38,27 @@
 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 
-import email
+from __future__ import absolute_import
+from __future__ import print_function
+
 import email.errors
 import os
 import re
-import sysconfig
+import sys
 import tempfile
-import textwrap
 
 import fixtures
-try:
-    from unittest import mock
-except ImportError:
-    import mock
-import pkg_resources
 import testscenarios
-import testtools
 from testtools import matchers
-import virtualenv
-from wheel import wheelfile
 
-from pbr import git
 from pbr import packaging
 from pbr.tests import base
+from pbr.tests import fixtures as pbr_fixtures
 
-try:
-    import importlib.machinery
-    get_suffixes = importlib.machinery.all_suffixes
-# NOTE(JayF): ModuleNotFoundError only exists in Python 3.6+, not in 2.7
-except ImportError:
-    import imp
-    # NOTE(JayF) imp.get_suffixes returns a list of three-tuples;
-    # we need the first value from each tuple.
-
-    def get_suffixes():
-        return [x[0] for x in imp.get_suffixes()]
-
-
-PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
-
-
-class TestRepo(fixtures.Fixture):
-    """A git repo for testing with.
-
-    Use of TempHomeDir with this fixture is strongly recommended: due to the
-    lack of config --local in older gits, it will otherwise write to the
-    user's global configuration.
-    """
-
-    def __init__(self, basedir):
-        super(TestRepo, self).__init__()
-        self._basedir = basedir
-
-    def setUp(self):
-        super(TestRepo, self).setUp()
-        base._run_cmd(['git', 'init', '.'], self._basedir)
-        base._config_git()
-        base._run_cmd(['git', 'add', '.'], self._basedir)
-
-    def commit(self, message_content='test commit'):
-        files = len(os.listdir(self._basedir))
-        path = self._basedir + '/%d' % files
-        open(path, 'wt').close()
-        base._run_cmd(['git', 'add', path], self._basedir)
-        base._run_cmd(['git', 'commit', '-m', message_content], self._basedir)
-
-    def uncommit(self):
-        base._run_cmd(['git', 'reset', '--hard', 'HEAD^'], self._basedir)
-
-    def tag(self, version):
-        base._run_cmd(
-            ['git', 'tag', '-sm', 'test tag', version], self._basedir)
-
-
-class GPGKeyFixture(fixtures.Fixture):
-    """Creates a GPG key for testing.
-
-    It's recommended that this be used in concert with a unique home
-    directory.
-    """
-
-    def setUp(self):
-        super(GPGKeyFixture, self).setUp()
-        tempdir = self.useFixture(fixtures.TempDir())
-        gnupg_version_re = re.compile(r'^gpg\s.*\s([\d+])\.([\d+])\.([\d+])')
-        gnupg_version = base._run_cmd(['gpg', '--version'], tempdir.path)
-        for line in gnupg_version[0].split('\n'):
-            gnupg_version = gnupg_version_re.match(line)
-            if gnupg_version:
-                gnupg_version = (int(gnupg_version.group(1)),
-                                 int(gnupg_version.group(2)),
-                                 int(gnupg_version.group(3)))
-                break
-        else:
-            if gnupg_version is None:
-                gnupg_version = (0, 0, 0)
-
-        config_file = os.path.join(tempdir.path, 'key-config')
-        with open(config_file, 'wt') as f:
-            if gnupg_version[0] == 2 and gnupg_version[1] >= 1:
-                f.write("""
-                %no-protection
-                %transient-key
-                """)
-            f.write("""
-            %no-ask-passphrase
-            Key-Type: RSA
-            Name-Real: Example Key
-            Name-Comment: N/A
-            Name-Email: example@example.com
-            Expire-Date: 2d
-            %commit
-            """)
-
-        # Note that --quick-random (--debug-quick-random in GnuPG 2.x)
-        # does not have a corresponding preferences file setting and
-        # must be passed explicitly on the command line instead
-        if gnupg_version[0] == 1:
-            gnupg_random = '--quick-random'
-        elif gnupg_version[0] >= 2:
-            gnupg_random = '--debug-quick-random'
-        else:
-            gnupg_random = ''
-
-        base._run_cmd(
-            ['gpg', '--gen-key', '--batch', gnupg_random, config_file],
-            tempdir.path)
-
-
-class Venv(fixtures.Fixture):
-    """Create a virtual environment for testing with.
-
-    :attr path: The path to the environment root.
-    :attr python: The path to the python binary in the environment.
-    """
-
-    def __init__(self, reason, modules=(), pip_cmd=None):
-        """Create a Venv fixture.
-
-        :param reason: A human readable string to bake into the venv
-            file path to aid diagnostics in the case of failures.
-        :param modules: A list of modules to install, defaults to latest
-            pip, wheel, and the working copy of PBR.
-        :attr pip_cmd: A list to override the default pip_cmd passed to
-            python for installing base packages.
-        """
-        self._reason = reason
-        if modules == ():
-            modules = ['pip', 'wheel', 'build', 'setuptools', PBR_ROOT]
-        self.modules = modules
-        if pip_cmd is None:
-            self.pip_cmd = ['-m', 'pip', '-v', 'install']
-        else:
-            self.pip_cmd = pip_cmd
-
-    def _setUp(self):
-        path = self.useFixture(fixtures.TempDir()).path
-        virtualenv.cli_run([path])
-
-        python = os.path.join(path, 'bin', 'python')
-        command = [python] + self.pip_cmd + ['-U']
-        if self.modules and len(self.modules) > 0:
-            command.extend(self.modules)
-            self.useFixture(base.CapturedSubprocess(
-                'mkvenv-' + self._reason, command))
-        self.addCleanup(delattr, self, 'path')
-        self.addCleanup(delattr, self, 'python')
-        self.path = path
-        self.python = python
-        return path, python
-
-
-class CreatePackages(fixtures.Fixture):
-    """Creates packages from dict with defaults
-
-        :param package_dirs: A dict of package name to directory strings
-        {'pkg_a': '/tmp/path/to/tmp/pkg_a', 'pkg_b': '/tmp/path/to/tmp/pkg_b'}
-    """
-
-    defaults = {
-        'setup.py': textwrap.dedent(u"""\
-            #!/usr/bin/env python
-            import setuptools
-            setuptools.setup(
-                setup_requires=['pbr'],
-                pbr=True,
-            )
-        """),
-        'setup.cfg': textwrap.dedent(u"""\
-            [metadata]
-            name = {pkg_name}
-        """)
-    }
-
-    def __init__(self, packages):
-        """Creates packages from dict with defaults
-
-            :param packages: a dict mapping each package name to a second,
-            possibly empty, dict whose keys are filenames and whose values
-            are the file contents as strings, e.g.
-            {'package-a': {'requirements.txt': 'string', 'setup.cfg': 'string'}}
-        """
-        self.packages = packages
-
-    def _writeFile(self, directory, file_name, contents):
-        path = os.path.abspath(os.path.join(directory, file_name))
-        path_dir = os.path.dirname(path)
-        if not os.path.exists(path_dir):
-            if path_dir.startswith(directory):
-                os.makedirs(path_dir)
-            else:
-                raise ValueError
-        with open(path, 'wt') as f:
-            f.write(contents)
-
-    def _setUp(self):
-        tmpdir = self.useFixture(fixtures.TempDir()).path
-        package_dirs = {}
-        for pkg_name in self.packages:
-            pkg_path = os.path.join(tmpdir, pkg_name)
-            package_dirs[pkg_name] = pkg_path
-            os.mkdir(pkg_path)
-            for cf in ['setup.py', 'setup.cfg']:
-                if cf in self.packages[pkg_name]:
-                    contents = self.packages[pkg_name].pop(cf)
-                else:
-                    contents = self.defaults[cf].format(pkg_name=pkg_name)
-                self._writeFile(pkg_path, cf, contents)
-
-            for cf in self.packages[pkg_name]:
-                self._writeFile(pkg_path, cf, self.packages[pkg_name][cf])
-            self.useFixture(TestRepo(pkg_path)).commit()
-        self.addCleanup(delattr, self, 'package_dirs')
-        self.package_dirs = package_dirs
-        return package_dirs
-
-
-class TestPackagingInGitRepoWithCommit(base.BaseTestCase):
-
-    scenarios = [
-        ('preversioned', dict(preversioned=True)),
-        ('postversioned', dict(preversioned=False)),
-    ]
-
-    def setUp(self):
-        super(TestPackagingInGitRepoWithCommit, self).setUp()
-        self.repo = self.useFixture(TestRepo(self.package_dir))
-        self.repo.commit()
-
-    def test_authors(self):
-        self.run_setup('sdist', allow_fail=False)
-        # One commit, something should be in the authors list
-        with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f:
-            body = f.read()
-        self.assertNotEqual(body, '')
-
-    def test_changelog(self):
-        self.run_setup('sdist', allow_fail=False)
-        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
-            body = f.read()
-        # One commit, something should be in the ChangeLog list
-        self.assertNotEqual(body, '')
-
-    def test_changelog_handles_astrisk(self):
-        self.repo.commit(message_content="Allow *.openstack.org to work")
-        self.run_setup('sdist', allow_fail=False)
-        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
-            body = f.read()
-        self.assertIn(r'\*', body)
-
-    def test_changelog_handles_dead_links_in_commit(self):
-        self.repo.commit(message_content="See os_ for to_do about qemu_.")
-        self.run_setup('sdist', allow_fail=False)
-        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
-            body = f.read()
-        self.assertIn(r'os\_', body)
-        self.assertIn(r'to\_do', body)
-        self.assertIn(r'qemu\_', body)
-
-    def test_changelog_handles_backticks(self):
-        self.repo.commit(message_content="Allow `openstack.org` to `work")
-        self.run_setup('sdist', allow_fail=False)
-        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
-            body = f.read()
-        self.assertIn(r'\`', body)
-
-    def test_manifest_exclude_honoured(self):
-        self.run_setup('sdist', allow_fail=False)
-        with open(os.path.join(
-                self.package_dir,
-                'pbr_testpackage.egg-info/SOURCES.txt'), 'r') as f:
-            body = f.read()
-        self.assertThat(
-            body, matchers.Not(matchers.Contains('pbr_testpackage/extra.py')))
-        self.assertThat(body, matchers.Contains('pbr_testpackage/__init__.py'))
-
-    def test_install_writes_changelog(self):
-        stdout, _, _ = self.run_setup(
-            'install', '--root', self.temp_dir + 'installed',
-            allow_fail=False)
-        self.expectThat(stdout, matchers.Contains('Generating ChangeLog'))
-
-
-class TestExtrafileInstallation(base.BaseTestCase):
-    def test_install_glob(self):
-        stdout, _, _ = self.run_setup(
-            'install', '--root', self.temp_dir + 'installed',
-            allow_fail=False)
-        self.expectThat(
-            stdout, matchers.Contains('copying data_files/a.txt'))
-        self.expectThat(
-            stdout, matchers.Contains('copying data_files/b.txt'))
-
-
-class TestPackagingInGitRepoWithoutCommit(base.BaseTestCase):
-
-    def setUp(self):
-        super(TestPackagingInGitRepoWithoutCommit, self).setUp()
-        self.useFixture(TestRepo(self.package_dir))
-        self.run_setup('sdist', allow_fail=False)
-
-    def test_authors(self):
-        # No commits, no authors in list
-        with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f:
-            body = f.read()
-        self.assertEqual('\n', body)
-
-    def test_changelog(self):
-        # No commits, nothing should be in the ChangeLog list
-        with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
-            body = f.read()
-        self.assertEqual('CHANGES\n=======\n\n', body)
-
-
-class TestPackagingWheels(base.BaseTestCase):
-
-    def setUp(self):
-        super(TestPackagingWheels, self).setUp()
-        self.useFixture(TestRepo(self.package_dir))
-        # Build the wheel
-        self.run_setup('bdist_wheel', allow_fail=False)
-        # Slowly construct the path to the generated whl
-        dist_dir = os.path.join(self.package_dir, 'dist')
-        relative_wheel_filename = os.listdir(dist_dir)[0]
-        absolute_wheel_filename = os.path.join(
-            dist_dir, relative_wheel_filename)
-        wheel_file = wheelfile.WheelFile(absolute_wheel_filename)
-        wheel_name = wheel_file.parsed_filename.group('namever')
-        # Create a directory path to unpack the wheel to
-        self.extracted_wheel_dir = os.path.join(dist_dir, wheel_name)
-        # Extract the wheel contents to the directory we just created
-        wheel_file.extractall(self.extracted_wheel_dir)
-        wheel_file.close()
-
-    def test_metadata_directory_has_pbr_json(self):
-        # Build the path to the scripts directory
-        pbr_json = os.path.join(
-            self.extracted_wheel_dir, 'pbr_testpackage-0.0.dist-info/pbr.json')
-        self.assertTrue(os.path.exists(pbr_json))
-
-    def test_data_directory_has_wsgi_scripts(self):
-        # Build the path to the scripts directory
-        scripts_dir = os.path.join(
-            self.extracted_wheel_dir, 'pbr_testpackage-0.0.data/scripts')
-        self.assertTrue(os.path.exists(scripts_dir))
-        scripts = os.listdir(scripts_dir)
-
-        self.assertIn('pbr_test_wsgi', scripts)
-        self.assertIn('pbr_test_wsgi_with_class', scripts)
-        self.assertNotIn('pbr_test_cmd', scripts)
-        self.assertNotIn('pbr_test_cmd_with_class', scripts)
-
-    def test_generates_c_extensions(self):
-        built_package_dir = os.path.join(
-            self.extracted_wheel_dir, 'pbr_testpackage')
-        static_object_filename = 'testext.so'
-        soabi = get_soabi()
-        if soabi:
-            static_object_filename = 'testext.{0}.so'.format(soabi)
-        static_object_path = os.path.join(
-            built_package_dir, static_object_filename)
-
-        self.assertTrue(os.path.exists(built_package_dir))
-        self.assertTrue(os.path.exists(static_object_path))
-
-
-class TestPackagingHelpers(testtools.TestCase):
-
-    def test_generate_script(self):
-        group = 'console_scripts'
-        entry_point = pkg_resources.EntryPoint(
-            name='test-ep',
-            module_name='pbr.packaging',
-            attrs=('LocalInstallScripts',))
-        header = '#!/usr/bin/env fake-header\n'
-        template = ('%(group)s %(module_name)s %(import_target)s '
-                    '%(invoke_target)s')
-
-        generated_script = packaging.generate_script(
-            group, entry_point, header, template)
-
-        expected_script = (
-            '#!/usr/bin/env fake-header\nconsole_scripts pbr.packaging '
-            'LocalInstallScripts LocalInstallScripts'
-        )
-        self.assertEqual(expected_script, generated_script)
-
-    def test_generate_script_validates_expectations(self):
-        group = 'console_scripts'
-        entry_point = pkg_resources.EntryPoint(
-            name='test-ep',
-            module_name='pbr.packaging')
-        header = '#!/usr/bin/env fake-header\n'
-        template = ('%(group)s %(module_name)s %(import_target)s '
-                    '%(invoke_target)s')
-        self.assertRaises(
-            ValueError, packaging.generate_script, group, entry_point, header,
-            template)
-
-        entry_point = pkg_resources.EntryPoint(
-            name='test-ep',
-            module_name='pbr.packaging',
-            attrs=('attr1', 'attr2', 'attr3'))
-        self.assertRaises(
-            ValueError, packaging.generate_script, group, entry_point, header,
-            template)
-
-
-class TestPackagingInPlainDirectory(base.BaseTestCase):
-
-    def setUp(self):
-        super(TestPackagingInPlainDirectory, self).setUp()
-
-    def test_authors(self):
-        self.run_setup('sdist', allow_fail=False)
-        # Not a git repo, no AUTHORS file created
-        filename = os.path.join(self.package_dir, 'AUTHORS')
-        self.assertFalse(os.path.exists(filename))
-
-    def test_changelog(self):
-        self.run_setup('sdist', allow_fail=False)
-        # Not a git repo, no ChangeLog created
-        filename = os.path.join(self.package_dir, 'ChangeLog')
-        self.assertFalse(os.path.exists(filename))
-
-    def test_install_no_ChangeLog(self):
-        stdout, _, _ = self.run_setup(
-            'install', '--root', self.temp_dir + 'installed',
-            allow_fail=False)
-        self.expectThat(
-            stdout, matchers.Not(matchers.Contains('Generating ChangeLog')))
-
-
-class TestPresenceOfGit(base.BaseTestCase):
-
-    def testGitIsInstalled(self):
-        with mock.patch.object(git,
-                               '_run_shell_command') as _command:
-            _command.return_value = 'git version 1.8.4.1'
-            self.assertEqual(True, git._git_is_installed())
-
-    def testGitIsNotInstalled(self):
-        with mock.patch.object(git,
-                               '_run_shell_command') as _command:
-            _command.side_effect = OSError
-            self.assertEqual(False, git._git_is_installed())
+if sys.version_info >= (3, 3):
+    from unittest import mock
+else:
+    import mock  # noqa
 
 
 class ParseRequirementsTest(base.BaseTestCase):
@@ -520,8 +76,9 @@ class ParseRequirementsTest(base.BaseTes
         # the defaults are relative to where pbr is called from so we need to
         # override them. This is OK, however, as we want to validate that
         # defaults are used - not what those defaults are
-        with mock.patch.object(packaging, 'REQUIREMENTS_FILES', (
-                requirements,)):
+        with mock.patch.object(
+            packaging, 'REQUIREMENTS_FILES', (requirements,)
+        ):
             result = packaging.parse_requirements()
         self.assertEqual(['pbr'], result)
 
@@ -531,19 +88,20 @@ class ParseRequirementsTest(base.BaseTes
         with open(tmp_file, 'w') as fh:
             fh.write("foo\nbar")
         self.useFixture(
-            fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', tmp_file))
-        self.assertEqual(['foo', 'bar'],
-                         packaging.parse_requirements())
+            fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', tmp_file)
+        )
+        self.assertEqual(['foo', 'bar'], packaging.parse_requirements())
 
     def test_override_with_env_multiple_files(self):
         _, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup')
         with open(tmp_file, 'w') as fh:
             fh.write("foo\nbar")
         self.useFixture(
-            fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
-                                         "no-such-file," + tmp_file))
-        self.assertEqual(['foo', 'bar'],
-                         packaging.parse_requirements())
+            fixtures.EnvironmentVariable(
+                'PBR_REQUIREMENTS_FILES', "no-such-file," + tmp_file
+            )
+        )
+        self.assertEqual(['foo', 'bar'], packaging.parse_requirements())
 
     def test_index_present(self):
         tempdir = tempfile.mkdtemp()
@@ -573,87 +131,111 @@ class ParseRequirementsTestScenarios(bas
 
     versioned_scenarios = [
         ('non-versioned', {'versioned': False, 'expected': ['bar']}),
-        ('versioned', {'versioned': True, 'expected': ['bar>=1.2.3']})
+        ('versioned', {'versioned': True, 'expected': ['bar>=1.2.3']}),
     ]
 
     subdirectory_scenarios = [
         ('non-subdirectory', {'has_subdirectory': False}),
-        ('has-subdirectory', {'has_subdirectory': True})
+        ('has-subdirectory', {'has_subdirectory': True}),
     ]
 
     scenarios = [
         ('normal', {'url': "foo\nbar", 'expected': ['foo', 'bar']}),
-        ('normal_with_comments', {
-            'url': "# this is a comment\nfoo\n# and another one\nbar",
-            'expected': ['foo', 'bar']}),
+        (
+            'normal_with_comments',
+            {
+                'url': "# this is a comment\nfoo\n# and another one\nbar",
+                'expected': ['foo', 'bar'],
+            },
+        ),
         ('removes_index_lines', {'url': '-f foobar', 'expected': []}),
     ]
 
-    scenarios = scenarios + testscenarios.multiply_scenarios([
-        ('ssh_egg_url', {'url': 'git+ssh://foo.com/zipball#egg=bar'}),
-        ('git_https_egg_url', {'url': 'git+https://foo.com/zipball#egg=bar'}),
-        ('http_egg_url', {'url': 'https://foo.com/zipball#egg=bar'}),
-    ], versioned_scenarios, subdirectory_scenarios)
+    scenarios = scenarios + testscenarios.multiply_scenarios(
+        [
+            ('ssh_egg_url', {'url': 'git+ssh://foo.com/zipball#egg=bar'}),
+            (
+                'git_https_egg_url',
+                {'url': 'git+https://foo.com/zipball#egg=bar'},
+            ),
+            ('http_egg_url', {'url': 'https://foo.com/zipball#egg=bar'}),
+        ],
+        versioned_scenarios,
+        subdirectory_scenarios,
+    )
 
     scenarios = scenarios + testscenarios.multiply_scenarios(
         [
-            ('git_egg_url',
-                {'url': 'git://foo.com/zipball#egg=bar', 'name': 'bar'})
-        ], [
+            (
+                'git_egg_url',
+                {'url': 'git://foo.com/zipball#egg=bar', 'name': 'bar'},
+            )
+        ],
+        [
             ('non-editable', {'editable': False}),
             ('editable', {'editable': True}),
         ],
-        versioned_scenarios, subdirectory_scenarios)
+        versioned_scenarios,
+        subdirectory_scenarios,
+    )
 
     def test_parse_requirements(self):
         tmp_file = tempfile.NamedTemporaryFile()
         req_string = self.url
         if hasattr(self, 'editable') and self.editable:
-            req_string = ("-e %s" % req_string)
+            req_string = "-e %s" % req_string
         if hasattr(self, 'versioned') and self.versioned:
-            req_string = ("%s-1.2.3" % req_string)
+            req_string = "%s-1.2.3" % req_string
         if hasattr(self, 'has_subdirectory') and self.has_subdirectory:
-            req_string = ("%s&subdirectory=baz" % req_string)
+            req_string = "%s&subdirectory=baz" % req_string
         with open(tmp_file.name, 'w') as fh:
             fh.write(req_string)
-        self.assertEqual(self.expected,
-                         packaging.parse_requirements([tmp_file.name]))
+        self.assertEqual(
+            self.expected, packaging.parse_requirements([tmp_file.name])
+        )
 
 
 class ParseDependencyLinksTest(base.BaseTestCase):
 
     def setUp(self):
         super(ParseDependencyLinksTest, self).setUp()
-        _, self.tmp_file = tempfile.mkstemp(prefix="openstack",
-                                            suffix=".setup")
+        _, self.tmp_file = tempfile.mkstemp(
+            prefix="openstack", suffix=".setup"
+        )
 
     def test_parse_dependency_normal(self):
         with open(self.tmp_file, "w") as fh:
             fh.write("http://test.com\n")
         self.assertEqual(
             ["http://test.com"],
-            packaging.parse_dependency_links([self.tmp_file]))
+            packaging.parse_dependency_links([self.tmp_file]),
+        )
 
     def test_parse_dependency_with_git_egg_url(self):
         with open(self.tmp_file, "w") as fh:
             fh.write("-e git://foo.com/zipball#egg=bar")
         self.assertEqual(
             ["git://foo.com/zipball#egg=bar"],
-            packaging.parse_dependency_links([self.tmp_file]))
+            packaging.parse_dependency_links([self.tmp_file]),
+        )
 
 
 class TestVersions(base.BaseTestCase):
 
     scenarios = [
-        ('preversioned', dict(preversioned=True)),
-        ('postversioned', dict(preversioned=False)),
+        ('preversioned', {'preversioned': True}),
+        ('postversioned', {'preversioned': False}),
     ]
 
     def setUp(self):
         super(TestVersions, self).setUp()
-        self.repo = self.useFixture(TestRepo(self.package_dir))
-        self.useFixture(GPGKeyFixture())
-        self.useFixture(base.DiveDir(self.package_dir))
+        self.repo = self.useFixture(pbr_fixtures.GitRepo(self.package_dir))
+        self.useFixture(pbr_fixtures.GPGKey())
+        self.useFixture(pbr_fixtures.Chdir(self.package_dir))
+
+    def tearDown(self):
+        super(TestVersions, self).tearDown()
+        os.environ.pop('SKIP_WRITE_GIT_CHANGELOG', None)
 
     def test_email_parsing_errors_are_handled(self):
         mocked_open = mock.mock_open()
@@ -661,9 +243,11 @@ class TestVersions(base.BaseTestCase):
             with mock.patch('email.message_from_file') as message_from_file:
                 message_from_file.side_effect = [
                     email.errors.MessageError('Test'),
-                    {'Name': 'pbr_testpackage'}]
+                    {'Name': 'pbr_testpackage'},
+                ]
                 version = packaging._get_version_from_pkg_metadata(
-                    'pbr_testpackage')
+                    'pbr_testpackage'
+                )
 
         self.assertTrue(message_from_file.called)
         self.assertIsNone(version)
@@ -713,12 +297,7 @@ class TestVersions(base.BaseTestCase):
     def test_leading_space_multiline(self):
         self.repo.commit()
         self.repo.tag('1.2.3')
-        self.repo.commit(
-            (
-                '   Some cool text\n'
-                '   sem-ver: api-break'
-            )
-        )
+        self.repo.commit(('   Some cool text\n   sem-ver: api-break'))
         version = packaging._get_version_from_git()
         self.assertThat(version, matchers.StartsWith('2.0.0.dev1'))
 
@@ -813,7 +392,8 @@ class TestVersions(base.BaseTestCase):
         # Note that we can't target 1.2.3 anymore - with 1.2.3 released we
         # need to be working on 1.2.4.
         err = self.assertRaises(
-            ValueError, packaging._get_version_from_git, '1.2.3')
+            ValueError, packaging._get_version_from_git, '1.2.3'
+        )
         self.assertThat(err.args[0], matchers.StartsWith('git history'))
 
     def test_preversion_too_low_semver_headers(self):
@@ -825,7 +405,8 @@ class TestVersions(base.BaseTestCase):
         # Note that we can't target 1.2.4, the feature header means we need
         # to be working on 1.3.0 or above.
         err = self.assertRaises(
-            ValueError, packaging._get_version_from_git, '1.2.4')
+            ValueError, packaging._get_version_from_git, '1.2.4'
+        )
         self.assertThat(err.args[0], matchers.StartsWith('git history'))
 
     def test_get_kwargs_corner_cases(self):
@@ -837,19 +418,20 @@ class TestVersions(base.BaseTestCase):
 
         def _check_combinations(tag):
             self.repo.commit()
-            self.assertEqual(dict(), get_kwargs(tag))
+            self.assertEqual({}, get_kwargs(tag))
             self.repo.commit('sem-ver: bugfix')
-            self.assertEqual(dict(), get_kwargs(tag))
+            self.assertEqual({}, get_kwargs(tag))
             self.repo.commit('sem-ver: feature')
-            self.assertEqual(dict(minor=True), get_kwargs(tag))
+            self.assertEqual({'minor': True}, get_kwargs(tag))
             self.repo.uncommit()
             self.repo.commit('sem-ver: deprecation')
-            self.assertEqual(dict(minor=True), get_kwargs(tag))
+            self.assertEqual({'minor': True}, get_kwargs(tag))
             self.repo.uncommit()
             self.repo.commit('sem-ver: api-break')
-            self.assertEqual(dict(major=True), get_kwargs(tag))
+            self.assertEqual({'major': True}, get_kwargs(tag))
             self.repo.commit('sem-ver: deprecation')
-            self.assertEqual(dict(major=True, minor=True), get_kwargs(tag))
+            self.assertEqual({'major': True, 'minor': True}, get_kwargs(tag))
+
         _check_combinations('')
         self.repo.tag('1.2.3')
         _check_combinations('1.2.3')
@@ -914,165 +496,41 @@ class TestVersions(base.BaseTestCase):
         version = packaging._get_version_from_git('1.2.3')
         self.assertEqual('1.2.3', version)
 
-    def tearDown(self):
-        super(TestVersions, self).tearDown()
-        os.environ.pop('SKIP_WRITE_GIT_CHANGELOG', None)
-
-
-class TestRequirementParsing(base.BaseTestCase):
-
-    def test_requirement_parsing(self):
-        pkgs = {
-            'test_reqparse':
-                {
-                    'requirements.txt': textwrap.dedent("""\
-                        bar
-                        quux<1.0; python_version=='2.6'
-                        requests-aws>=0.1.4    # BSD License (3 clause)
-                        Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7'
-                        requests-kerberos>=0.6;python_version=='2.7' # MIT
-                    """),
-                    'setup.cfg': textwrap.dedent("""\
-                        [metadata]
-                        name = test_reqparse
-
-                        [extras]
-                        test =
-                            foo
-                            baz>3.2 :python_version=='2.7' # MIT
-                            bar>3.3 :python_version=='2.7' # MIT # Apache
-                    """)},
-        }
-        pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs
-        pkg_dir = pkg_dirs['test_reqparse']
-        # pkg_resources.split_sections uses None as the title of an
-        # anonymous section instead of the empty string. Weird.
-        expected_requirements = {
-            None: ['bar', 'requests-aws>=0.1.4'],
-            ":(python_version=='2.6')": ['quux<1.0'],
-            ":(python_version=='2.7')": ['Routes!=2.0,!=2.1,>=1.12.3',
-                                         'requests-kerberos>=0.6'],
-            'test': ['foo'],
-            "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3']
-        }
-        venv = self.useFixture(Venv('reqParse'))
-        bin_python = venv.python
-        # Two things are tested by this
-        # 1) pbr properly parses markers from requiremnts.txt and setup.cfg
-        # 2) bdist_wheel causes pbr to not evaluate markers
-        self._run_cmd(bin_python, ('setup.py', 'bdist_wheel'),
-                      allow_fail=False, cwd=pkg_dir)
-        egg_info = os.path.join(pkg_dir, 'test_reqparse.egg-info')
-
-        requires_txt = os.path.join(egg_info, 'requires.txt')
-        with open(requires_txt, 'rt') as requires:
-            generated_requirements = dict(
-                pkg_resources.split_sections(requires))
-
-        # NOTE(dhellmann): We have to spell out the comparison because
-        # the rendering for version specifiers in a range is not
-        # consistent across versions of setuptools.
-
-        for section, expected in expected_requirements.items():
-            exp_parsed = [
-                pkg_resources.Requirement.parse(s)
-                for s in expected
-            ]
-            gen_parsed = [
-                pkg_resources.Requirement.parse(s)
-                for s in generated_requirements[section]
-            ]
-            self.assertEqual(exp_parsed, gen_parsed)
-
-
-class TestPEP517Support(base.BaseTestCase):
-    def test_pep_517_support(self):
-        # Note that the current PBR PEP517 entrypoints rely on a valid
-        # PBR setup.py existing.
-        pkgs = {
-            'test_pep517':
-                {
-                    'requirements.txt': textwrap.dedent("""\
-                        sphinx
-                        iso8601
-                    """),
-                    # Override default setup.py to remove setup_requires.
-                    'setup.py': textwrap.dedent("""\
-                        #!/usr/bin/env python
-                        import setuptools
-                        setuptools.setup(pbr=True)
-                    """),
-                    'setup.cfg': textwrap.dedent("""\
-                        [metadata]
-                        name = test_pep517
-                        summary = A tiny test project
-                        author = PBR Team
-                        author_email = foo@example.com
-                        home_page = https://example.com/
-                        classifier =
-                            Intended Audience :: Information Technology
-                            Intended Audience :: System Administrators
-                            License :: OSI Approved :: Apache Software License
-                            Operating System :: POSIX :: Linux
-                            Programming Language :: Python
-                            Programming Language :: Python :: 2
-                            Programming Language :: Python :: 2.7
-                            Programming Language :: Python :: 3
-                            Programming Language :: Python :: 3.6
-                            Programming Language :: Python :: 3.7
-                            Programming Language :: Python :: 3.8
-                    """),
-                    # note that we use 36.6.0 rather than 64.0.0 since the
-                    # latter doesn't support Python < 3.8 and we run our tests
-                    # against Python 2.7 still. That's okay since we're not
-                    # testing PEP-660 functionality here (which requires the
-                    # newer setuptools)
-                    'pyproject.toml': textwrap.dedent("""\
-                        [build-system]
-                        requires = ["pbr", "setuptools>=36.6.0", "wheel"]
-                        build-backend = "pbr.build"
-                    """)},
-        }
-        pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs
-        pkg_dir = pkg_dirs['test_pep517']
-        venv = self.useFixture(Venv('PEP517'))
-
-        # Test building sdists and wheels works. Note we do not use pip here
-        # because pip will forcefully install the latest version of PBR on
-        # pypi to satisfy the build-system requires. This means we can't self
-        # test changes using pip. Build with --no-isolation appears to avoid
-        # this problem.
-        self._run_cmd(venv.python, ('-m', 'build', '--no-isolation', '.'),
-                      allow_fail=False, cwd=pkg_dir)
-
 
 class TestRepositoryURLDependencies(base.BaseTestCase):
 
     def setUp(self):
         super(TestRepositoryURLDependencies, self).setUp()
-        self.requirements = os.path.join(tempfile.mkdtemp(),
-                                         'requirements.txt')
+        self.requirements = os.path.join(
+            tempfile.mkdtemp(), 'requirements.txt'
+        )
         with open(self.requirements, 'w') as f:
-            f.write('\n'.join([
-                '-e git+git://git.pro-ject.org/oslo.messaging#egg=oslo.messaging-1.0.0-rc',  # noqa
-                '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize',  # noqa
-                '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize-beta',  # noqa
-                '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta',  # noqa
-                '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-4.0.1',  # noqa
-                '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha.beta.1',  # noqa
-                '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay',  # noqa
-                '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-2.0.0-rc.1+build.123',  # noqa
-                '-e git+git://git.project.org/Proj#egg=Proj1',
-                'git+https://git.project.org/Proj#egg=Proj2-0.0.1',
-                '-e git+ssh://git.project.org/Proj#egg=Proj3',
-                'svn+svn://svn.project.org/svn/Proj#egg=Proj4-0.0.2',
-                '-e svn+http://svn.project.org/svn/Proj/trunk@2019#egg=Proj5',
-                'hg+http://hg.project.org/Proj@da39a3ee5e6b#egg=Proj-0.0.3',
-                '-e hg+http://hg.project.org/Proj@2019#egg=Proj',
-                'hg+http://hg.project.org/Proj@v1.0#egg=Proj-0.0.4',
-                '-e hg+http://hg.project.org/Proj@special_feature#egg=Proj',
-                'git://foo.com/zipball#egg=foo-bar-1.2.4',
-                'pypi-proj1', 'pypi-proj2']))
+            f.write(
+                '\n'.join(
+                    [
+                        '-e git+git://git.pro-ject.org/oslo.messaging#egg=oslo.messaging-1.0.0-rc',  # noqa
+                        '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize',  # noqa
+                        '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize-beta',  # noqa
+                        '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta',  # noqa
+                        '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-4.0.1',  # noqa
+                        '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha.beta.1',  # noqa
+                        '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay',  # noqa
+                        '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-2.0.0-rc.1+build.123',  # noqa
+                        '-e git+git://git.project.org/Proj#egg=Proj1',
+                        'git+https://git.project.org/Proj#egg=Proj2-0.0.1',
+                        '-e git+ssh://git.project.org/Proj#egg=Proj3',
+                        'svn+svn://svn.project.org/svn/Proj#egg=Proj4-0.0.2',
+                        '-e svn+http://svn.project.org/svn/Proj/trunk@2019#egg=Proj5',
+                        'hg+http://hg.project.org/Proj@da39a3ee5e6b#egg=Proj-0.0.3',
+                        '-e hg+http://hg.project.org/Proj@2019#egg=Proj',
+                        'hg+http://hg.project.org/Proj@v1.0#egg=Proj-0.0.4',
+                        '-e hg+http://hg.project.org/Proj@special_feature#egg=Proj',
+                        'git://foo.com/zipball#egg=foo-bar-1.2.4',
+                        'pypi-proj1',
+                        'pypi-proj2',
+                    ]
+                )
+            )
 
     def test_egg_fragment(self):
         expected = [
@@ -1174,66 +632,61 @@ class TestRepositoryURLDependencies(base
             'egg=foo-bar-1.2.4',
         ]
         for index, test in enumerate(tests):
-            self.assertEqual(expected[index],
-                             re.sub(r'egg=([^&]+).*$',
-                                    packaging.egg_fragment,
-                                    test))
+            self.assertEqual(
+                expected[index],
+                re.sub(r'egg=([^&]+).*$', packaging.egg_fragment, test),
+            )
 
     def test_parse_repo_url_requirements(self):
         result = packaging.parse_requirements([self.requirements])
-        self.assertEqual(['oslo.messaging>=1.0.0-rc',
-                          'django-thumborize',
-                          'django-thumborize-beta',
-                          'django-thumborize2-beta',
-                          'django-thumborize2-beta>=4.0.1',
-                          'django-thumborize2-beta>=1.0.0-alpha.beta.1',
-                          'django-thumborize2-beta>=1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay',  # noqa
-                          'django-thumborize2-beta>=2.0.0-rc.1+build.123',
-                          'Proj1', 'Proj2>=0.0.1', 'Proj3',
-                          'Proj4>=0.0.2', 'Proj5', 'Proj>=0.0.3',
-                          'Proj', 'Proj>=0.0.4', 'Proj',
-                          'foo-bar>=1.2.4', 'pypi-proj1',
-                          'pypi-proj2'], result)
+        self.assertEqual(
+            [
+                'oslo.messaging>=1.0.0-rc',
+                'django-thumborize',
+                'django-thumborize-beta',
+                'django-thumborize2-beta',
+                'django-thumborize2-beta>=4.0.1',
+                'django-thumborize2-beta>=1.0.0-alpha.beta.1',
+                'django-thumborize2-beta>=1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay',  # noqa
+                'django-thumborize2-beta>=2.0.0-rc.1+build.123',
+                'Proj1',
+                'Proj2>=0.0.1',
+                'Proj3',
+                'Proj4>=0.0.2',
+                'Proj5',
+                'Proj>=0.0.3',
+                'Proj',
+                'Proj>=0.0.4',
+                'Proj',
+                'foo-bar>=1.2.4',
+                'pypi-proj1',
+                'pypi-proj2',
+            ],
+            result,
+        )
 
     def test_parse_repo_url_dependency_links(self):
         result = packaging.parse_dependency_links([self.requirements])
         self.assertEqual(
             [
-             'git+git://git.pro-ject.org/oslo.messaging#egg=oslo.messaging-1.0.0-rc',  # noqa
-             'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize',  # noqa
-             'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize-beta',  # noqa
-             'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta',  # noqa
-             'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-4.0.1',  # noqa
-             'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha.beta.1',  # noqa
-             'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay',  # noqa
-             'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-2.0.0-rc.1+build.123',  # noqa
-             'git+git://git.project.org/Proj#egg=Proj1',
-             'git+https://git.project.org/Proj#egg=Proj2-0.0.1',
-             'git+ssh://git.project.org/Proj#egg=Proj3',
-             'svn+svn://svn.project.org/svn/Proj#egg=Proj4-0.0.2',
-             'svn+http://svn.project.org/svn/Proj/trunk@2019#egg=Proj5',
-             'hg+http://hg.project.org/Proj@da39a3ee5e6b#egg=Proj-0.0.3',
-             'hg+http://hg.project.org/Proj@2019#egg=Proj',
-             'hg+http://hg.project.org/Proj@v1.0#egg=Proj-0.0.4',
-             'hg+http://hg.project.org/Proj@special_feature#egg=Proj',
-             'git://foo.com/zipball#egg=foo-bar-1.2.4'], result)
-
-
-def get_soabi():
-    soabi = None
-    try:
-        soabi = sysconfig.get_config_var('SOABI')
-        arch = sysconfig.get_config_var('MULTIARCH')
-    except IOError:
-        pass
-    if soabi and arch and 'pypy' in sysconfig.get_scheme_names():
-        soabi = '%s-%s' % (soabi, arch)
-    if soabi is None and 'pypy' in sysconfig.get_scheme_names():
-        # NOTE(sigmavirus24): PyPy only added support for the SOABI config var
-        # to sysconfig in 2015. That was well after 2.2.1 was published in the
-        # Ubuntu 14.04 archive.
-        for suffix in get_suffixes():
-            if suffix.startswith('.pypy') and suffix.endswith('.so'):
-                soabi = suffix.split('.')[1]
-                break
-    return soabi
+                'git+git://git.pro-ject.org/oslo.messaging#egg=oslo.messaging-1.0.0-rc',  # noqa
+                'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize',  # noqa
+                'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize-beta',  # noqa
+                'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta',  # noqa
+                'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-4.0.1',  # noqa
+                'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha.beta.1',  # noqa
+                'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay',  # noqa
+                'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-2.0.0-rc.1+build.123',  # noqa
+                'git+git://git.project.org/Proj#egg=Proj1',
+                'git+https://git.project.org/Proj#egg=Proj2-0.0.1',
+                'git+ssh://git.project.org/Proj#egg=Proj3',
+                'svn+svn://svn.project.org/svn/Proj#egg=Proj4-0.0.2',
+                'svn+http://svn.project.org/svn/Proj/trunk@2019#egg=Proj5',
+                'hg+http://hg.project.org/Proj@da39a3ee5e6b#egg=Proj-0.0.3',
+                'hg+http://hg.project.org/Proj@2019#egg=Proj',
+                'hg+http://hg.project.org/Proj@v1.0#egg=Proj-0.0.4',
+                'hg+http://hg.project.org/Proj@special_feature#egg=Proj',
+                'git://foo.com/zipball#egg=foo-bar-1.2.4',
+            ],
+            result,
+        )
diff -pruN 6.1.1-2/pbr/tests/test_pbr_json.py 7.0.1-2/pbr/tests/test_pbr_json.py
--- 6.1.1-2/pbr/tests/test_pbr_json.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_pbr_json.py	2025-08-14 16:07:35.000000000 +0000
@@ -10,14 +10,19 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+from __future__ import absolute_import
+from __future__ import print_function
+
+import sys
 
 from pbr import pbr_json
 from pbr.tests import base
 
+if sys.version_info >= (3, 3):
+    from unittest import mock
+else:
+    import mock  # noqa
+
 
 class TestJsonContent(base.BaseTestCase):
     @mock.patch('pbr.git._run_git_functions', return_value=True)
@@ -27,7 +32,5 @@ class TestJsonContent(base.BaseTestCase)
         cmd = mock.Mock()
         pbr_json.write_pbr_json(cmd, "basename", "pbr.json")
         cmd.write_file.assert_called_once_with(
-            'pbr',
-            'pbr.json',
-            '{"git_version": "123456", "is_release": true}'
+            'pbr', 'pbr.json', '{"git_version": "123456", "is_release": true}'
         )
diff -pruN 6.1.1-2/pbr/tests/test_setup.py 7.0.1-2/pbr/tests/test_setup.py
--- 6.1.1-2/pbr/tests/test_setup.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_setup.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,222 +0,0 @@
-# Copyright (c) 2011 OpenStack Foundation
-# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from __future__ import print_function
-
-import os
-
-try:
-    import cStringIO as io
-    BytesIO = io.StringIO
-except ImportError:
-    import io
-    BytesIO = io.BytesIO
-
-import fixtures
-
-from pbr import git
-from pbr import options
-from pbr.tests import base
-
-
-class SkipFileWrites(base.BaseTestCase):
-
-    scenarios = [
-        ('changelog_option_true',
-         dict(option_key='skip_changelog', option_value='True',
-              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
-              pkg_func=git.write_git_changelog, filename='ChangeLog')),
-        ('changelog_option_false',
-         dict(option_key='skip_changelog', option_value='False',
-              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
-              pkg_func=git.write_git_changelog, filename='ChangeLog')),
-        ('changelog_env_true',
-         dict(option_key='skip_changelog', option_value='False',
-              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
-              pkg_func=git.write_git_changelog, filename='ChangeLog')),
-        ('changelog_both_true',
-         dict(option_key='skip_changelog', option_value='True',
-              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
-              pkg_func=git.write_git_changelog, filename='ChangeLog')),
-        ('authors_option_true',
-         dict(option_key='skip_authors', option_value='True',
-              env_key='SKIP_GENERATE_AUTHORS', env_value=None,
-              pkg_func=git.generate_authors, filename='AUTHORS')),
-        ('authors_option_false',
-         dict(option_key='skip_authors', option_value='False',
-              env_key='SKIP_GENERATE_AUTHORS', env_value=None,
-              pkg_func=git.generate_authors, filename='AUTHORS')),
-        ('authors_env_true',
-         dict(option_key='skip_authors', option_value='False',
-              env_key='SKIP_GENERATE_AUTHORS', env_value='True',
-              pkg_func=git.generate_authors, filename='AUTHORS')),
-        ('authors_both_true',
-         dict(option_key='skip_authors', option_value='True',
-              env_key='SKIP_GENERATE_AUTHORS', env_value='True',
-              pkg_func=git.generate_authors, filename='AUTHORS')),
-    ]
-
-    def setUp(self):
-        super(SkipFileWrites, self).setUp()
-        self.temp_path = self.useFixture(fixtures.TempDir()).path
-        self.root_dir = os.path.abspath(os.path.curdir)
-        self.git_dir = os.path.join(self.root_dir, ".git")
-        if not os.path.exists(self.git_dir):
-            self.skipTest("%s is missing; skipping git-related checks"
-                          % self.git_dir)
-            return
-        self.filename = os.path.join(self.temp_path, self.filename)
-        self.option_dict = dict()
-        if self.option_key is not None:
-            self.option_dict[self.option_key] = ('setup.cfg',
-                                                 self.option_value)
-        self.useFixture(
-            fixtures.EnvironmentVariable(self.env_key, self.env_value))
-
-    def test_skip(self):
-        self.pkg_func(git_dir=self.git_dir,
-                      dest_dir=self.temp_path,
-                      option_dict=self.option_dict)
-        self.assertEqual(
-            not os.path.exists(self.filename),
-            (self.option_value.lower() in options.TRUE_VALUES or
-             self.env_value is not None))
-
-
-_changelog_content = """7780758\x00Break parser\x00 (tag: refs/tags/1_foo.1)
-04316fe\x00Make python\x00 (refs/heads/review/monty_taylor/27519)
-378261a\x00Add an integration test script.\x00
-3c373ac\x00Merge "Lib\x00 (HEAD, tag: refs/tags/2013.2.rc2, tag: refs/tags/2013.2, refs/heads/mile-proposed)
-182feb3\x00Fix pip invocation for old versions of pip.\x00 (tag: refs/tags/0.5.17)
-fa4f46e\x00Remove explicit depend on distribute.\x00 (tag: refs/tags/0.5.16)
-d1c53dd\x00Use pip instead of easy_install for installation.\x00
-a793ea1\x00Merge "Skip git-checkout related tests when .git is missing"\x00
-6c27ce7\x00Skip git-checkout related tests when .git is missing\x00
-451e513\x00Bug fix: create_stack() fails when waiting\x00
-4c8cfe4\x00Improve test coverage: network delete API\x00 (tag: refs/tags/(evil))
-d7e6167\x00Bug fix: Fix pass thru filtering in list_networks\x00 (tag: refs/tags/ev()il)
-c47ec15\x00Consider 'in-use' a non-pending volume for caching\x00 (tag: refs/tags/ev)il)
-8696fbd\x00Improve test coverage: private extension API\x00 (tag: refs/tags/ev(il)
-f0440f8\x00Improve test coverage: hypervisor list\x00 (tag: refs/tags/e(vi)l)
-04984a5\x00Refactor hooks file.\x00 (HEAD, tag: 0.6.7,b, tag: refs/tags/(12), refs/heads/master)
-a65e8ee\x00Remove jinja pin.\x00 (tag: refs/tags/0.5.14, tag: refs/tags/0.5.13)
-"""  # noqa
-
-
-def _make_old_git_changelog_format(line):
-    """Convert post-1.8.1 git log format to pre-1.8.1 git log format"""
-
-    if not line.strip():
-        return line
-    sha, msg, refname = line.split('\x00')
-    refname = refname.replace('tag: ', '')
-    return '\x00'.join((sha, msg, refname))
-
-
-_old_git_changelog_content = '\n'.join(
-    _make_old_git_changelog_format(line)
-    for line in _changelog_content.split('\n'))
-
-
-class GitLogsTest(base.BaseTestCase):
-
-    scenarios = [
-        ('pre1.8.3', {'changelog': _old_git_changelog_content}),
-        ('post1.8.3', {'changelog': _changelog_content}),
-    ]
-
-    def setUp(self):
-        super(GitLogsTest, self).setUp()
-        self.temp_path = self.useFixture(fixtures.TempDir()).path
-        self.root_dir = os.path.abspath(os.path.curdir)
-        self.git_dir = os.path.join(self.root_dir, ".git")
-        self.useFixture(
-            fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS'))
-        self.useFixture(
-            fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG'))
-
-    def test_write_git_changelog(self):
-        self.useFixture(fixtures.FakePopen(lambda _: {
-            "stdout": BytesIO(self.changelog.encode('utf-8'))
-        }))
-
-        git.write_git_changelog(git_dir=self.git_dir,
-                                dest_dir=self.temp_path)
-
-        with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh:
-            changelog_contents = ch_fh.read()
-            self.assertIn("2013.2", changelog_contents)
-            self.assertIn("0.5.17", changelog_contents)
-            self.assertIn("------", changelog_contents)
-            self.assertIn("Refactor hooks file", changelog_contents)
-            self.assertIn(
-                r"Bug fix: create\_stack() fails when waiting",
-                changelog_contents)
-            self.assertNotIn("Refactor hooks file.", changelog_contents)
-            self.assertNotIn("182feb3", changelog_contents)
-            self.assertNotIn("review/monty_taylor/27519", changelog_contents)
-            self.assertNotIn("0.5.13", changelog_contents)
-            self.assertNotIn("0.6.7", changelog_contents)
-            self.assertNotIn("12", changelog_contents)
-            self.assertNotIn("(evil)", changelog_contents)
-            self.assertNotIn("ev()il", changelog_contents)
-            self.assertNotIn("ev(il", changelog_contents)
-            self.assertNotIn("ev)il", changelog_contents)
-            self.assertNotIn("e(vi)l", changelog_contents)
-            self.assertNotIn('Merge "', changelog_contents)
-            self.assertNotIn(r'1\_foo.1', changelog_contents)
-
-    def test_generate_authors(self):
-        author_old = u"Foo Foo <email@foo.com>"
-        author_new = u"Bar Bar <email@bar.com>"
-        co_author = u"Foo Bar <foo@bar.com>"
-        co_author_by = u"Co-authored-by: " + co_author
-
-        git_log_cmd = (
-            "git --git-dir=%s log --format=%%aN <%%aE>"
-            % self.git_dir)
-        git_co_log_cmd = ("git --git-dir=%s log" % self.git_dir)
-        git_top_level = "git rev-parse --show-toplevel"
-        cmd_map = {
-            git_log_cmd: author_new,
-            git_co_log_cmd: co_author_by,
-            git_top_level: self.root_dir,
-        }
-
-        exist_files = [self.git_dir,
-                       os.path.join(self.temp_path, "AUTHORS.in")]
-        self.useFixture(fixtures.MonkeyPatch(
-            "os.path.exists",
-            lambda path: os.path.abspath(path) in exist_files))
-
-        def _fake_run_shell_command(cmd, **kwargs):
-            return cmd_map[" ".join(cmd)]
-
-        self.useFixture(fixtures.MonkeyPatch(
-            "pbr.git._run_shell_command",
-            _fake_run_shell_command))
-
-        with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh:
-            auth_fh.write("%s\n" % author_old)
-
-        git.generate_authors(git_dir=self.git_dir,
-                             dest_dir=self.temp_path)
-
-        with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh:
-            authors = auth_fh.read()
-            self.assertIn(author_old, authors)
-            self.assertIn(author_new, authors)
-            self.assertIn(co_author, authors)
diff -pruN 6.1.1-2/pbr/tests/test_util.py 7.0.1-2/pbr/tests/test_util.py
--- 6.1.1-2/pbr/tests/test_util.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_util.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,16 +13,16 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-try:
-    import configparser
-except ImportError:
-    import ConfigParser as configparser
+from __future__ import absolute_import
+from __future__ import print_function
+
 import io
+import os
 import tempfile
 import textwrap
+import warnings
 
-import sys
-
+from pbr._compat.five import ConfigParser
 from pbr.tests import base
 from pbr import util
 
@@ -30,12 +30,8 @@ from pbr import util
 def config_from_ini(ini):
     config = {}
     ini = textwrap.dedent(ini)
-    if sys.version_info >= (3, 2):
-        parser = configparser.ConfigParser()
-        parser.read_file(io.StringIO(ini))
-    else:
-        parser = configparser.SafeConfigParser()
-        parser.readfp(io.StringIO(ini))
+    parser = ConfigParser()
+    parser.read_file(io.StringIO(ini))
     for section in parser.sections():
         config[section] = dict(parser.items(section))
     return config
@@ -95,6 +91,14 @@ class TestBasics(base.BaseTestCase):
                 scripts/hello-world.py
             modules =
                 mod1
+
+            [backwards_compat]
+            zip_safe = true
+            tests_require =
+              fixtures
+            dependency_links =
+              https://example.com/mypackage/v1.2.3.zip#egg=mypackage-1.2.3
+            include_package_data = true
             """
         expected = {
             'name': u'foo',
@@ -123,7 +127,6 @@ class TestBasics(base.BaseTestCase):
             'provides_extras': [u'bar'],
             'obsoletes': [u'baz'],
             'extras_require': {},
-
             'package_dir': {'': u'src'},
             'packages': [u'foo'],
             'package_data': {
@@ -137,6 +140,129 @@ class TestBasics(base.BaseTestCase):
             ],
             'scripts': [u'scripts/hello-world.py'],
             'py_modules': [u'mod1'],
+            'zip_safe': True,
+            'tests_require': [
+                'fixtures',
+            ],
+            'dependency_links': [
+                'https://example.com/mypackage/v1.2.3.zip#egg=mypackage-1.2.3',
+            ],
+            'include_package_data': True,
+        }
+        config = config_from_ini(config_text)
+        with warnings.catch_warnings(record=True) as w:
+            warnings.simplefilter("always")
+            actual = util.setup_cfg_to_setup_kwargs(config)
+        self.assertDictEqual(expected, actual)
+
+        # split on colon to avoid having to repeat the entire string...
+        warning_messages = set(str(x.message).split(':')[0] for x in w)
+        for warning_message in (
+            "The '[metadata] home_page' option is deprecated",
+            "The '[metadata] summary' option is deprecated",
+            "The '[metadata] classifier' option is deprecated",
+            "The '[metadata] platform' option is deprecated",
+            "The '[metadata] requires_dist' option is deprecated",
+            "The '[metadata] setup_requires_dist' option is deprecated",
+            "The '[metadata] python_requires' option is deprecated",
+            # "The '[metadata] requires_python' option is deprecated",
+            "The '[metadata] provides_dist' option is deprecated",
+            "The '[metadata] provides_extras' option is deprecated",
+            "The '[metadata] obsoletes_dist' option is deprecated",
+            "The '[files] packages' option is deprecated",
+            "The '[files] package_data' option is deprecated",
+            "The '[files] namespace_packages' option is deprecated",
+            "The '[files] data_files' option is deprecated",
+            "The '[files] scripts' option is deprecated",
+            "The '[files] modules' option is deprecated",
+            "The '[backwards_compat] zip_safe' option is deprecated",
+            "The '[backwards_compat] dependency_links' option is deprecated",
+            "The '[backwards_compat] tests_require' option is deprecated",
+            "The '[backwards_compat] include_package_data' option is deprecated",
+        ):
+            self.assertIn(warning_message, warning_messages)
+
+    def test_bug_2120575(self):
+        # check behavior with description, long_description (modern)
+        config_text = u"""
+            [metadata]
+            name = foo
+            description = A short package summary
+            long_description = file: README.rst
+        """
+        expected = {
+            'name': u'foo',
+            'description': u'A short package summary',
+            'long_description': u'file: README.rst',
+            'extras_require': {},
+            'install_requires': [],
+        }
+        config = config_from_ini(config_text)
+        actual = util.setup_cfg_to_setup_kwargs(config)
+        self.assertDictEqual(expected, actual)
+
+        readme = os.path.join(self.temp_dir, 'README.rst')
+        with open(readme, 'w') as f:
+            f.write('A longer summary from the README')
+
+        # check behavior with description, description_file (semi-modern)
+        config_text = (
+            u"""
+            [metadata]
+            name = foo
+            description = A short package summary
+            description_file = %s
+        """
+            % readme
+        )
+        expected = {
+            'name': u'foo',
+            'description': u'A short package summary',
+            'long_description': u'A longer summary from the README\n\n',
+            'extras_require': {},
+            'install_requires': [],
+        }
+        config = config_from_ini(config_text)
+        actual = util.setup_cfg_to_setup_kwargs(config)
+        self.assertDictEqual(expected, actual)
+
+        # check behavior with summary, long_description (old)
+        config_text = (
+            u"""
+            [metadata]
+            name = foo
+            summary = A short package summary
+            long_description = %s
+        """
+            % readme
+        )
+        expected = {
+            'name': u'foo',
+            'description': u'A short package summary',
+            # long_description is retrieved by setuptools
+            'extras_require': {},
+            'install_requires': [],
+        }
+        config = config_from_ini(config_text)
+        actual = util.setup_cfg_to_setup_kwargs(config)
+        self.assertDictEqual(expected, actual)
+
+        # check behavior with summary, description_file (ancient)
+        config_text = (
+            u"""
+            [metadata]
+            name = foo
+            summary = A short package summary
+            description_file = %s
+        """
+            % readme
+        )
+        expected = {
+            'name': u'foo',
+            'description': u'A short package summary',
+            'long_description': u'A longer summary from the README\n\n',
+            'extras_require': {},
+            'install_requires': [],
         }
         config = config_from_ini(config_text)
         actual = util.setup_cfg_to_setup_kwargs(config)
@@ -146,8 +272,10 @@ class TestBasics(base.BaseTestCase):
 class TestExtrasRequireParsingScenarios(base.BaseTestCase):
 
     scenarios = [
-        ('simple_extras', {
-            'config_text': u"""
+        (
+            'simple_extras',
+            {
+                'config_text': u"""
                 [extras]
                 first =
                     foo
@@ -156,15 +284,18 @@ class TestExtrasRequireParsingScenarios(
                     baz>=3.2
                     foo
                 """,
-            'expected_extra_requires': {
-                'first': ['foo', 'bar==1.0'],
-                'second': ['baz>=3.2', 'foo'],
-                'test': ['requests-mock'],
-                "test:(python_version=='2.6')": ['ordereddict'],
-            }
-        }),
-        ('with_markers', {
-            'config_text': u"""
+                'expected_extra_requires': {
+                    'first': ['foo', 'bar==1.0'],
+                    'second': ['baz>=3.2', 'foo'],
+                    'test': ['requests-mock'],
+                    "test:(python_version=='2.6')": ['ordereddict'],
+                },
+            },
+        ),
+        (
+            'with_markers',
+            {
+                'config_text': u"""
                 [extras]
                 test =
                     foo:python_version=='2.6'
@@ -172,24 +303,31 @@ class TestExtrasRequireParsingScenarios(
                     baz<1.6 :python_version=='2.6'
                     zaz :python_version>'1.0'
                 """,
-            'expected_extra_requires': {
-                "test:(python_version=='2.6')": ['foo', 'baz<1.6'],
-                "test": ['bar', 'zaz']}}),
-        ('no_extras', {
-            'config_text': u"""
+                'expected_extra_requires': {
+                    "test:(python_version=='2.6')": ['foo', 'baz<1.6'],
+                    "test": ['bar', 'zaz'],
+                },
+            },
+        ),
+        (
+            'no_extras',
+            {
+                'config_text': u"""
             [metadata]
             long_description = foo
             """,
-            'expected_extra_requires':
-            {}
-        })]
+                'expected_extra_requires': {},
+            },
+        ),
+    ]
 
     def test_extras_parsing(self):
         config = config_from_ini(self.config_text)
         kwargs = util.setup_cfg_to_setup_kwargs(config)
 
-        self.assertEqual(self.expected_extra_requires,
-                         kwargs['extras_require'])
+        self.assertEqual(
+            self.expected_extra_requires, kwargs['extras_require']
+        )
 
 
 class TestInvalidMarkers(base.BaseTestCase):
@@ -202,34 +340,40 @@ class TestInvalidMarkers(base.BaseTestCa
 class TestMapFieldsParsingScenarios(base.BaseTestCase):
 
     scenarios = [
-        ('simple_project_urls', {
-            'config_text': u"""
+        (
+            'simple_project_urls',
+            {
+                'config_text': u"""
                 [metadata]
                 project_urls =
                     Bug Tracker = https://bugs.launchpad.net/pbr/
                     Documentation = https://docs.openstack.org/pbr/
                     Source Code = https://opendev.org/openstack/pbr
                 """,  # noqa: E501
-            'expected_project_urls': {
-                'Bug Tracker': 'https://bugs.launchpad.net/pbr/',
-                'Documentation': 'https://docs.openstack.org/pbr/',
-                'Source Code': 'https://opendev.org/openstack/pbr',
+                'expected_project_urls': {
+                    'Bug Tracker': 'https://bugs.launchpad.net/pbr/',
+                    'Documentation': 'https://docs.openstack.org/pbr/',
+                    'Source Code': 'https://opendev.org/openstack/pbr',
+                },
             },
-        }),
-        ('query_parameters', {
-            'config_text': u"""
+        ),
+        (
+            'query_parameters',
+            {
+                'config_text': u"""
                 [metadata]
                 project_urls =
                     Bug Tracker = https://bugs.launchpad.net/pbr/?query=true
                     Documentation = https://docs.openstack.org/pbr/?foo=bar
                     Source Code = https://git.openstack.org/cgit/openstack-dev/pbr/commit/?id=hash
                 """,  # noqa: E501
-            'expected_project_urls': {
-                'Bug Tracker': 'https://bugs.launchpad.net/pbr/?query=true',
-                'Documentation': 'https://docs.openstack.org/pbr/?foo=bar',
-                'Source Code': 'https://git.openstack.org/cgit/openstack-dev/pbr/commit/?id=hash',  # noqa: E501
+                'expected_project_urls': {
+                    'Bug Tracker': 'https://bugs.launchpad.net/pbr/?query=true',
+                    'Documentation': 'https://docs.openstack.org/pbr/?foo=bar',
+                    'Source Code': 'https://git.openstack.org/cgit/openstack-dev/pbr/commit/?id=hash',  # noqa: E501
+                },
             },
-        }),
+        ),
     ]
 
     def test_project_url_parsing(self):
@@ -242,24 +386,29 @@ class TestMapFieldsParsingScenarios(base
 class TestKeywordsParsingScenarios(base.BaseTestCase):
 
     scenarios = [
-        ('keywords_list', {
-            'config_text': u"""
+        (
+            'keywords_list',
+            {
+                'config_text': u"""
                 [metadata]
                 keywords =
                     one
                     two
                     three
                 """,  # noqa: E501
-            'expected_keywords': ['one', 'two', 'three'],
-        },
+                'expected_keywords': ['one', 'two', 'three'],
+            },
         ),
-        ('inline_keywords', {
-            'config_text': u"""
+        (
+            'inline_keywords',
+            {
+                'config_text': u"""
                 [metadata]
                 keywords = one, two, three
                 """,  # noqa: E501
-            'expected_keywords': ['one, two, three'],
-        }),
+                'expected_keywords': ['one, two, three'],
+            },
+        ),
     ]
 
     def test_keywords_parsing(self):
@@ -284,19 +433,28 @@ class TestProvidesExtras(base.BaseTestCa
 class TestDataFilesParsing(base.BaseTestCase):
 
     scenarios = [
-        ('data_files', {
-            'config_text': u"""
+        (
+            'data_files',
+            {
+                'config_text': u"""
             [files]
             data_files =
                 'i like spaces/' =
                     'dir with space/file with spc 2'
                     'dir with space/file with spc 1'
             """,
-            'data_files': [
-                ('i like spaces/', ['dir with space/file with spc 2',
-                                    'dir with space/file with spc 1'])
-            ]
-        })]
+                'data_files': [
+                    (
+                        'i like spaces/',
+                        [
+                            'dir with space/file with spc 2',
+                            'dir with space/file with spc 1',
+                        ],
+                    )
+                ],
+            },
+        )
+    ]
 
     def test_handling_of_whitespace_in_data_files(self):
         config = config_from_ini(self.config_text)
diff -pruN 6.1.1-2/pbr/tests/test_version.py 7.0.1-2/pbr/tests/test_version.py
--- 6.1.1-2/pbr/tests/test_version.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_version.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,9 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import itertools
 
 from testtools import matchers
@@ -64,7 +67,8 @@ class TestSemanticVersion(base.BaseTestC
 
     def test_from_pip_string_legacy_alpha(self):
         expected = version.SemanticVersion(
-            1, 2, 0, prerelease_type='rc', prerelease=1)
+            1, 2, 0, prerelease_type='rc', prerelease=1
+        )
         parsed = from_pip_string('1.2.0rc1')
         self.assertEqual(expected, parsed)
 
@@ -82,7 +86,8 @@ class TestSemanticVersion(base.BaseTestC
         # We can't define a mapping for .postN.devM, so it should raise.
         self.expectThat(
             lambda: from_pip_string('1.2.3.post5.dev6'),
-            matchers.raises(ValueError))
+            matchers.raises(ValueError),
+        )
 
     def test_from_pip_string_v_version(self):
         parsed = from_pip_string('v1.2.3')
@@ -94,37 +99,42 @@ class TestSemanticVersion(base.BaseTestC
         self.expectThat(expected, matchers.Equals(parsed))
 
         self.expectThat(
-            lambda: from_pip_string('x1.2.3'),
-            matchers.raises(ValueError))
+            lambda: from_pip_string('x1.2.3'), matchers.raises(ValueError)
+        )
 
     def test_from_pip_string_legacy_nonzero_lead_in(self):
         # reported in bug 1361251
         expected = version.SemanticVersion(
-            0, 0, 1, prerelease_type='a', prerelease=2)
+            0, 0, 1, prerelease_type='a', prerelease=2
+        )
         parsed = from_pip_string('0.0.1a2')
         self.assertEqual(expected, parsed)
 
     def test_from_pip_string_legacy_short_nonzero_lead_in(self):
         expected = version.SemanticVersion(
-            0, 1, 0, prerelease_type='a', prerelease=2)
+            0, 1, 0, prerelease_type='a', prerelease=2
+        )
         parsed = from_pip_string('0.1a2')
         self.assertEqual(expected, parsed)
 
     def test_from_pip_string_legacy_no_0_prerelease(self):
         expected = version.SemanticVersion(
-            2, 1, 0, prerelease_type='rc', prerelease=1)
+            2, 1, 0, prerelease_type='rc', prerelease=1
+        )
         parsed = from_pip_string('2.1.0.rc1')
         self.assertEqual(expected, parsed)
 
     def test_from_pip_string_legacy_no_0_prerelease_2(self):
         expected = version.SemanticVersion(
-            2, 0, 0, prerelease_type='rc', prerelease=1)
+            2, 0, 0, prerelease_type='rc', prerelease=1
+        )
         parsed = from_pip_string('2.0.0.rc1')
         self.assertEqual(expected, parsed)
 
     def test_from_pip_string_legacy_non_440_beta(self):
         expected = version.SemanticVersion(
-            2014, 2, prerelease_type='b', prerelease=2)
+            2014, 2, prerelease_type='b', prerelease=2
+        )
         parsed = from_pip_string('2014.2.b2')
         self.assertEqual(expected, parsed)
 
@@ -132,8 +142,9 @@ class TestSemanticVersion(base.BaseTestC
         self.assertRaises(ValueError, from_pip_string, '6eed5ae')
 
     def test_from_pip_string_non_digit_start(self):
-        self.assertRaises(ValueError, from_pip_string,
-                          'non-release-tag/2014.12.16-1')
+        self.assertRaises(
+            ValueError, from_pip_string, 'non-release-tag/2014.12.16-1'
+        )
 
     def test_final_version(self):
         semver = version.SemanticVersion(1, 2, 3)
@@ -243,22 +254,21 @@ class TestSemanticVersion(base.BaseTestC
     def test_decrement_nonrelease(self):
         # The prior version of any non-release is a release
         semver = version.SemanticVersion(1, 2, 4, 'b', 1)
-        self.assertEqual(
-            version.SemanticVersion(1, 2, 3), semver.decrement())
+        self.assertEqual(version.SemanticVersion(1, 2, 3), semver.decrement())
 
     def test_decrement_nonrelease_zero(self):
         # We set an arbitrary max version of 9999 when decrementing versions
         # - this is part of handling rpm support.
         semver = version.SemanticVersion(1, 0, 0)
         self.assertEqual(
-            version.SemanticVersion(0, 9999, 9999), semver.decrement())
+            version.SemanticVersion(0, 9999, 9999), semver.decrement()
+        )
 
     def test_decrement_release(self):
         # The next patch version of a release version requires a change to the
         # patch level.
         semver = version.SemanticVersion(2, 2, 5)
-        self.assertEqual(
-            version.SemanticVersion(2, 2, 4), semver.decrement())
+        self.assertEqual(version.SemanticVersion(2, 2, 4), semver.decrement())
 
     def test_increment_nonrelease(self):
         # The next patch version of a non-release version is another
@@ -266,23 +276,27 @@ class TestSemanticVersion(base.BaseTestC
         # incremented.
         semver = version.SemanticVersion(1, 2, 4, 'b', 1)
         self.assertEqual(
-            version.SemanticVersion(1, 2, 4, 'b', 2), semver.increment())
+            version.SemanticVersion(1, 2, 4, 'b', 2), semver.increment()
+        )
         # Major and minor increments however need to bump things.
         self.assertEqual(
-            version.SemanticVersion(1, 3, 0), semver.increment(minor=True))
+            version.SemanticVersion(1, 3, 0), semver.increment(minor=True)
+        )
         self.assertEqual(
-            version.SemanticVersion(2, 0, 0), semver.increment(major=True))
+            version.SemanticVersion(2, 0, 0), semver.increment(major=True)
+        )
 
     def test_increment_release(self):
         # The next patch version of a release version requires a change to the
         # patch level.
         semver = version.SemanticVersion(1, 2, 5)
+        self.assertEqual(version.SemanticVersion(1, 2, 6), semver.increment())
         self.assertEqual(
-            version.SemanticVersion(1, 2, 6), semver.increment())
-        self.assertEqual(
-            version.SemanticVersion(1, 3, 0), semver.increment(minor=True))
+            version.SemanticVersion(1, 3, 0), semver.increment(minor=True)
+        )
         self.assertEqual(
-            version.SemanticVersion(2, 0, 0), semver.increment(major=True))
+            version.SemanticVersion(2, 0, 0), semver.increment(major=True)
+        )
 
     def test_rc_dev_version(self):
         semver = version.SemanticVersion(1, 2, 4, 'rc', 1, 12)
@@ -305,7 +319,9 @@ class TestSemanticVersion(base.BaseTestC
     def test_to_dev(self):
         self.assertEqual(
             version.SemanticVersion(1, 2, 3, dev_count=1),
-            version.SemanticVersion(1, 2, 3).to_dev(1))
+            version.SemanticVersion(1, 2, 3).to_dev(1),
+        )
         self.assertEqual(
             version.SemanticVersion(1, 2, 3, 'rc', 1, dev_count=1),
-            version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1))
+            version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1),
+        )
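
For reference, the increment/decrement semantics exercised by the reformatted assertions above can be sketched with the public pbr.version API. This is an illustrative snippet, assuming pbr is installed, not part of the patch:

    from pbr.version import SemanticVersion

    release = SemanticVersion(1, 2, 5)
    # Incrementing a release bumps the patch level; minor/major increments
    # reset the lower components.
    assert release.increment() == SemanticVersion(1, 2, 6)
    assert release.increment(minor=True) == SemanticVersion(1, 3, 0)
    assert release.increment(major=True) == SemanticVersion(2, 0, 0)

    beta = SemanticVersion(1, 2, 4, 'b', 1)
    # Incrementing a pre-release only bumps the pre-release counter...
    assert beta.increment() == SemanticVersion(1, 2, 4, 'b', 2)
    # ...while the version before any pre-release is the prior release.
    assert beta.decrement() == SemanticVersion(1, 2, 3)
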
diff -pruN 6.1.1-2/pbr/tests/test_wsgi.py 7.0.1-2/pbr/tests/test_wsgi.py
--- 6.1.1-2/pbr/tests/test_wsgi.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/test_wsgi.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,163 +0,0 @@
-# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP)
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-import re
-import subprocess
-import sys
-try:
-    # python 2
-    from urllib2 import urlopen
-except ImportError:
-    # python 3
-    from urllib.request import urlopen
-
-from pbr.tests import base
-
-
-class TestWsgiScripts(base.BaseTestCase):
-
-    cmd_names = ('pbr_test_wsgi', 'pbr_test_wsgi_with_class')
-
-    def _get_path(self):
-        if os.path.isdir("%s/lib64" % self.temp_dir):
-            path = "%s/lib64" % self.temp_dir
-        elif os.path.isdir("%s/lib" % self.temp_dir):
-            path = "%s/lib" % self.temp_dir
-        elif os.path.isdir("%s/site-packages" % self.temp_dir):
-            return ".:%s/site-packages" % self.temp_dir
-        else:
-            raise Exception("Could not determine path for test")
-        return ".:%s/python%s.%s/site-packages" % (
-            path,
-            sys.version_info[0],
-            sys.version_info[1])
-
-    def test_wsgi_script_install(self):
-        """Test that we install a non-pkg-resources wsgi script."""
-        if os.name == 'nt':
-            self.skipTest('Windows support is passthrough')
-
-        stdout, _, return_code = self.run_setup(
-            'install', '--prefix=%s' % self.temp_dir)
-
-        self._check_wsgi_install_content(stdout)
-
-    def test_wsgi_script_run(self):
-        """Test that we install a runnable wsgi script.
-
-        This test actually attempts to start and interact with the
-        wsgi script in question to demonstrate that it's a working
-        wsgi script using simple server.
-
-        """
-        if os.name == 'nt':
-            self.skipTest('Windows support is passthrough')
-
-        stdout, _, return_code = self.run_setup(
-            'install', '--prefix=%s' % self.temp_dir)
-
-        self._check_wsgi_install_content(stdout)
-
-        # Live test run the scripts and see that they respond to wsgi
-        # requests.
-        for cmd_name in self.cmd_names:
-            self._test_wsgi(cmd_name, b'Hello World')
-
-    def _test_wsgi(self, cmd_name, output, extra_args=None):
-        cmd = os.path.join(self.temp_dir, 'bin', cmd_name)
-        print("Running %s -p 0 -b 127.0.0.1" % cmd)
-        popen_cmd = [cmd, '-p', '0', '-b', '127.0.0.1']
-        if extra_args:
-            popen_cmd.extend(extra_args)
-
-        env = {'PYTHONPATH': self._get_path()}
-
-        p = subprocess.Popen(popen_cmd, stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE, cwd=self.temp_dir,
-                             env=env)
-        self.addCleanup(p.kill)
-
-        stdoutdata = p.stdout.readline()  # ****...
-
-        stdoutdata = p.stdout.readline()  # STARTING test server...
-        self.assertIn(
-            b"STARTING test server pbr_testpackage.wsgi",
-            stdoutdata)
-
-        stdoutdata = p.stdout.readline()  # Available at ...
-        print(stdoutdata)
-        m = re.search(br'(http://[^:]+:\d+)/', stdoutdata)
-        self.assertIsNotNone(m, "Regex failed to match on %s" % stdoutdata)
-
-        stdoutdata = p.stdout.readline()  # DANGER! ...
-        self.assertIn(
-            b"DANGER! For testing only, do not use in production",
-            stdoutdata)
-
-        stdoutdata = p.stdout.readline()  # ***...
-
-        f = urlopen(m.group(1).decode('utf-8'))
-        self.assertEqual(output, f.read())
-
-        # Request again so that the application can force stderr.flush(),
-        # otherwise the log is buffered and the next readline() will hang.
-        urlopen(m.group(1).decode('utf-8'))
-
-        stdoutdata = p.stderr.readline()
-        # we should have logged an HTTP request, return code 200, that
-        # returned the right amount of bytes
-        status = '"GET / HTTP/1.1" 200 %d' % len(output)
-        self.assertIn(status.encode('utf-8'), stdoutdata)
-
-    def _check_wsgi_install_content(self, install_stdout):
-        for cmd_name in self.cmd_names:
-            install_txt = 'Installing %s script to %s' % (cmd_name,
-                                                          self.temp_dir)
-            self.assertIn(install_txt, install_stdout)
-
-            cmd_filename = os.path.join(self.temp_dir, 'bin', cmd_name)
-
-            script_txt = open(cmd_filename, 'r').read()
-            self.assertNotIn('pkg_resources', script_txt)
-
-            main_block = """if __name__ == "__main__":
-    import argparse
-    import socket
-    import sys
-    import wsgiref.simple_server as wss"""
-
-            if cmd_name == 'pbr_test_wsgi':
-                app_name = "main"
-            else:
-                app_name = "WSGI.app"
-
-            starting_block = ("STARTING test server pbr_testpackage.wsgi."
-                              "%s" % app_name)
-
-            else_block = """else:
-    application = None"""
-
-            self.assertIn(main_block, script_txt)
-            self.assertIn(starting_block, script_txt)
-            self.assertIn(else_block, script_txt)
-
-    def test_with_argument(self):
-        if os.name == 'nt':
-            self.skipTest('Windows support is passthrough')
-
-        stdout, _, return_code = self.run_setup(
-            'install', '--prefix=%s' % self.temp_dir)
-
-        self._test_wsgi('pbr_test_wsgi', b'Foo Bar', ["--", "-c", "Foo Bar"])
diff -pruN 6.1.1-2/pbr/tests/testpackage/doc/source/conf.py 7.0.1-2/pbr/tests/testpackage/doc/source/conf.py
--- 6.1.1-2/pbr/tests/testpackage/doc/source/conf.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/testpackage/doc/source/conf.py	2025-08-14 16:07:35.000000000 +0000
@@ -52,8 +52,11 @@ pygments_style = 'sphinx'
 # (source start file, target name, title, author, documentclass
 # [howto/manual]).
 latex_documents = [
-    ('index',
-     '%s.tex' % project,
-     u'%s Documentation' % project,
-     u'OpenStack Foundation', 'manual'),
+    (
+        'index',
+        '%s.tex' % project,
+        u'%s Documentation' % project,
+        u'OpenStack Foundation',
+        'manual',
+    ),
 ]
diff -pruN 6.1.1-2/pbr/tests/testpackage/pbr_testpackage/cmd.py 7.0.1-2/pbr/tests/testpackage/pbr_testpackage/cmd.py
--- 6.1.1-2/pbr/tests/testpackage/pbr_testpackage/cmd.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/testpackage/pbr_testpackage/cmd.py	2025-08-14 16:07:35.000000000 +0000
@@ -12,6 +12,7 @@
 # implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 from __future__ import print_function
 
 
diff -pruN 6.1.1-2/pbr/tests/testpackage/pbr_testpackage/wsgi.py 7.0.1-2/pbr/tests/testpackage/pbr_testpackage/wsgi.py
--- 6.1.1-2/pbr/tests/testpackage/pbr_testpackage/wsgi.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/testpackage/pbr_testpackage/wsgi.py	2025-08-14 16:07:35.000000000 +0000
@@ -27,8 +27,9 @@ def application(env, start_response, dat
 
 def main():
     parser = argparse.ArgumentParser(description='Return a string.')
-    parser.add_argument('--content', '-c', help='String returned',
-                        default='Hello World')
+    parser.add_argument(
+        '--content', '-c', help='String returned', default='Hello World'
+    )
     args = parser.parse_args()
     return functools.partial(application, data=args.content)
 
diff -pruN 6.1.1-2/pbr/tests/testpackage/setup.py 7.0.1-2/pbr/tests/testpackage/setup.py
--- 6.1.1-2/pbr/tests/testpackage/setup.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/testpackage/setup.py	2025-08-14 16:07:35.000000000 +0000
@@ -13,6 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import setuptools
 
 setuptools.setup(
diff -pruN 6.1.1-2/pbr/tests/util.py 7.0.1-2/pbr/tests/util.py
--- 6.1.1-2/pbr/tests/util.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/tests/util.py	2025-08-14 16:07:35.000000000 +0000
@@ -38,24 +38,22 @@
 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import contextlib
 import os
 import shutil
 import stat
+import subprocess
 import sys
 
-try:
-    import ConfigParser as configparser
-except ImportError:
-    import configparser
+from pbr._compat.five import ConfigParser
 
 
 @contextlib.contextmanager
 def open_config(filename):
-    if sys.version_info >= (3, 2):
-        cfg = configparser.ConfigParser()
-    else:
-        cfg = configparser.SafeConfigParser()
+    cfg = ConfigParser()
     cfg.read(filename)
     yield cfg
     with open(filename, 'w') as fp:
@@ -68,11 +66,63 @@ def rmtree(path):
     Handle 'access denied' from trying to delete read-only files.
     """
 
-    def onerror(func, path, exc_info):
+    def onexc(func, path, exc_info):
         if not os.access(path, os.W_OK):
             os.chmod(path, stat.S_IWUSR)
             func(path)
         else:
             raise
 
-    return shutil.rmtree(path, onerror=onerror)
+    if sys.version_info >= (3, 12):
+        return shutil.rmtree(path, onexc=onexc)
+    else:
+        return shutil.rmtree(path, onerror=onexc)
+
+
+def run_cmd(args, cwd):
+    """Run the command args in cwd.
+
+    :param args: The command to run, e.g. ['git', 'status'].
+    :param cwd: The directory to run the command in.
+    :return: (stdout, stderr, returncode). The command inherits the
+        current process environment, with PYTHONWARNINGS set to
+        'ignore'.
+    """
+    env = os.environ.copy()
+    env['PYTHONWARNINGS'] = 'ignore'
+
+    print('Running %s' % ' '.join(args))
+    p = subprocess.Popen(
+        args,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        cwd=cwd,
+        env=env,
+    )
+    streams = tuple(s.decode('latin1').strip() for s in p.communicate())
+    print('STDOUT:')
+    print(streams[0])
+    print('STDERR:')
+    print(streams[1])
+    return (streams) + (p.returncode,)
+
+
+def config_git():
+    run_cmd(
+        ['git', 'config', '--global', 'user.email', 'example@example.com'],
+        None,
+    )
+    run_cmd(
+        ['git', 'config', '--global', 'user.name', 'OpenStack Developer'], None
+    )
+    run_cmd(
+        [
+            'git',
+            'config',
+            '--global',
+            'user.signingkey',
+            'example@example.com',
+        ],
+        None,
+    )
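
The rmtree() helper above works around Python 3.12's rename of shutil.rmtree's 'onerror' hook to 'onexc'. A minimal standalone sketch of the same pattern (force_rmtree is an illustrative name, not part of pbr):

    import os
    import shutil
    import stat
    import sys

    def force_rmtree(path):
        """Remove a tree, clearing the read-only bit when deletion fails."""

        def _retry(func, path, exc_info):
            if not os.access(path, os.W_OK):
                os.chmod(path, stat.S_IWUSR)
                func(path)
            else:
                raise

        # Python 3.12 deprecates the 'onerror' hook in favour of 'onexc';
        # the same callback works for both since it ignores the error details.
        if sys.version_info >= (3, 12):
            shutil.rmtree(path, onexc=_retry)
        else:
            shutil.rmtree(path, onerror=_retry)
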
diff -pruN 6.1.1-2/pbr/util.py 7.0.1-2/pbr/util.py
--- 6.1.1-2/pbr/util.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/util.py	2025-08-14 16:07:35.000000000 +0000
@@ -43,14 +43,11 @@
 # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
 # DAMAGE.
 
-"""The code in this module is mostly copy/pasted out of the distutils2 source
-code, as recommended by Tarek Ziade.  As such, it may be subject to some change
-as distutils2 development continues, and will have to be kept up to date.
-
-I didn't want to use it directly from distutils2 itself, since I do not want it
-to be an installation dependency for our packages yet--it is still too unstable
-(the latest version on PyPI doesn't even install).
-"""
+# The code in this module is mostly copy/pasted out of the distutils2 source
+# code, as recommended by Tarek Ziade.
+
+from __future__ import absolute_import
+from __future__ import print_function
 
 # These first two imports are not used, but are needed to get around an
 # irritating Python bug that can crop up when using ./setup.py test.
@@ -68,19 +65,16 @@ import re
 import shlex
 import sys
 import traceback
+import warnings
 
-import distutils.ccompiler
 from distutils import errors
 from distutils import log
-import pkg_resources
+import setuptools
 from setuptools import dist as st_dist
 from setuptools import extension
 
-try:
-    import ConfigParser as configparser
-except ImportError:
-    import configparser
-
+from pbr._compat.five import ConfigParser
+import pbr._compat.packaging
 from pbr import extra_files
 import pbr.hooks
 
@@ -125,7 +119,7 @@ CFG_TO_PY_SETUP_ARGS = (
     (('files', 'namespace_packages'), 'namespace_packages'),
     (('files', 'data_files'), 'data_files'),
     (('files', 'scripts'), 'scripts'),
-    (('files', 'modules'), 'py_modules'),   # **
+    (('files', 'modules'), 'py_modules'),  # **
     (('global', 'commands'), 'cmdclass'),
     # Not supported in distutils2, but provided for
     # backwards compatibility with setuptools
@@ -135,24 +129,121 @@ CFG_TO_PY_SETUP_ARGS = (
     (('backwards_compat', 'include_package_data'), 'include_package_data'),
 )
 
+DEPRECATED_CFG = {
+    ('metadata', 'home_page'): (
+        "Use '[metadata] url' (setup.cfg) or '[project.urls]' "
+        "(pyproject.toml) instead"
+    ),
+    ('metadata', 'summary'): (
+        "Use '[metadata] description' (setup.cfg) or '[project] description' "
+        "(pyproject.toml) instead"
+    ),
+    ('metadata', 'description_file'): (
+        "Use '[metadata] long_description' (setup.cfg) or '[project] readme' "
+        "(pyproject.toml) instead"
+    ),
+    ('metadata', 'classifier'): (
+        "Use '[metadata] classifiers' (setup.cfg) or '[project] classifiers' "
+        "(pyproject.toml) instead"
+    ),
+    ('metadata', 'platform'): (
+        "Use '[metadata] platforms' (setup.cfg) or "
+        "'[tool.setuptools] platforms' (pyproject.toml) instead"
+    ),
+    ('metadata', 'requires_dist'): (
+        "Use '[options] install_requires' (setup.cfg) or "
+        "'[project] dependencies' (pyproject.toml) instead"
+    ),
+    ('metadata', 'setup_requires_dist'): (
+        "Use '[options] setup_requires' (setup.cfg) or "
+        "'[build-system] requires' (pyproject.toml) instead"
+    ),
+    ('metadata', 'python_requires'): (
+        "Use '[options] python_requires' (setup.cfg) or "
+        "'[project] requires-python' (pyproject.toml) instead"
+    ),
+    ('metadata', 'requires_python'): (
+        "Use '[options] python_requires' (setup.cfg) or "
+        "'[project] requires-python' (pyproject.toml) instead"
+    ),
+    ('metadata', 'provides_dist'): "This option is ignored by pip",
+    ('metadata', 'provides_extras'): "This option is ignored by pip",
+    ('metadata', 'obsoletes_dist'): "This option is ignored by pip",
+    ('files', 'packages_root'): (
+        "Use '[options] package_dir' (setup.cfg) or '[tools.setuptools] "
+        "package_dir' (pyproject.toml) instead"
+    ),
+    ('files', 'packages'): (
+        "Use '[options] packages' (setup.cfg) or '[tools.setuptools] "
+        "packages' (pyproject.toml) instead"
+    ),
+    ('files', 'package_data'): (
+        "Use '[options.package_data]' (setup.cfg) or "
+        "'[tool.setuptools.package-data]' (pyproject.toml) instead"
+    ),
+    ('files', 'namespace_packages'): (
+        "Use '[options] namespace_packages' (setup.cfg) or migrate to PEP "
+        "420-style namespace packages instead"
+    ),
+    ('files', 'data_files'): (
+        "For package data files, use '[options] package_data' (setup.cfg) "
+        "or '[tools.setuptools] package_data' (pyproject.toml) instead. "
+        "Support for non-package data files is deprecated in setuptools "
+        "and their use is discouraged. If necessary, use "
+        "'[options] data_files' (setup.cfg) or '[tools.setuptools] data-files'"
+        "(pyproject.toml) instead."
+    ),
+    ('files', 'scripts'): (
+        "Migrate to using the console_scripts entrypoint and use "
+        "'[options.entry_points]' (setup.cfg) or '[project.scripts]' "
+        "(pyproject.toml) instead"
+    ),
+    ('files', 'modules'): (
+        "Use '[options] py_modules' (setup.cfg) or '[tools.setuptools] "
+        "py-modules' (pyproject.toml) instead"
+    ),
+    ('backwards_compat', 'zip_safe'): (
+        "This option is obsolete as it was only relevant in the context of "
+        "eggs"
+    ),
+    ('backwards_compat', 'dependency_links'): (
+        "This option is ignored by pip starting from pip 19.0"
+    ),
+    ('backwards_compat', 'tests_require'): (
+        "This option is ignored by pip starting from pip 19.0"
+    ),
+    ('backwards_compat', 'include_package_data'): (
+        "Use '[options] include_package_data' (setup.cfg) or "
+        "'[tools.setuptools] include-package-data' (pyproject.toml) instead"
+    ),
+}
+
 # setup() arguments that can have multiple values in setup.cfg
-MULTI_FIELDS = ("classifiers",
-                "platforms",
-                "install_requires",
-                "provides",
-                "obsoletes",
-                "namespace_packages",
-                "packages",
-                "package_data",
-                "data_files",
-                "scripts",
-                "py_modules",
-                "dependency_links",
-                "setup_requires",
-                "tests_require",
-                "keywords",
-                "cmdclass",
-                "provides_extras")
+MULTI_FIELDS = (
+    "classifiers",
+    "platforms",
+    "install_requires",
+    "provides",
+    "obsoletes",
+    "namespace_packages",
+    "packages",
+    "package_data",
+    "data_files",
+    "scripts",
+    "py_modules",
+    "dependency_links",
+    "setup_requires",
+    "tests_require",
+    "keywords",
+    "cmdclass",
+    "provides_extras",
+)
+
+# A mapping of removed keywords to the setuptools version that removed them.
+REMOVED_KEYWORDS = {
+    # https://setuptools.pypa.io/en/stable/history.html#v72-0-0
+    'tests_require': '72.0.0',
+}
 
 # setup() arguments that can have mapping values in setup.cfg
 MAP_FIELDS = ("project_urls",)
@@ -218,16 +309,13 @@ def cfg_to_args(path='setup.cfg', script
     :raises DistutilsFileError:
         When the setup.cfg file is not found.
     """
-
     # The method source code really starts here.
-    if sys.version_info >= (3, 0):
-        parser = configparser.ConfigParser()
-    else:
-        parser = configparser.SafeConfigParser()
+    parser = ConfigParser()
 
     if not os.path.exists(path):
-        raise errors.DistutilsFileError("file '%s' does not exist" %
-                                        os.path.abspath(path))
+        raise errors.DistutilsFileError(
+            "file '%s' does not exist" % os.path.abspath(path)
+        )
     try:
         parser.read(path, encoding='utf-8')
     except TypeError:
@@ -235,7 +323,7 @@ def cfg_to_args(path='setup.cfg', script
         parser.read(path)
     config = {}
     for section in parser.sections():
-        config[section] = dict()
+        config[section] = {}
         for k, value in parser.items(section):
             config[section][k.replace('-', '_')] = value
 
@@ -253,8 +341,10 @@ def cfg_to_args(path='setup.cfg', script
     try:
         if setup_hooks:
             setup_hooks = [
-                hook for hook in split_multiline(setup_hooks)
-                if hook != 'pbr.hooks.setup_hook']
+                hook
+                for hook in split_multiline(setup_hooks)
+                if hook != 'pbr.hooks.setup_hook'
+            ]
             for hook in setup_hooks:
                 hook_fn = resolve_name(hook)
                 try:
@@ -263,8 +353,9 @@ def cfg_to_args(path='setup.cfg', script
                     log.error('setup hook %s terminated the installation')
                 except Exception:
                     e = sys.exc_info()[1]
-                    log.error('setup hook %s raised exception: %s\n' %
-                              (hook, e))
+                    log.error(
+                        'setup hook %s raised exception: %s\n' % (hook, e)
+                    )
                     log.error(traceback.format_exc())
                     sys.exit(1)
 
@@ -277,7 +368,12 @@ def cfg_to_args(path='setup.cfg', script
         kwargs['include_package_data'] = True
         kwargs['zip_safe'] = False
 
-        register_custom_compilers(config)
+        if has_get_option(config, 'global', 'compilers'):
+            warnings.warn(
+                'Support for custom compilers was removed in pbr 7.0 and the '
+                '\'[global] compilers\' option is now ignored.',
+                DeprecationWarning,
+            )
 
         ext_modules = get_extension_modules(config)
         if ext_modules:
@@ -300,6 +396,25 @@ def cfg_to_args(path='setup.cfg', script
     return kwargs
 
 
+def _read_description_file(config):
+    """Handle the legacy 'description_file' option."""
+    description_files = has_get_option(config, 'metadata', 'description_file')
+    if not description_files:
+        return None
+
+    description_files = split_multiline(description_files)
+
+    data = ''
+    for filename in description_files:
+        description_file = io.open(filename, encoding='utf-8')
+        try:
+            data += description_file.read().strip() + '\n\n'
+        finally:
+            description_file.close()
+
+    return data
+
+
 def setup_cfg_to_setup_kwargs(config, script_args=()):
     """Convert config options to kwargs.
 
@@ -313,28 +428,54 @@ def setup_cfg_to_setup_kwargs(config, sc
     # parse env_markers.
     all_requirements = {}
 
+    # We want people to use description and long_description over summary
+    # and description_file, but there is obvious overlap. If we see both of
+    # the former being used, don't normalize.
+    skip_description_normalization = False
+    if has_get_option(config, 'metadata', 'description') and (
+        has_get_option(config, 'metadata', 'long_description')
+        or has_get_option(config, 'metadata', 'description_file')
+    ):
+        kwargs['description'] = has_get_option(
+            config, 'metadata', 'description'
+        )
+        long_description = has_get_option(
+            config, 'metadata', 'long_description'
+        )
+        if long_description:
+            kwargs['long_description'] = long_description
+        else:
+            kwargs['long_description'] = _read_description_file(config)
+
+        skip_description_normalization = True
+
     for alias, arg in CFG_TO_PY_SETUP_ARGS:
         section, option = alias
 
+        if skip_description_normalization and alias in (
+            ('metadata', 'summary'),
+            ('metadata', 'description'),
+        ):
+            continue
+
         in_cfg_value = has_get_option(config, section, option)
-        if not in_cfg_value and arg == "long_description":
-            in_cfg_value = has_get_option(config, section, "description_file")
-            if in_cfg_value:
-                in_cfg_value = split_multiline(in_cfg_value)
-                value = ''
-                for filename in in_cfg_value:
-                    description_file = io.open(filename, encoding='utf-8')
-                    try:
-                        value += description_file.read().strip() + '\n\n'
-                    finally:
-                        description_file.close()
-                in_cfg_value = value
+
+        if alias == ('metadata', 'description') and not in_cfg_value:
+            in_cfg_value = _read_description_file(config)
 
         if not in_cfg_value:
             continue
 
+        if alias in DEPRECATED_CFG:
+            warnings.warn(
+                "The '[%s] %s' option is deprecated: %s"
+                % (alias[0], alias[1], DEPRECATED_CFG[alias]),
+                DeprecationWarning,
+            )
+
         if arg in CSV_FIELDS:
             in_cfg_value = split_csv(in_cfg_value)
+
         if arg in MULTI_FIELDS:
             in_cfg_value = split_multiline(in_cfg_value)
         elif arg in MAP_FIELDS:
@@ -351,17 +492,28 @@ def setup_cfg_to_setup_kwargs(config, sc
                 in_cfg_value = False
 
         if in_cfg_value:
+            if arg in REMOVED_KEYWORDS and (
+                pbr._compat.packaging.parse_version(setuptools.__version__)
+                >= pbr._compat.packaging.parse_version(REMOVED_KEYWORDS[arg])
+            ):
+                # deprecation warnings, if any, will already have been logged,
+                # so simply skip this
+                continue
+
             if arg in ('install_requires', 'tests_require'):
                 # Replaces PEP345-style version specs with the sort expected by
                 # setuptools
-                in_cfg_value = [_VERSION_SPEC_RE.sub(r'\1\2', pred)
-                                for pred in in_cfg_value]
+                in_cfg_value = [
+                    _VERSION_SPEC_RE.sub(r'\1\2', pred)
+                    for pred in in_cfg_value
+                ]
             if arg == 'install_requires':
                 # Split install_requires into package,env_marker tuples
                 # These will be re-assembled later
                 install_requires = []
                 requirement_pattern = (
-                    r'(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$')
+                    r'(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
+                )
                 for requirement in in_cfg_value:
                     m = re.match(requirement_pattern, requirement)
                     requirement_package = m.group('package').strip()
@@ -390,7 +542,8 @@ def setup_cfg_to_setup_kwargs(config, sc
                     elif firstline:
                         raise errors.DistutilsOptionError(
                             'malformed package_data first line %r (misses '
-                            '"=")' % line)
+                            '"=")' % line
+                        )
                     else:
                         prev.extend(shlex_split(line.strip()))
                     firstline = False
@@ -423,14 +576,17 @@ def setup_cfg_to_setup_kwargs(config, sc
 
     if 'extras' in config:
         requirement_pattern = (
-            r'(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$')
+            r'(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
+        )
         extras = config['extras']
         # Add contents of test-requirements, if any, into an extra named
         # 'test' if one does not already exist.
         if 'test' not in extras:
             from pbr import packaging
-            extras['test'] = "\n".join(packaging.parse_requirements(
-                packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':')
+
+            extras['test'] = "\n".join(
+                packaging.parse_requirements(packaging.TEST_REQUIREMENTS_FILES)
+            ).replace(';', ':')
 
         for extra in extras:
             extra_requirements = []
@@ -461,7 +617,9 @@ def setup_cfg_to_setup_kwargs(config, sc
                 # multiple setup.py commands at once.
                 if 'bdist_wheel' not in script_args:
                     try:
-                        if pkg_resources.evaluate_marker('(%s)' % env_marker):
+                        if pbr._compat.packaging.evaluate_marker(
+                            '(%s)' % env_marker
+                        ):
                             extras_key = req_group
                     except SyntaxError:
                         log.error(
@@ -481,57 +639,24 @@ def setup_cfg_to_setup_kwargs(config, sc
     return kwargs
 
 
-def register_custom_compilers(config):
-    """Handle custom compilers.
-
-    This has no real equivalent in distutils, where additional compilers could
-    only be added programmatically, so we have to hack it in somehow.
-    """
-
-    compilers = has_get_option(config, 'global', 'compilers')
-    if compilers:
-        compilers = split_multiline(compilers)
-        for compiler in compilers:
-            compiler = resolve_name(compiler)
-
-            # In distutils2 compilers these class attributes exist; for
-            # distutils1 we just have to make something up
-            if hasattr(compiler, 'name'):
-                name = compiler.name
-            else:
-                name = compiler.__name__
-            if hasattr(compiler, 'description'):
-                desc = compiler.description
-            else:
-                desc = 'custom compiler %s' % name
-
-            module_name = compiler.__module__
-            # Note; this *will* override built in compilers with the same name
-            # TODO(embray): Maybe display a warning about this?
-            cc = distutils.ccompiler.compiler_class
-            cc[name] = (module_name, compiler.__name__, desc)
-
-            # HACK!!!!  Distutils assumes all compiler modules are in the
-            # distutils package
-            sys.modules['distutils.' + module_name] = sys.modules[module_name]
-
-
 def get_extension_modules(config):
     """Handle extension modules"""
 
-    EXTENSION_FIELDS = ("sources",
-                        "include_dirs",
-                        "define_macros",
-                        "undef_macros",
-                        "library_dirs",
-                        "libraries",
-                        "runtime_library_dirs",
-                        "extra_objects",
-                        "extra_compile_args",
-                        "extra_link_args",
-                        "export_symbols",
-                        "swig_opts",
-                        "depends")
+    EXTENSION_FIELDS = (
+        "sources",
+        "include_dirs",
+        "define_macros",
+        "undef_macros",
+        "library_dirs",
+        "libraries",
+        "runtime_library_dirs",
+        "extra_objects",
+        "extra_compile_args",
+        "extra_link_args",
+        "export_symbols",
+        "swig_opts",
+        "depends",
+    )
 
     ext_modules = []
     for section in config:
@@ -564,24 +689,30 @@ def get_extension_modules(config):
             if ext_args:
                 if 'name' not in ext_args:
                     ext_args['name'] = labels[1]
-                ext_modules.append(extension.Extension(ext_args.pop('name'),
-                                                       **ext_args))
+                ext_modules.append(
+                    extension.Extension(ext_args.pop('name'), **ext_args)
+                )
     return ext_modules
 
 
 def get_entry_points(config):
-    """Process the [entry_points] section of setup.cfg.
-
-    Processes setup.cfg to handle setuptools entry points. This is, of course,
-    not a standard feature of distutils2/packaging, but as there is not
-    currently a standard alternative in packaging, we provide support for them.
-    """
+    """Process the [entry_points] section of setup.cfg."""
 
     if 'entry_points' not in config:
         return {}
 
-    return dict((option, split_multiline(value))
-                for option, value in config['entry_points'].items())
+    warnings.warn(
+        "The 'entry_points' section has been deprecated in favour of the "
+        "'[options.entry_points]' section (if using 'setup.cfg') or the "
+        "'[project.scripts]' and/or '[project.entry-points.{name}]' sections "
+        "(if using 'pyproject.toml')",
+        DeprecationWarning,
+    )
+
+    return {
+        option: split_multiline(value)
+        for option, value in config['entry_points'].items()
+    }
 
 
 def has_get_option(config, section, option):
@@ -594,18 +725,22 @@ def has_get_option(config, section, opti
 def split_multiline(value):
     """Special behaviour when we have a multi line options"""
 
-    value = [element for element in
-             (line.strip() for line in value.split('\n'))
-             if element and not element.startswith('#')]
+    value = [
+        element
+        for element in (line.strip() for line in value.split('\n'))
+        if element and not element.startswith('#')
+    ]
     return value
 
 
 def split_csv(value):
     """Special behaviour when we have a comma separated options"""
 
-    value = [element for element in
-             (chunk.strip() for chunk in value.split(','))
-             if element]
+    value = [
+        element
+        for element in (chunk.strip() for chunk in value.split(','))
+        if element
+    ]
     return value
 
 
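
The new DEPRECATED_CFG table means cfg_to_args() now emits standard DeprecationWarning instances when a setup.cfg still uses the old pbr option names. A rough sketch of observing them, assuming it is run from a project root containing a pbr-style setup.cfg:

    import warnings

    from pbr import util

    # Collect the deprecation warnings emitted while translating setup.cfg
    # into setup() keyword arguments.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always', DeprecationWarning)
        kwargs = util.cfg_to_args('setup.cfg')

    for entry in caught:
        if issubclass(entry.category, DeprecationWarning):
            print(entry.message)
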
diff -pruN 6.1.1-2/pbr/version.py 7.0.1-2/pbr/version.py
--- 6.1.1-2/pbr/version.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pbr/version.py	2025-08-14 16:07:35.000000000 +0000
@@ -1,4 +1,3 @@
-
 #    Copyright 2012 OpenStack Foundation
 #    Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
 #
@@ -18,20 +17,14 @@
 Utilities for consuming the version from importlib-metadata.
 """
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import itertools
 import operator
 import sys
 
-# TODO(stephenfin): Remove this once we drop support for Python < 3.8
-if sys.version_info >= (3, 8):
-    from importlib import metadata as importlib_metadata
-    use_importlib = True
-else:
-    try:
-        import importlib_metadata
-        use_importlib = True
-    except ImportError:
-        use_importlib = False
+import pbr._compat.metadata
 
 
 def _is_int(string):
@@ -49,8 +42,14 @@ class SemanticVersion(object):
     """
 
     def __init__(
-            self, major, minor=0, patch=0, prerelease_type=None,
-            prerelease=None, dev_count=None):
+        self,
+        major,
+        minor=0,
+        patch=0,
+        prerelease_type=None,
+        prerelease=None,
+        dev_count=None,
+    ):
         """Create a SemanticVersion.
 
         :param major: Major component of the version.
@@ -97,10 +96,14 @@ class SemanticVersion(object):
         else:
             uq_dev = 1
         return (
-            self._major, self._minor, self._patch,
+            self._major,
+            self._minor,
+            self._patch,
             uq_dev,
-            rc_lookup[self._prerelease_type], self._prerelease,
-            self._dev_count or sys.maxsize)
+            rc_lookup[self._prerelease_type],
+            self._prerelease,
+            self._dev_count or sys.maxsize,
+        )
 
     def __lt__(self, other):
         """Compare self and other, another Semantic Version."""
@@ -170,15 +173,20 @@ class SemanticVersion(object):
         if digit_len == 0:
             raise ValueError("Invalid version %r" % version_string)
         elif digit_len < 3:
-            if (digit_len < len(input_components) and
-                    input_components[digit_len][0].isdigit()):
+            if (
+                digit_len < len(input_components)
+                and input_components[digit_len][0].isdigit()
+            ):
                 # Handle X.YaZ - Y is a digit not a leadin to pre-release.
                 mixed_component = input_components[digit_len]
-                last_component = ''.join(itertools.takewhile(
-                    lambda x: x.isdigit(), mixed_component))
+                last_component = ''.join(
+                    itertools.takewhile(lambda x: x.isdigit(), mixed_component)
+                )
                 components.append(last_component)
-                input_components[digit_len:digit_len + 1] = [
-                    last_component, mixed_component[len(last_component):]]
+                input_components[digit_len : digit_len + 1] = [
+                    last_component,
+                    mixed_component[len(last_component) :],
+                ]
                 digit_len += 1
             components.extend([0] * (3 - digit_len))
         components.extend(input_components[digit_len:])
@@ -195,8 +203,9 @@ class SemanticVersion(object):
             segment = ''.join(itertools.dropwhile(isdigit, segment))
             isalpha = operator.methodcaller('isalpha')
             prerelease_type = ''.join(itertools.takewhile(isalpha, segment))
-            prerelease = segment[len(prerelease_type)::]
+            prerelease = segment[len(prerelease_type) : :]
             return prerelease_type, int(prerelease)
+
         if _is_int(components[2]):
             patch = int(components[2])
         else:
@@ -215,8 +224,9 @@ class SemanticVersion(object):
             # old dev format - 0.1.2.3.g1234
             dev_count = int(remainder[0])
         else:
-            if remainder and (remainder[0][0] == '0' or
-                              remainder[0][0] in ('a', 'b', 'r')):
+            if remainder and (
+                remainder[0][0] == '0' or remainder[0][0] in ('a', 'b', 'r')
+            ):
                 # Current RC/beta layout
                 prerelease_type, prerelease = _parse_type(remainder[0])
                 remainder = remainder[1:]
@@ -230,16 +240,23 @@ class SemanticVersion(object):
                 else:
                     raise ValueError(
                         'Unknown remainder %r in %r'
-                        % (remainder, version_string))
+                        % (remainder, version_string)
+                    )
                 remainder = remainder[1:]
         result = SemanticVersion(
-            major, minor, patch, prerelease_type=prerelease_type,
-            prerelease=prerelease, dev_count=dev_count)
+            major,
+            minor,
+            patch,
+            prerelease_type=prerelease_type,
+            prerelease=prerelease,
+            dev_count=dev_count,
+        )
         if post_count:
             if dev_count:
                 raise ValueError(
                     'Cannot combine postN and devN - no mapping in %r'
-                    % (version_string,))
+                    % (version_string,)
+                )
             result = result.increment().to_dev(post_count)
         return result
 
@@ -281,8 +298,7 @@ class SemanticVersion(object):
                     new_major = self._major - 1
                 else:
                     new_major = 0
-        return SemanticVersion(
-            new_major, new_minor, new_patch)
+        return SemanticVersion(new_major, new_minor, new_patch)
 
     def increment(self, minor=False, major=False):
         """Return an incremented SemanticVersion.
@@ -323,8 +339,12 @@ class SemanticVersion(object):
         else:
             new_major = self._major
         return SemanticVersion(
-            new_major, new_minor, new_patch,
-            new_prerelease_type, new_prerelease)
+            new_major,
+            new_minor,
+            new_patch,
+            new_prerelease_type,
+            new_prerelease,
+        )
 
     def _long_version(self, pre_separator, rc_marker=""):
         """Construct a long string version of this semver.
@@ -334,16 +354,23 @@ class SemanticVersion(object):
             version number of the component to preserve sorting. (Used for
             rpm support)
         """
-        if ((self._prerelease_type or self._dev_count) and
-                pre_separator is None):
+        if (
+            self._prerelease_type or self._dev_count
+        ) and pre_separator is None:
             segments = [self.decrement().brief_string()]
             pre_separator = "."
         else:
             segments = [self.brief_string()]
         if self._prerelease_type:
             segments.append(
-                "%s%s%s%s" % (pre_separator, rc_marker, self._prerelease_type,
-                              self._prerelease))
+                "%s%s%s%s"
+                % (
+                    pre_separator,
+                    rc_marker,
+                    self._prerelease_type,
+                    self._prerelease,
+                )
+            )
         if self._dev_count:
             if not self._prerelease_type:
                 segments.append(pre_separator)
@@ -376,8 +403,13 @@ class SemanticVersion(object):
         :param dev_count: The number of commits since the last release.
         """
         return SemanticVersion(
-            self._major, self._minor, self._patch, self._prerelease_type,
-            self._prerelease, dev_count=dev_count)
+            self._major,
+            self._minor,
+            self._patch,
+            self._prerelease_type,
+            self._prerelease,
+            dev_count=dev_count,
+        )
 
     def version_tuple(self):
         """Present the version as a version_info tuple.
@@ -397,15 +429,17 @@ class SemanticVersion(object):
         """
         segments = [self._major, self._minor, self._patch]
         if self._prerelease_type:
-            type_map = {('a', False): 'alpha',
-                        ('b', False): 'beta',
-                        ('rc', False): 'candidate',
-                        ('a', True): 'alphadev',
-                        ('b', True): 'betadev',
-                        ('rc', True): 'candidatedev',
-                        }
+            type_map = {
+                ('a', False): 'alpha',
+                ('b', False): 'beta',
+                ('rc', False): 'candidate',
+                ('a', True): 'alphadev',
+                ('b', True): 'betadev',
+                ('rc', True): 'candidatedev',
+            }
             segments.append(
-                type_map[(self._prerelease_type, bool(self._dev_count))])
+                type_map[(self._prerelease_type, bool(self._dev_count))]
+            )
             segments.append(self._dev_count or self._prerelease)
         elif self._dev_count:
             segments.append('dev')
@@ -436,68 +470,34 @@ class VersionInfo(object):
     def __repr__(self):
         """Include the name."""
         return "pbr.version.VersionInfo(%s:%s)" % (
-            self.package, self.version_string())
-
-    def _get_version_from_pkg_resources(self):
-        """Obtain a version from pkg_resources or setup-time logic if missing.
+            self.package,
+            self.version_string(),
+        )
 
-        This will try to get the version of the package from the pkg_resources
-        This will try to get the version of the package from the
-        record associated with the package, and if there is no such record
-        importlib_metadata record associated with the package, and if there
-        falls back to the logic sdist would use.
+    def release_string(self):
+        """Return the full version of the package.
 
-        is no such record falls back to the logic sdist would use.
+        This includes suffixes indicating VCS status.
         """
-        import pkg_resources
-
-        try:
-            requirement = pkg_resources.Requirement.parse(self.package)
-            provider = pkg_resources.get_provider(requirement)
-            result_string = provider.version
-        except pkg_resources.DistributionNotFound:
-            # The most likely cause for this is running tests in a tree
-            # produced from a tarball where the package itself has not been
-            # installed into anything. Revert to setup-time logic.
-            from pbr import packaging
-            result_string = packaging.get_version(self.package)
-
-        return SemanticVersion.from_pip_string(result_string)
+        return self.semantic_version().release_string()
 
-    def _get_version_from_importlib_metadata(self):
-        """Obtain a version from importlib or setup-time logic if missing.
+    def semantic_version(self):
+        """Return the SemanticVersion object for this version."""
+        if self._semantic is not None:
+            return self._semantic
 
-        This will try to get the version of the package from the
-        importlib_metadata record associated with the package, and if there
-        is no such record falls back to the logic sdist would use.
-        """
         try:
-            distribution = importlib_metadata.distribution(self.package)
-            result_string = distribution.version
-        except importlib_metadata.PackageNotFoundError:
+            result_string = pbr._compat.metadata.get_version(self.package)
+        except pbr._compat.metadata.PackageNotFound:
             # The most likely cause for this is running tests in a tree
             # produced from a tarball where the package itself has not been
             # installed into anything. Revert to setup-time logic.
             from pbr import packaging
-            result_string = packaging.get_version(self.package)
-        return SemanticVersion.from_pip_string(result_string)
 
-    def release_string(self):
-        """Return the full version of the package.
+            result_string = packaging.get_version(self.package)
 
-        This including suffixes indicating VCS status.
-        """
-        return self.semantic_version().release_string()
+        self._semantic = SemanticVersion.from_pip_string(result_string)
 
-    def semantic_version(self):
-        """Return the SemanticVersion object for this version."""
-        if self._semantic is None:
-            # TODO(damami): simplify this once Python 3.8 is the oldest
-            # we support
-            if use_importlib:
-                self._semantic = self._get_version_from_importlib_metadata()
-            else:
-                self._semantic = self._get_version_from_pkg_resources()
         return self._semantic
 
     def version_string(self):
@@ -516,6 +516,5 @@ class VersionInfo(object):
         prefix and then cached and returned.
         """
         if not self._cached_version:
-            self._cached_version = "%s%s" % (prefix,
-                                             self.version_string())
+            self._cached_version = "%s%s" % (prefix, self.version_string())
         return self._cached_version
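
The slimmed-down VersionInfo now resolves the installed version through pbr._compat.metadata and caches the resulting SemanticVersion on first use. A small usage sketch, assuming pbr is installed:

    from pbr.version import VersionInfo

    info = VersionInfo('pbr')

    # version_string() is the short X.Y.Z form; release_string() keeps any
    # dev/pre-release suffix. Both are derived from the installed metadata,
    # and the SemanticVersion is cached after the first lookup.
    print(info.version_string())
    print(info.release_string())
    print(info.cached_version_string(prefix='v'))
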
diff -pruN 6.1.1-2/pyproject.toml 7.0.1-2/pyproject.toml
--- 6.1.1-2/pyproject.toml	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/pyproject.toml	2025-08-14 16:07:35.000000000 +0000
@@ -14,3 +14,7 @@ backend-path = ["."]
 
 [tools.setuptools]
 py-modules=[]
+
+[tool.black]
+line-length = 79
+skip-string-normalization = true
diff -pruN 6.1.1-2/releasenotes/notes/build_sphinx_removal-de990a5c14a9e64d.yaml 7.0.1-2/releasenotes/notes/build_sphinx_removal-de990a5c14a9e64d.yaml
--- 6.1.1-2/releasenotes/notes/build_sphinx_removal-de990a5c14a9e64d.yaml	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/releasenotes/notes/build_sphinx_removal-de990a5c14a9e64d.yaml	2025-08-14 16:07:35.000000000 +0000
@@ -1,7 +1,7 @@
 ---
 upgrade:
   - |
-    The deprecated support for `setup.py build_sphinx` has been removed.
+    The deprecated support for ``setup.py build_sphinx`` has been removed.
     This feature has been deprecated since version 4.2. Setuptools and
     sphinx have removed support for this command which breaks PBR's ability
     to integrate with those two tools to provide this functionality.
diff -pruN 6.1.1-2/releasenotes/notes/global-compilers-removal-62b131e40de087ef.yaml 7.0.1-2/releasenotes/notes/global-compilers-removal-62b131e40de087ef.yaml
--- 6.1.1-2/releasenotes/notes/global-compilers-removal-62b131e40de087ef.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/releasenotes/notes/global-compilers-removal-62b131e40de087ef.yaml	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,5 @@
+---
+upgrade:
+  - |
+    Support for the ``[global] compilers`` option has been removed. This was an
+    undocumented feature that did not appear to have any use.
diff -pruN 6.1.1-2/releasenotes/notes/setuptools-alignment-b5b1309f47e9cf98.yaml 7.0.1-2/releasenotes/notes/setuptools-alignment-b5b1309f47e9cf98.yaml
--- 6.1.1-2/releasenotes/notes/setuptools-alignment-b5b1309f47e9cf98.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/releasenotes/notes/setuptools-alignment-b5b1309f47e9cf98.yaml	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,99 @@
+---
+deprecations:
+  - |
+    A number of pbr-specific `setup.cfg` sections and options have been
+    deprecated in favour of setuptools-native `setup.cfg` sections and
+    options or their `pyproject.toml` equivalents:
+
+    - The `[entry_points]` section has been deprecated in favour of the
+      `[options.entry_points]` section (if using `setup.cfg`) or the
+      `[project.scripts]` and/or `[project.entry-points.{name}]` sections
+      (if using `pyproject.toml`)
+
+    - The `[files] packages_root` option has been deprecated in favour of the
+      `[options] package_dir` option (if using `setup.cfg`) or the
+      `[tools.setuptools] package_dir` option (if using `pyproject.toml`)
+
+    - The `[files] packages` option has been deprecated in favour of the
+      `[options] packages` option (if using `setup.cfg`) or the
+      `[tools.setuptools] packages` option (if using `pyproject.toml`)
+
+    - The `[files] package_data` option has been deprecated in favour of the
+      `[options.package_data]` option (if using `setup.cfg`) or the
+      `[tool.setuptools.package-data]` option (if using `pyproject.toml`)
+
+    - The `[files] namespace_packages` option has been deprecated in favour of
+      the `[options] namespace_packages` option (if using `setup.cfg`).
+      Alternatively, migrate to PEP 420-style namespace packages
+
+    - The `[files] data_files` option has been deprecated, and support for
+      non-package data files in general is deprecated in setuptools and their
+      use is discouraged. For package data files, use `[options] package_data`
+      (if using `setup.cfg`) or `[tools.setuptools] package_data` (if using
+      `pyproject.toml`) instead. If non-package data files are necessary, use
+      `[options] data_files` (if using `setup.cfg`) or `[tools.setuptools]
+      data-files` (if using `pyproject.toml`) instead.
+
+    - The `[files] scripts` option has been deprecated. There is no direct
+      replacement. Instead, migrate to using the `console_scripts` entrypoint
+      and use the `[options.entry_points]` option (if using `setup.cfg`) or
+      the `[project.scripts]` option (if using `pyproject.toml`).
+
+    - The `[files] modules` option has been deprecated in favour of the
+      `[options] py_modules` option (if using `setup.cfg`) or the
+      `[tools.setuptools] py-modules` option (if using `pyproject.toml`)
+
+    - The `[metadata] home_page` option has been deprecated in favour of the
+      `[metadata] url` option (if using `setup.cfg`) or the `[project.urls]`
+      option (if using `pyproject.toml`)
+
+    - The `[metadata] summary` option has been deprecated in favour of the
+      `[metadata] description` option (if using `setup.cfg`) or the `[project]
+      description` option (if using `pyproject.toml`)
+
+    - The `[metadata] classifier` option has been deprecated in favour of the
+      `[metadata] classifiers` option (if using `setup.cfg`) or the `[project]
+      classifiers` option (if using `pyproject.toml`)
+
+    - The `[metadata] platform` option has been deprecated in favour of the
+      `[metadata] platforms` option (if using `setup.cfg`) or the
+      `[tool.setuptools] platforms` (if using `pyproject.toml`)
+
+    - The `[metadata] requires_dist` option has been deprecated in favour of
+      the `[options] install_requires` option (if using `setup.cfg`) or the
+      `[project] dependencies` option (if using `pyproject.toml`)
+
+    - The `[metadata] setup_requires_dist` option has been deprecated in favour
+      of the `[options] setup_requires` option (if using `setup.cfg`) or the
+      `[build-system] requires` option (if using `pyproject.toml`)
+
+    - The `[metadata] python_requires` option has been deprecated in favour of
+      the `[options] python_requires` option (if using `setup.cfg`) or the
+      `[project] requires-python` option (if using `pyproject.toml`)
+
+    - The `[metadata] requires_python` option has been deprecated in favour of
+      the `[options] python_requires` option (if using `setup.cfg`) or the
+      `[project] requires-python` option (if using `pyproject.toml`)
+
+    - The `[metadata] provides_dist` option has been deprecated as it is
+      obsolete and is no longer used by pip
+
+    - The `[metadata] provides_extra` option has been deprecated as it is
+      obsolete and is no longer used by pip
+
+    - The `[metadata] obsoletes_dist` option has been deprecated as it is
+      obsolete and is no longer used by pip
+
+    - The `[backwards_compat] zip_safe` option has been deprecated as it
+      was only relevant in the context of eggs and is therefore obsolete
+
+    - The `[backwards_compat] dependency_links` option has been deprecated as
+      it is obsolete and is no longer used by pip
+
+    - The `[backwards_compat] tests_require` option has been deprecated as it
+      is obsolete and is no longer used by pip
+
+    - The `[backwards_compat] include_package_data` option has been deprecated
+      in favour of the `[options] include_package_data` option (if using
+      `setup.cfg`) or the `[tools.setuptools] include-package-data` option
+      (if using `pyproject.toml`)
diff -pruN 6.1.1-2/releasenotes/notes/test-command-removal-153fc9ecdd6834ef.yaml 7.0.1-2/releasenotes/notes/test-command-removal-153fc9ecdd6834ef.yaml
--- 6.1.1-2/releasenotes/notes/test-command-removal-153fc9ecdd6834ef.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 7.0.1-2/releasenotes/notes/test-command-removal-153fc9ecdd6834ef.yaml	2025-08-14 16:07:35.000000000 +0000
@@ -0,0 +1,10 @@
+---
+upgrade:
+  - |
+    The deprecated support for ``setup.py test`` has been removed.
+    This feature has been deprecated since version 4.0. The two test tools
+    supported, ``nose`` and ``testr``, are no longer maintained, and the
+    general concept of distutils commands is itself deprecated.
+    Users should switch to running the test runners directly and ideally
+    migrate to an alternative, supported test runner like ``stestr`` or
+    ``pytest``.
diff -pruN 6.1.1-2/releasenotes/source/conf.py 7.0.1-2/releasenotes/source/conf.py
--- 6.1.1-2/releasenotes/source/conf.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/releasenotes/source/conf.py	2025-08-14 16:07:35.000000000 +0000
@@ -43,12 +43,6 @@ html_theme = 'openstackdocs'
 
 # -- Options for openstackdocstheme ---------------------------------------
 
-# Deprecated options for openstackdocstheme < 2.2.0, can be removed once
-# pbr stops supporting py27.
-repository_name = 'openstack/pbr'
-bug_project = 'pbr'
-bug_tag = ''
-
 # New options with openstackdocstheme >=2.2.0
 openstackdocs_repo_name = 'openstack/pbr'
 openstackdocs_auto_name = False
diff -pruN 6.1.1-2/setup.cfg 7.0.1-2/setup.cfg
--- 6.1.1-2/setup.cfg	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/setup.cfg	2025-08-14 16:07:35.000000000 +0000
@@ -2,10 +2,11 @@
 name = pbr
 author = OpenStack
 author_email = openstack-discuss@lists.openstack.org
-summary = Python Build Reasonableness
+description = Python Build Reasonableness
 long_description = file: README.rst
 long_description_content_type = text/x-rst; charset=UTF-8
 url = https://docs.openstack.org/pbr/latest/
+license = Apache-2.0
 project_urls =
     Bug Tracker = https://bugs.launchpad.net/pbr/
     Documentation = https://docs.openstack.org/pbr/
@@ -16,7 +17,6 @@ classifiers =
     Environment :: OpenStack
     Intended Audience :: Developers
     Intended Audience :: Information Technology
-    License :: OSI Approved :: Apache Software License
     Operating System :: OS Independent
     Programming Language :: Python
     Programming Language :: Python :: 2
@@ -32,12 +32,10 @@ classifiers =
 
 [options]
 python_requires = >=2.6
-
-[files]
 packages =
     pbr
 
-[entry_points]
+[options.entry_points]
 distutils.setup_keywords =
     pbr = pbr.core:pbr
 egg_info.writers =
diff -pruN 6.1.1-2/setup.py 7.0.1-2/setup.py
--- 6.1.1-2/setup.py	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/setup.py	2025-08-14 16:07:35.000000000 +0000
@@ -17,5 +17,4 @@ import setuptools
 
 from pbr import util
 
-setuptools.setup(
-    **util.cfg_to_args())
+setuptools.setup(**util.cfg_to_args())
diff -pruN 6.1.1-2/test-requirements.txt 7.0.1-2/test-requirements.txt
--- 6.1.1-2/test-requirements.txt	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/test-requirements.txt	2025-08-14 16:07:35.000000000 +0000
@@ -1,10 +1,5 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
 wheel>=0.32.0 # MIT
 fixtures>=3.0.0 # Apache-2.0/BSD
-hacking>=1.1.0,<4.0.0;python_version>='3.6' # Apache-2.0
 mock>=2.0.0,<4.0.0;python_version=='2.7' # BSD
 stestr>=2.1.0,<3.0;python_version=='2.7' # Apache-2.0
 stestr>=2.1.0;python_version>='3.0' # Apache-2.0
@@ -18,5 +13,3 @@ coverage!=4.4,>=4.0 # Apache-2.0
 sphinx!=1.6.6,!=1.6.7,>=1.6.2,<2.0.0;python_version=='2.7' # BSD
 sphinx!=1.6.6,!=1.6.7,>=1.6.2;python_version>='3.4' # BSD
 testrepository>=0.0.18 # Apache-2.0/BSD
-
-pre-commit>=2.6.0;python_version>='3.6' # MIT
diff -pruN 6.1.1-2/tools/integration.sh 7.0.1-2/tools/integration.sh
--- 6.1.1-2/tools/integration.sh	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/tools/integration.sh	2025-08-14 16:07:35.000000000 +0000
@@ -134,12 +134,17 @@ function check_setuppy {
     # behaviors.
     $epvenv/bin/pip $PIPFLAGS install -f $WHEELHOUSE setuptools
 
+    # FIXME(stephenfin): This is broken with setuptools v80.0.0+ since that
+    # version no longer invokes easy_install but rather defers to pip. In CI,
+    # we only see this failure on Noble or later, because Jammy's version of
+    # Python is too old for setuptools v80.0.0+.
+
     # First check develop
-    PBR_VERSION=0.0 $epvenv/bin/python setup.py develop
-    cat $epvenv/bin/test_cmd
-    grep 'PBR Generated' $epvenv/bin/test_cmd
-    $epvenv/bin/test_cmd | grep 'Test cmd'
-    PBR_VERSION=0.0 $epvenv/bin/python setup.py develop --uninstall
+    # PBR_VERSION=0.0 $epvenv/bin/python setup.py develop
+    # cat $epvenv/bin/test_cmd
+    # grep 'PBR Generated' $epvenv/bin/test_cmd
+    # $epvenv/bin/test_cmd | grep 'Test cmd'
+    # PBR_VERSION=0.0 $epvenv/bin/python setup.py develop --uninstall
 
     # Now check install
     PBR_VERSION=0.0 $epvenv/bin/python setup.py install
@@ -157,20 +162,25 @@ function check_pip {
     mkvenv $epvenv
     $epvenv/bin/pip $PIPFLAGS install -f $WHEELHOUSE -e $eppbrdir
 
+    # FIXME(stephenfin): This is broken with setuptools v80.0.0+ since that
+    # version no longer invokes easy_install but rather defers to pip. In CI,
+    # we only see this failure on Noble or later, because Jammy's version of
+    # Python is too old for setuptools v80.0.0+.
+
     # First check develop
-    PBR_VERSION=0.0 $epvenv/bin/pip install -e ./
-    cat $epvenv/bin/test_cmd
-    if [ -f ./pyproject.toml ] ; then
-        # Pip dev installs with pyproject.toml build from editable wheels
-        # which do not use PBR generated console scripts.
-        grep 'from test_project import main' $epvenv/bin/test_cmd
-        ! grep 'PBR Generated' $epvenv/bin/test_cmd
-    else
-        # Otherwise we should get the PBR generated script
-        grep 'PBR Generated' $epvenv/bin/test_cmd
-    fi
-    $epvenv/bin/test_cmd | grep 'Test cmd'
-    PBR_VERSION=0.0 $epvenv/bin/pip uninstall -y test-project
+    # PBR_VERSION=0.0 $epvenv/bin/pip install -e ./
+    # cat $epvenv/bin/test_cmd
+    # if [ -f ./pyproject.toml ] ; then
+    #     # Pip dev installs with pyproject.toml build from editable wheels
+    #     # which do not use PBR generated console scripts.
+    #     grep 'from test_project import main' $epvenv/bin/test_cmd
+    #     ! grep 'PBR Generated' $epvenv/bin/test_cmd
+    # else
+    #     # Otherwise we should get the PBR generated script
+    #     grep 'PBR Generated' $epvenv/bin/test_cmd
+    # fi
+    # $epvenv/bin/test_cmd | grep 'Test cmd'
+    # PBR_VERSION=0.0 $epvenv/bin/pip uninstall -y test-project
 
     # Now check install
     PBR_VERSION=0.0 $epvenv/bin/pip install ./
diff -pruN 6.1.1-2/tox.ini 7.0.1-2/tox.ini
--- 6.1.1-2/tox.ini	2025-01-28 17:36:46.000000000 +0000
+++ 7.0.1-2/tox.ini	2025-08-14 16:07:35.000000000 +0000
@@ -1,4 +1,6 @@
 [tox]
+# We can't bump minversion yet since we need to support Python 2.7, and
+# tox 4.x requires Python 3.7+
 minversion = 3.18.0
 envlist = pep8,py3,docs
 
@@ -20,7 +22,8 @@ setenv =
 # a broader range of Python versions than OpenStack as a whole
 deps =
   -r{toxinidir}/test-requirements.txt
-commands = stestr run --serial --suppress-attachments {posargs}
+commands =
+  stestr run --serial --suppress-attachments {posargs}
 
 # The latest pip that supports python3.6 assumes that pep660 editable
 # wheel installations should be used for development installs when the
@@ -32,16 +35,20 @@ commands = stestr run --serial --suppres
 usedevelop = false
 
 [testenv:pep8]
-commands = pre-commit run -a
+skip_install = true
+deps =
+  pre-commit
+commands =
+  pre-commit run --all-files --show-diff-on-failure
 
 [testenv:docs]
+usedevelop = false
 allowlist_externals =
   rm
 deps =
   -r{toxinidir}/doc/requirements.txt
 commands =
   rm -rf doc/build doc/source/reference/api
-  python setup.py sdist
   sphinx-build -W -b html doc/source doc/build/html {posargs}
 
 [testenv:releasenotes]
@@ -65,9 +72,10 @@ commands =
   coverage xml -o cover/coverage.xml
 
 [flake8]
-# W504 (you have to choose this or W503)
-# H216 we use mock instead of unittest.mock because we still test
-# against python2.7.
-ignore = W504,H216
+# E203 (conflicts with black)
+# E501 (conflicts with black)
+# W503 (conflicts with black)
+# H216 (we use mock for python2.7 compatibility)
+ignore = E203,E501,W503,H216
 exclude = .venv,.tox,dist,doc,*.egg,build
 show-source = true
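
For illustration, the linting configured above can be exercised locally. A minimal sketch, assuming tox and pre-commit are installed on the developer's machine:

    # via tox, using the pep8 environment defined above
    tox -e pep8

    # or by invoking pre-commit directly, as that environment does
    pre-commit run --all-files --show-diff-on-failure
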
