diff -pruN 2.6.1-3/debian/changelog 2.8.0-2/debian/changelog
--- 2.6.1-3/debian/changelog	2025-03-28 09:21:29.000000000 +0000
+++ 2.8.0-2/debian/changelog	2025-09-28 18:45:34.000000000 +0000
@@ -1,3 +1,15 @@
+python-oslo.limit (2.8.0-2) unstable; urgency=medium
+
+  * Uploading to unstable.
+
+ -- Thomas Goirand <zigo@debian.org>  Sun, 28 Sep 2025 20:45:34 +0200
+
+python-oslo.limit (2.8.0-1) experimental; urgency=medium
+
+  * New upstream release.
+
+ -- Thomas Goirand <zigo@debian.org>  Thu, 28 Aug 2025 16:06:32 +0200
+
 python-oslo.limit (2.6.1-3) unstable; urgency=medium
 
   * Uploading to unstable.
diff -pruN 2.6.1-3/oslo_limit/fixture.py 2.8.0-2/oslo_limit/fixture.py
--- 2.6.1-3/oslo_limit/fixture.py	2025-02-15 06:20:11.000000000 +0000
+++ 2.8.0-2/oslo_limit/fixture.py	2025-05-28 04:35:37.000000000 +0000
@@ -17,6 +17,7 @@ import fixtures as fixtures
 
 from openstack.identity.v3 import endpoint
 from openstack.identity.v3 import limit as keystone_limit
+from openstack.identity.v3 import registered_limit as keystone_rlimit
 
 
 class LimitFixture(fixtures.Fixture):
@@ -36,6 +37,35 @@ class LimitFixture(fixtures.Fixture):
         self.reglimits = reglimits
         self.projlimits = projlimits
 
+    def get_reglimit_objects(
+            self, service_id=None, region_id=None, resource_name=None):
+        limits = []
+        for name, value in self.reglimits.items():
+            if resource_name and resource_name != name:
+                continue
+            limit = keystone_rlimit.RegisteredLimit()
+            limit.resource_name = name
+            limit.default_limit = value
+            limits.append(limit)
+        return limits
+
+    def get_projlimit_objects(
+            self, service_id=None, region_id=None, resource_name=None,
+            project_id=None):
+        limits = []
+        for proj_id, limit_dict in self.projlimits.items():
+            if project_id and project_id != proj_id:
+                continue
+            for name, value in limit_dict.items():
+                if resource_name and resource_name != name:
+                    continue
+                limit = keystone_limit.Limit()
+                limit.project_id = proj_id
+                limit.resource_name = name
+                limit.resource_limit = value
+                limits.append(limit)
+        return limits
+
     def setUp(self):
         super().setUp()
 
@@ -56,20 +86,6 @@ class LimitFixture(fixtures.Fixture):
         fake_endpoint.region_id = "region_id"
         self.mock_conn.get_endpoint.return_value = fake_endpoint
 
-        def fake_limits(service_id, region_id, resource_name, project_id=None):
-            this_limit = keystone_limit.Limit()
-            this_limit.resource_name = resource_name
-            if project_id is None:
-                this_limit.default_limit = self.reglimits.get(resource_name)
-                if this_limit.default_limit is None:
-                    return iter([None])
-            else:
-                this_limit.resource_limit = \
-                    self.projlimits.get(project_id, {}).get(resource_name)
-                if this_limit.resource_limit is None:
-                    return iter([None])
-
-            return iter([this_limit])
-
-        self.mock_conn.limits.side_effect = fake_limits
-        self.mock_conn.registered_limits.side_effect = fake_limits
+        self.mock_conn.limits.side_effect = self.get_projlimit_objects
+        self.mock_conn.registered_limits.side_effect = (
+            self.get_reglimit_objects)
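
The fixture change above swaps the single fake_limits side effect for two helpers that return whole, keyword-filtered lists of SDK limit objects, mirroring how openstacksdk's registered_limits() and limits() calls behave. A minimal sketch of driving the updated fixture from a testtools-style test case (the class name, resource names, and values are illustrative only, not part of the package):

    import testtools

    from oslo_limit import fixture

    class TestLimitFixture(testtools.TestCase):
        def test_mocked_connection(self):
            fix = self.useFixture(fixture.LimitFixture(
                {'widgets': 10},                # registered (default) limits
                {'proj': {'widgets': 4}}))      # per-project overrides
            # The mocked connection now yields filtered limit objects rather
            # than a single synthesized Limit per call.
            regs = list(
                fix.mock_conn.registered_limits(resource_name='widgets'))
            self.assertEqual(10, regs[0].default_limit)
            projs = list(fix.mock_conn.limits(project_id='proj'))
            self.assertEqual(4, projs[0].resource_limit)
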
diff -pruN 2.6.1-3/oslo_limit/limit.py 2.8.0-2/oslo_limit/limit.py
--- 2.6.1-3/oslo_limit/limit.py	2025-02-15 06:20:11.000000000 +0000
+++ 2.8.0-2/oslo_limit/limit.py	2025-05-28 04:35:37.000000000 +0000
@@ -40,7 +40,19 @@ def _get_keystone_connection():
                 CONF, group='oslo_limit')
             session = loading.load_session_from_conf_options(
                 CONF, group='oslo_limit', auth=auth)
-            _SDK_CONNECTION = connection.Connection(session=session).identity
+            ksa_opts = loading.get_adapter_conf_options(
+                include_deprecated=False)
+            conn_kwargs = {}
+            for opt in ksa_opts:
+                if opt.dest != 'valid_interfaces':
+                    conn_kwargs['identity_' + opt.dest] = getattr(
+                        CONF.oslo_limit, opt.dest)
+            conn_kwargs['identity_interface'] = \
+                CONF.oslo_limit.valid_interfaces
+            _SDK_CONNECTION = connection.Connection(
+                session=session,
+                **conn_kwargs
+            ).identity
         except (ksa_exceptions.NoMatchingPlugin,
                 ksa_exceptions.MissingRequiredOptions,
                 ksa_exceptions.MissingAuthPlugin,
@@ -352,12 +364,28 @@ class _EnforcerUtils:
             LOG.debug("hit limit for project: %s", over_limit_list)
             raise exception.ProjectOverLimit(project_id, over_limit_list)
 
+    def _get_registered_limits(self):
+        registered_limits = []
+        reg_limits = self.connection.registered_limits(
+            service_id=self._service_id, region_id=self._region_id)
+        for reg_limit in reg_limits:
+            name, limit = reg_limit.resource_name, reg_limit.default_limit
+            registered_limits.append((name, limit))
+            if self.should_cache:
+                self.rlimit_cache[name] = reg_limit
+        return registered_limits
+
     def get_registered_limits(self, resource_names):
         """Get all the default limits for a given resource name list
 
         :param resource_names: list of resource_name strings
         :return: list of (resource_name, limit) pairs
         """
+        # If None was passed for resource_names, get and return all of the
+        # registered limits.
+        if resource_names is None:
+            return self._get_registered_limits()
+
         # Using a list to preserve the resource_name order
         registered_limits = []
         for resource_name in resource_names:
@@ -370,6 +398,25 @@ class _EnforcerUtils:
 
         return registered_limits
 
+    def _get_project_limits(self, project_id):
+        if project_id is None:
+            # If we were to pass None, we would receive limits for all projects
+            # and we would have to return {project_id: [(name, limit), ...]}
+            # which would be inconsistent with the return format of the other
+            # methods.
+            raise ValueError('project_id must not be None')
+
+        project_limits = []
+        proj_limits = self.connection.limits(
+            service_id=self._service_id, region_id=self._region_id,
+            project_id=project_id)
+        for proj_limit in proj_limits:
+            name, limit = proj_limit.resource_name, proj_limit.resource_limit
+            project_limits.append((name, limit))
+            if self.should_cache:
+                self.plimit_cache[project_id][name] = proj_limit
+        return project_limits
+
     def get_project_limits(self, project_id, resource_names):
         """Get all the limits for given project a resource_name list
 
@@ -380,6 +427,11 @@ class _EnforcerUtils:
         :param resource_names: list of resource_name strings
         :return: list of (resource_name,limit) pairs
         """
+        # If None was passed for resource_names, get and return all of the
+        # limits.
+        if resource_names is None:
+            return self._get_project_limits(project_id)
+
         # Using a list to preserve the resource_name order
         project_limits = []
         for resource_name in resource_names:
@@ -422,20 +474,21 @@ class _EnforcerUtils:
                 resource_name in self.plimit_cache[project_id]):
             return self.plimit_cache[project_id][resource_name]
 
-        # Get the limit from keystone.
+        # Get the limits from keystone.
         limits = self.connection.limits(
             service_id=self._service_id,
             region_id=self._region_id,
-            resource_name=resource_name,
             project_id=project_id)
-        try:
-            limit = next(limits)
-        except StopIteration:
-            return None
-
-        # Cache the limit if configured.
-        if self.should_cache and limit:
-            self.plimit_cache[project_id][resource_name] = limit
+        limit = None
+        for pl in limits:
+            # NOTE(melwitt): If project_id None was passed in, it's possible
+            # there will be multiple limits for the same resource (from various
+            # projects), so keep the existing oslo.limit behavior and return
+            # the first one we find. This could be considered to be a bug.
+            if limit is None and pl.resource_name == resource_name:
+                limit = pl
+            if self.should_cache:
+                self.plimit_cache[project_id][pl.resource_name] = pl
 
         return limit
 
@@ -444,18 +497,16 @@ class _EnforcerUtils:
         if resource_name in self.rlimit_cache:
             return self.rlimit_cache[resource_name]
 
-        # Get the limit from keystone.
+        # Get the limits from keystone.
         reg_limits = self.connection.registered_limits(
             service_id=self._service_id,
-            region_id=self._region_id,
-            resource_name=resource_name)
-        try:
-            reg_limit = next(reg_limits)
-        except StopIteration:
-            return None
-
-        # Cache the limit if configured.
-        if self.should_cache and reg_limit:
-            self.rlimit_cache[resource_name] = reg_limit
+            region_id=self._region_id)
+        reg_limit = None
+        for rl in reg_limits:
+            if rl.resource_name == resource_name:
+                reg_limit = rl
+            # Cache the limit if configured.
+            if self.should_cache:
+                self.rlimit_cache[rl.resource_name] = rl
 
         return reg_limit
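
Two behavioral changes in limit.py above are worth calling out: keystoneauth adapter options from [oslo_limit] (for example valid_interfaces or region_name) are now forwarded to the SDK Connection as identity_* keyword arguments instead of being ignored, and get_registered_limits()/get_project_limits() accept None for resource_names, fetching every limit for the service and region in a single request while priming the cache. A rough sketch of the latter, assuming a configured [oslo_limit] section and existing registered limits; _EnforcerUtils is a private helper and 'widgets' is an illustrative resource name:

    from oslo_limit import limit

    utils = limit._EnforcerUtils()

    # A single GET /registered_limits request returns every
    # (resource_name, default_limit) pair for the configured service/region
    # and, with caching enabled (the default), stores the limit objects in
    # utils.rlimit_cache.
    all_defaults = utils.get_registered_limits(None)

    # A later per-resource lookup is then served from the cache without an
    # additional API call.
    one_default = utils.get_registered_limits(['widgets'])
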
diff -pruN 2.6.1-3/oslo_limit/tests/test_limit.py 2.8.0-2/oslo_limit/tests/test_limit.py
--- 2.6.1-3/oslo_limit/tests/test_limit.py	2025-02-15 06:20:11.000000000 +0000
+++ 2.8.0-2/oslo_limit/tests/test_limit.py	2025-05-28 04:35:37.000000000 +0000
@@ -224,6 +224,45 @@ class TestEnforcer(base.BaseTestCase):
         mock_get_limits.assert_called_once_with(project_id, ["a", "b", "c"])
         self.assertEqual(mock_get_limits.return_value, limits)
 
+    def test_calculate_usage_cache(self, cache=True):
+        project_id = uuid.uuid4().hex
+        fix = self.useFixture(fixture.LimitFixture(
+            {'a': 5, 'b': 7, 'c': 8, 'd': 3},
+            {project_id: {'a': 2, 'b': 4}, 'other': {'a': 1, 'b': 2}}))
+        mock_usage = mock.MagicMock()
+        mock_usage.return_value = {'a': 1, 'b': 3, 'c': 2, 'd': 0}
+
+        enforcer = limit.Enforcer(mock_usage, cache=cache)
+        expected = {
+            'a': limit.ProjectUsage(2, 1),
+            'b': limit.ProjectUsage(4, 3),
+            'c': limit.ProjectUsage(8, 2),
+            'd': limit.ProjectUsage(3, 0),
+        }
+        self.assertEqual(
+            expected,
+            enforcer.calculate_usage(project_id, ['a', 'b', 'c', 'd']))
+
+        # If caching is enabled, there should be three calls to the GET
+        # /limits API: one that returns and caches 'a' and 'b', plus two for
+        # the cache misses on 'c' and 'd' (the project has no per-project
+        # limit set for 'c' or 'd', so nothing is cached for them).
+        # If caching is disabled, there should be four calls to the GET
+        # /limits API, one for each of 'a', 'b', 'c', and 'd'.
+        expected_count = 3 if cache else 4
+        self.assertEqual(expected_count, fix.mock_conn.limits.call_count)
+
+        # If caching is enabled, there should be one call to the GET
+        # /registered_limits API for 'c' and 'd'.
+        # If caching is disabled, there should be two calls to the GET
+        # /registered_limits API, one for each of 'c' and 'd'.
+        expected_count = 1 if cache else 2
+        self.assertEqual(
+            expected_count, fix.mock_conn.registered_limits.call_count)
+
+    def test_calculate_usage_no_cache(self):
+        self.test_calculate_usage_cache(cache=False)
+
 
 class TestFlatEnforcer(base.BaseTestCase):
     def setUp(self):
@@ -763,3 +802,121 @@ class TestEnforcerUtils(base.BaseTestCas
             utils._get_limit(None, 'foo')
             mgrl.assert_called_once_with('foo')
             mgpl.assert_not_called()
+
+    def test_get_registered_limits_resource_names_none(self):
+        fix = self.useFixture(fixture.LimitFixture({'foo': 5, 'bar': 7}, {}))
+
+        utils = limit._EnforcerUtils()
+        limits = utils.get_registered_limits(None)
+
+        self.assertEqual([('foo', 5), ('bar', 7)], limits)
+        fix.mock_conn.registered_limits.assert_called_once()
+
+        # Call again with resource names to test caching.
+        limits = utils.get_registered_limits(['foo', 'bar'])
+
+        self.assertEqual([('foo', 5), ('bar', 7)], limits)
+        fix.mock_conn.registered_limits.assert_called_once()
+
+    def test_get_registered_limits_resource_names_none_no_cache(self):
+        fix = self.useFixture(fixture.LimitFixture({'foo': 5, 'bar': 7}, {}))
+
+        utils = limit._EnforcerUtils(cache=False)
+        limits = utils.get_registered_limits(None)
+
+        self.assertEqual([('foo', 5), ('bar', 7)], limits)
+        fix.mock_conn.registered_limits.assert_called_once()
+
+        # Call again with resource names to test caching.
+        limits = utils.get_registered_limits(['foo', 'bar'])
+
+        self.assertEqual([('foo', 5), ('bar', 7)], limits)
+        # First call gets all limits, then one call per resource name.
+        self.assertEqual(3, fix.mock_conn.registered_limits.call_count)
+
+    def test_get_registered_limits_resource_names(self):
+        fix = self.useFixture(fixture.LimitFixture({'foo': 5, 'bar': 7}, {}))
+
+        utils = limit._EnforcerUtils()
+        limits = utils.get_registered_limits(['foo', 'bar'])
+
+        self.assertEqual([('foo', 5), ('bar', 7)], limits)
+        fix.mock_conn.registered_limits.assert_called_once()
+
+    def test_get_registered_limits_resource_names_no_cache(self):
+        fix = self.useFixture(fixture.LimitFixture({'foo': 5, 'bar': 7}, {}))
+
+        utils = limit._EnforcerUtils(cache=False)
+        limits = utils.get_registered_limits(['foo', 'bar'])
+
+        self.assertEqual([('foo', 5), ('bar', 7)], limits)
+        self.assertEqual(2, fix.mock_conn.registered_limits.call_count)
+
+    def test_get_project_limits_resource_names_none_project_id_none(self):
+        # We consider project_id None to be invalid for "get_project_limits"
+        # because it would require us to make the return format for getting
+        # project limits different than the format for registered limits.
+        # [(name, limit), (name, limit), ...]
+        utils = limit._EnforcerUtils()
+        self.assertRaises(ValueError, utils.get_project_limits, None, None)
+
+    def test_get_project_limits_resource_names_none(self):
+        project_id = uuid.uuid4().hex
+        fix = self.useFixture(fixture.LimitFixture(
+            {'foo': 5, 'bar': 7},
+            {project_id: {'foo': 2, 'bar': 4}, 'other': {'foo': 1, 'bar': 2}}))
+
+        utils = limit._EnforcerUtils()
+        limits = utils.get_project_limits(project_id, None)
+
+        self.assertEqual([('foo', 2), ('bar', 4)], limits)
+        fix.mock_conn.limits.assert_called_once()
+
+        # Call again with resource names to test caching.
+        limits = utils.get_project_limits(project_id, ['foo', 'bar'])
+
+        self.assertEqual([('foo', 2), ('bar', 4)], limits)
+        fix.mock_conn.limits.assert_called_once()
+
+    def test_get_project_limits_resource_names_none_no_cache(self):
+        project_id = uuid.uuid4().hex
+        fix = self.useFixture(fixture.LimitFixture(
+            {'foo': 5, 'bar': 7},
+            {project_id: {'foo': 2, 'bar': 4}, 'other': {'foo': 1, 'bar': 2}}))
+
+        utils = limit._EnforcerUtils(cache=False)
+        limits = utils.get_project_limits(project_id, None)
+
+        self.assertEqual([('foo', 2), ('bar', 4)], limits)
+        fix.mock_conn.limits.assert_called_once()
+
+        # Call again with resource names to test caching.
+        limits = utils.get_project_limits(project_id, ['foo', 'bar'])
+
+        self.assertEqual([('foo', 2), ('bar', 4)], limits)
+        # First call gets all limits, then one call per resource name.
+        self.assertEqual(3, fix.mock_conn.limits.call_count)
+
+    def test_get_project_limits_resource_names(self):
+        project_id = uuid.uuid4().hex
+        fix = self.useFixture(fixture.LimitFixture(
+            {'foo': 5, 'bar': 7},
+            {project_id: {'foo': 2, 'bar': 4}, 'other': {'foo': 1, 'bar': 2}}))
+
+        utils = limit._EnforcerUtils()
+        limits = utils.get_project_limits(project_id, ['foo', 'bar'])
+
+        self.assertEqual([('foo', 2), ('bar', 4)], limits)
+        fix.mock_conn.limits.assert_called_once()
+
+    def test_get_project_limits_resource_names_no_cache(self):
+        project_id = uuid.uuid4().hex
+        fix = self.useFixture(fixture.LimitFixture(
+            {'foo': 5, 'bar': 7},
+            {project_id: {'foo': 2, 'bar': 4}, 'other': {'foo': 1, 'bar': 2}}))
+
+        utils = limit._EnforcerUtils(cache=False)
+        limits = utils.get_project_limits(project_id, ['foo', 'bar'])
+
+        self.assertEqual([('foo', 2), ('bar', 4)], limits)
+        self.assertEqual(2, fix.mock_conn.limits.call_count)
diff -pruN 2.6.1-3/pyproject.toml 2.8.0-2/pyproject.toml
--- 2.6.1-3/pyproject.toml	1970-01-01 00:00:00.000000000 +0000
+++ 2.8.0-2/pyproject.toml	2025-05-28 04:35:37.000000000 +0000
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["pbr>=6.1.1"]
+build-backend = "pbr.build"
diff -pruN 2.6.1-3/releasenotes/notes/fix-ignored-ksa-adapter-options-e120fac9a6fd35f7.yaml 2.8.0-2/releasenotes/notes/fix-ignored-ksa-adapter-options-e120fac9a6fd35f7.yaml
--- 2.6.1-3/releasenotes/notes/fix-ignored-ksa-adapter-options-e120fac9a6fd35f7.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 2.8.0-2/releasenotes/notes/fix-ignored-ksa-adapter-options-e120fac9a6fd35f7.yaml	2025-05-28 04:35:37.000000000 +0000
@@ -0,0 +1,5 @@
+---
+fixes:
+  - |
+    Fix ignored keystoneauth adapter options such as valid_interfaces.
+
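
The release note refers to the _get_keystone_connection() change in limit.py: keystoneauth adapter options declared under [oslo_limit] are now passed through to the SDK connection rather than silently dropped. A hedged sketch of a configuration that now takes effect (all option values below are illustrative):

    [oslo_limit]
    auth_type = password
    auth_url = http://keystone.example.com/identity/v3
    username = service-user
    user_domain_id = default
    password = secret
    project_name = service
    project_domain_id = default
    endpoint_id = 2299b5e8e8fe4f63bbb7a9a02e36017e
    # Adapter options that were previously ignored:
    valid_interfaces = internal
    region_name = RegionOne
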
diff -pruN 2.6.1-3/releasenotes/source/2025.1.rst 2.8.0-2/releasenotes/source/2025.1.rst
--- 2.6.1-3/releasenotes/source/2025.1.rst	1970-01-01 00:00:00.000000000 +0000
+++ 2.8.0-2/releasenotes/source/2025.1.rst	2025-05-28 04:35:37.000000000 +0000
@@ -0,0 +1,6 @@
+===========================
+2025.1 Series Release Notes
+===========================
+
+.. release-notes::
+   :branch: stable/2025.1
diff -pruN 2.6.1-3/releasenotes/source/index.rst 2.8.0-2/releasenotes/source/index.rst
--- 2.6.1-3/releasenotes/source/index.rst	2025-02-15 06:20:11.000000000 +0000
+++ 2.8.0-2/releasenotes/source/index.rst	2025-05-28 04:35:37.000000000 +0000
@@ -6,6 +6,7 @@
    :maxdepth: 1
 
    unreleased
+   2025.1
    2024.2
    2024.1
    2023.2
diff -pruN 2.6.1-3/tox.ini 2.8.0-2/tox.ini
--- 2.6.1-3/tox.ini	2025-02-15 06:20:11.000000000 +0000
+++ 2.8.0-2/tox.ini	2025-05-28 04:35:37.000000000 +0000
@@ -1,10 +1,8 @@
 [tox]
 minversion = 3.18.0
 envlist = py3,pep8,docs
-ignore_basepython_conflict = true
 
 [testenv]
-basepython = python3
 deps =
   -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
   -r{toxinidir}/test-requirements.txt
