diff -pruN 5.12.0-2/.pre-commit-config.yaml 6.0.2-2/.pre-commit-config.yaml
--- 5.12.0-2/.pre-commit-config.yaml	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/.pre-commit-config.yaml	2025-08-14 03:01:40.000000000 +0000
@@ -24,3 +24,8 @@ repos:
       - id: hacking
         additional_dependencies: []
         exclude: '^(doc|releasenotes|tools)/.*$'
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.18.0
+    hooks:
+      - id: pyupgrade
+        args: [--py3-only]
diff -pruN 5.12.0-2/.zuul.yaml 6.0.2-2/.zuul.yaml
--- 5.12.0-2/.zuul.yaml	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/.zuul.yaml	2025-08-14 03:01:40.000000000 +0000
@@ -6,6 +6,7 @@
     vars:
       tox_envlist: functional
     irrelevant-files:
+      - ^\.gitreview$
       - ^.*\.rst$
       - ^doc/.*$
       - ^LICENSE$
diff -pruN 5.12.0-2/debian/changelog 6.0.2-2/debian/changelog
--- 5.12.0-2/debian/changelog	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-2/debian/changelog	2025-09-28 16:31:48.000000000 +0000
@@ -1,3 +1,16 @@
+python-taskflow (6.0.2-2) unstable; urgency=medium
+
+  * Uploading to unstable.
+
+ -- Thomas Goirand <zigo@debian.org>  Sun, 28 Sep 2025 18:31:48 +0200
+
+python-taskflow (6.0.2-1) experimental; urgency=medium
+
+  * New upstream release.
+  * Rebased reproducible_build.patch.
+
+ -- Thomas Goirand <zigo@debian.org>  Wed, 27 Aug 2025 17:08:10 +0200
+
 python-taskflow (5.12.0-2) unstable; urgency=medium
 
   * Uploading to unstable.
diff -pruN 5.12.0-2/debian/patches/reproducible_build.patch 6.0.2-2/debian/patches/reproducible_build.patch
--- 5.12.0-2/debian/patches/reproducible_build.patch	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-2/debian/patches/reproducible_build.patch	2025-09-28 16:31:48.000000000 +0000
@@ -7,7 +7,7 @@ Index: python-taskflow/taskflow/conducto
 ===================================================================
 --- python-taskflow.orig/taskflow/conductors/backends/impl_executor.py
 +++ python-taskflow/taskflow/conductors/backends/impl_executor.py
-@@ -103,7 +103,9 @@ class ExecutorConductor(base.Conductor,
+@@ -101,7 +101,9 @@
      def __init__(self, name, jobboard,
                   persistence=None, engine=None,
                   engine_options=None, wait_timeout=None,
@@ -15,6 +15,6 @@ Index: python-taskflow/taskflow/conducto
 +                 log=None, max_simultaneous_jobs=None):
 +        if max_simultaneous_jobs is None:
 +            max_simultaneous_jobs = MAX_SIMULTANEOUS_JOBS
-         super(ExecutorConductor, self).__init__(
+         super().__init__(
              name, jobboard, persistence=persistence,
              engine=engine, engine_options=engine_options)
diff -pruN 5.12.0-2/doc/source/conf.py 6.0.2-2/doc/source/conf.py
--- 5.12.0-2/doc/source/conf.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/doc/source/conf.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2020 Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff -pruN 5.12.0-2/doc/source/user/engines.rst 6.0.2-2/doc/source/user/engines.rst
--- 5.12.0-2/doc/source/user/engines.rst	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/doc/source/user/engines.rst	2025-08-14 03:01:40.000000000 +0000
@@ -449,7 +449,6 @@ Components
 .. automodule:: taskflow.engines.action_engine.completer
 .. automodule:: taskflow.engines.action_engine.deciders
 .. automodule:: taskflow.engines.action_engine.executor
-.. automodule:: taskflow.engines.action_engine.process_executor
 .. automodule:: taskflow.engines.action_engine.runtime
 .. automodule:: taskflow.engines.action_engine.scheduler
 .. automodule:: taskflow.engines.action_engine.selector
diff -pruN 5.12.0-2/pyproject.toml 6.0.2-2/pyproject.toml
--- 5.12.0-2/pyproject.toml	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-2/pyproject.toml	2025-08-14 03:01:40.000000000 +0000
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["pbr>=6.1.1"]
+build-backend = "pbr.build"
diff -pruN 5.12.0-2/releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml 6.0.2-2/releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml
--- 5.12.0-2/releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-2/releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml	2025-08-14 03:01:40.000000000 +0000
@@ -0,0 +1,7 @@
+---
+features:
+  - |
+    Added ``mask_inputs_keys`` and ``mask_outputs_keys`` parameters to the
+    constructors for ``FailureFormatter`` and ``DynamicLoggingListener``
+    that can be used to mask sensitive information from the ``requires``
+    and ``provides`` fields respectively when logging an atom.
diff -pruN 5.12.0-2/releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml 6.0.2-2/releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml
--- 5.12.0-2/releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-2/releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml	2025-08-14 03:01:40.000000000 +0000
@@ -0,0 +1,4 @@
+---
+upgrade:
+  - |
+    Process executor was removed.
diff -pruN 5.12.0-2/releasenotes/source/2025.1.rst 6.0.2-2/releasenotes/source/2025.1.rst
--- 5.12.0-2/releasenotes/source/2025.1.rst	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-2/releasenotes/source/2025.1.rst	2025-08-14 03:01:40.000000000 +0000
@@ -0,0 +1,6 @@
+===========================
+2025.1 Series Release Notes
+===========================
+
+.. release-notes::
+   :branch: stable/2025.1
diff -pruN 5.12.0-2/releasenotes/source/conf.py 6.0.2-2/releasenotes/source/conf.py
--- 5.12.0-2/releasenotes/source/conf.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/releasenotes/source/conf.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2020 Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff -pruN 5.12.0-2/releasenotes/source/index.rst 6.0.2-2/releasenotes/source/index.rst
--- 5.12.0-2/releasenotes/source/index.rst	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/releasenotes/source/index.rst	2025-08-14 03:01:40.000000000 +0000
@@ -6,6 +6,7 @@
    :maxdepth: 1
 
    unreleased
+   2025.1
    2024.2
    2024.1
    2023.2
diff -pruN 5.12.0-2/setup-etcd-env.sh 6.0.2-2/setup-etcd-env.sh
--- 5.12.0-2/setup-etcd-env.sh	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/setup-etcd-env.sh	2025-08-14 03:01:40.000000000 +0000
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -eux
 if [ -z "$(which etcd)" ]; then
-    ETCD_VERSION=3.4.27
+    ETCD_VERSION=${ETCD_VERSION:-3.5.21}
     case `uname -s` in
         Darwin)
             OS=darwin
diff -pruN 5.12.0-2/taskflow/atom.py 6.0.2-2/taskflow/atom.py
--- 5.12.0-2/taskflow/atom.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/atom.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
@@ -159,7 +157,7 @@ def _build_arg_mapping(atom_name, reqs,
     return required, optional
 
 
-class Atom(object, metaclass=abc.ABCMeta):
+class Atom(metaclass=abc.ABCMeta):
     """An unit of work that causes a flow to progress (in some manner).
 
     An atom is a named object that operates with input data to perform
@@ -379,7 +377,7 @@ class Atom(object, metaclass=abc.ABCMeta
         """
 
     def __str__(self):
-        return '"%s==%s"' % (self.name, misc.get_version_string(self))
+        return '"{}=={}"'.format(self.name, misc.get_version_string(self))
 
     def __repr__(self):
-        return '<%s %s>' % (reflection.get_class_name(self), self)
+        return '<{} {}>'.format(reflection.get_class_name(self), self)
diff -pruN 5.12.0-2/taskflow/conductors/backends/__init__.py 6.0.2-2/taskflow/conductors/backends/__init__.py
--- 5.12.0-2/taskflow/conductors/backends/__init__.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/conductors/backends/__init__.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/conductors/backends/impl_blocking.py 6.0.2-2/taskflow/conductors/backends/impl_blocking.py
--- 5.12.0-2/taskflow/conductors/backends/impl_blocking.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/conductors/backends/impl_blocking.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
 #    a copy of the License at
@@ -33,7 +31,7 @@ class BlockingConductor(impl_executor.Ex
                  persistence=None, engine=None,
                  engine_options=None, wait_timeout=None,
                  log=None, max_simultaneous_jobs=MAX_SIMULTANEOUS_JOBS):
-        super(BlockingConductor, self).__init__(
+        super().__init__(
             name, jobboard,
             persistence=persistence, engine=engine,
             engine_options=engine_options,
diff -pruN 5.12.0-2/taskflow/conductors/backends/impl_executor.py 6.0.2-2/taskflow/conductors/backends/impl_executor.py
--- 5.12.0-2/taskflow/conductors/backends/impl_executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/conductors/backends/impl_executor.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
 #    a copy of the License at
@@ -104,7 +102,7 @@ class ExecutorConductor(base.Conductor,
                  persistence=None, engine=None,
                  engine_options=None, wait_timeout=None,
                  log=None, max_simultaneous_jobs=MAX_SIMULTANEOUS_JOBS):
-        super(ExecutorConductor, self).__init__(
+        super().__init__(
             name, jobboard, persistence=persistence,
             engine=engine, engine_options=engine_options)
         self._wait_timeout = tt.convert_to_timeout(
@@ -123,24 +121,19 @@ class ExecutorConductor(base.Conductor,
                                        " it has not been")
 
     def stop(self):
-        """Requests the conductor to stop dispatching.
-
-        This method can be used to request that a conductor stop its
-        consumption & dispatching loop.
-
-        The method returns immediately regardless of whether the conductor has
-        been stopped.
-        """
         self._wait_timeout.interrupt()
 
+    # Inherit the docs, so we can reference them in our class docstring,
+    # if we don't do this sphinx gets confused...
+    stop.__doc__ = base.Conductor.stop.__doc__
+
     @property
     def dispatching(self):
         """Whether or not the dispatching loop is still dispatching."""
         return not self._dead.is_set()
 
     def _listeners_from_job(self, job, engine):
-        listeners = super(ExecutorConductor, self)._listeners_from_job(
-            job, engine)
+        listeners = super()._listeners_from_job(job, engine)
         listeners.append(logging_listener.LoggingListener(engine,
                                                           log=self._log))
         return listeners
@@ -178,7 +171,7 @@ class ExecutorConductor(base.Conductor,
                     stage_func()
                     self._notifier.notify("%s_end" % event_name, details)
             except excp.WrappedFailure as e:
-                if all((f.check(*self.NO_CONSUME_EXCEPTIONS) for f in e)):
+                if all(f.check(*self.NO_CONSUME_EXCEPTIONS) for f in e):
                     consume = False
                 if self._log.isEnabledFor(logging.WARNING):
                     if consume:
@@ -345,14 +338,8 @@ class ExecutorConductor(base.Conductor,
     run.__doc__ = base.Conductor.run.__doc__
 
     def wait(self, timeout=None):
-        """Waits for the conductor to gracefully exit.
-
-        This method waits for the conductor to gracefully exit. An optional
-        timeout can be provided, which will cause the method to return
-        within the specified timeout. If the timeout is reached, the returned
-        value will be ``False``, otherwise it will be ``True``.
-
-        :param timeout: Maximum number of seconds that the :meth:`wait` method
-                        should block for.
-        """
         return self._dead.wait(timeout)
+
+    # Inherit the docs, so we can reference them in our class docstring,
+    # if we don't do this sphinx gets confused...
+    wait.__doc__ = base.Conductor.wait.__doc__
diff -pruN 5.12.0-2/taskflow/conductors/backends/impl_nonblocking.py 6.0.2-2/taskflow/conductors/backends/impl_nonblocking.py
--- 5.12.0-2/taskflow/conductors/backends/impl_nonblocking.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/conductors/backends/impl_nonblocking.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
 #    a copy of the License at
@@ -54,7 +52,7 @@ class NonBlockingConductor(impl_executor
                  engine_options=None, wait_timeout=None,
                  log=None, max_simultaneous_jobs=MAX_SIMULTANEOUS_JOBS,
                  executor_factory=None):
-        super(NonBlockingConductor, self).__init__(
+        super().__init__(
             name, jobboard,
             persistence=persistence, engine=engine,
             engine_options=engine_options, wait_timeout=wait_timeout,
diff -pruN 5.12.0-2/taskflow/conductors/base.py 6.0.2-2/taskflow/conductors/base.py
--- 5.12.0-2/taskflow/conductors/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/conductors/base.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
 #    a copy of the License at
@@ -25,7 +23,7 @@ from taskflow.types import notifier
 from taskflow.utils import misc
 
 
-class Conductor(object, metaclass=abc.ABCMeta):
+class Conductor(metaclass=abc.ABCMeta):
     """Base for all conductor implementations.
 
     Conductors act as entities which extract jobs from a jobboard, assign
@@ -164,6 +162,30 @@ class Conductor(object, metaclass=abc.AB
         """
 
     @abc.abstractmethod
+    def stop(self):
+        """Requests the conductor to stop dispatching.
+
+        This method can be used to request that a conductor stop its
+        consumption & dispatching loop.
+
+        The method returns immediately regardless of whether the conductor has
+        been stopped.
+        """
+
+    @abc.abstractmethod
+    def wait(self, timeout=None):
+        """Waits for the conductor to gracefully exit.
+
+        This method waits for the conductor to gracefully exit. An optional
+        timeout can be provided, which will cause the method to return
+        within the specified timeout. If the timeout is reached, the returned
+        value will be ``False``, otherwise it will be ``True``.
+
+        :param timeout: Maximum number of seconds that the :meth:`wait` method
+                        should block for.
+        """
+
+    @abc.abstractmethod
     def _dispatch_job(self, job):
         """Dispatches a claimed job for work completion.
 
diff -pruN 5.12.0-2/taskflow/deciders.py 6.0.2-2/taskflow/deciders.py
--- 5.12.0-2/taskflow/deciders.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/deciders.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/__init__.py 6.0.2-2/taskflow/engines/__init__.py
--- 5.12.0-2/taskflow/engines/__init__.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/__init__.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/action_engine/actions/base.py 6.0.2-2/taskflow/engines/action_engine/actions/base.py
--- 5.12.0-2/taskflow/engines/action_engine/actions/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/actions/base.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +17,7 @@ import abc
 from taskflow import states
 
 
-class Action(object, metaclass=abc.ABCMeta):
+class Action(metaclass=abc.ABCMeta):
     """An action that handles executing, state changes, ... of atoms."""
 
     NO_RESULT = object()
diff -pruN 5.12.0-2/taskflow/engines/action_engine/actions/retry.py 6.0.2-2/taskflow/engines/action_engine/actions/retry.py
--- 5.12.0-2/taskflow/engines/action_engine/actions/retry.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/actions/retry.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -24,7 +22,7 @@ class RetryAction(base.Action):
     """An action that handles executing, state changes, ... of retry atoms."""
 
     def __init__(self, storage, notifier, retry_executor):
-        super(RetryAction, self).__init__(storage, notifier)
+        super().__init__(storage, notifier)
         self._retry_executor = retry_executor
 
     def _get_retry_args(self, retry, revert=False, addons=None):
diff -pruN 5.12.0-2/taskflow/engines/action_engine/actions/task.py 6.0.2-2/taskflow/engines/action_engine/actions/task.py
--- 5.12.0-2/taskflow/engines/action_engine/actions/task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/actions/task.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ class TaskAction(base.Action):
     """An action that handles scheduling, state changes, ... of task atoms."""
 
     def __init__(self, storage, notifier, task_executor):
-        super(TaskAction, self).__init__(storage, notifier)
+        super().__init__(storage, notifier)
         self._task_executor = task_executor
 
     def _is_identity_transition(self, old_state, state, task, progress=None):
diff -pruN 5.12.0-2/taskflow/engines/action_engine/builder.py 6.0.2-2/taskflow/engines/action_engine/builder.py
--- 5.12.0-2/taskflow/engines/action_engine/builder.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/builder.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -58,7 +56,7 @@ TIMED_STATES = (st.ANALYZING, st.RESUMIN
 LOG = logging.getLogger(__name__)
 
 
-class MachineMemory(object):
+class MachineMemory:
     """State machine memory."""
 
     def __init__(self):
@@ -73,7 +71,7 @@ class MachineMemory(object):
             fut.cancel()
 
 
-class MachineBuilder(object):
+class MachineBuilder:
     """State machine *builder* that powers the engine components.
 
     NOTE(harlowja): the machine (states and events that will trigger
diff -pruN 5.12.0-2/taskflow/engines/action_engine/compiler.py 6.0.2-2/taskflow/engines/action_engine/compiler.py
--- 5.12.0-2/taskflow/engines/action_engine/compiler.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/compiler.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -46,12 +44,12 @@ ATOMS = (TASK, RETRY)
 FLOWS = (FLOW, FLOW_END)
 
 
-class Terminator(object):
+class Terminator:
     """Flow terminator class."""
 
     def __init__(self, flow):
         self._flow = flow
-        self._name = "%s[$]" % (self._flow.name,)
+        self._name = "{}[$]".format(self._flow.name)
 
     @property
     def flow(self):
@@ -68,7 +66,7 @@ class Terminator(object):
         return '"%s[$]"' % flow_name
 
 
-class Compilation(object):
+class Compilation:
     """The result of a compilers ``compile()`` is this *immutable* object."""
 
     #: Task nodes will have a ``kind`` metadata key with this value.
@@ -135,7 +133,7 @@ def _add_update_edges(graph, nodes_from,
                     graph.add_edge(u, v, attr_dict=attr_dict.copy())
 
 
-class TaskCompiler(object):
+class TaskCompiler:
     """Non-recursive compiler of tasks."""
 
     def compile(self, task, parent=None):
@@ -147,7 +145,7 @@ class TaskCompiler(object):
         return graph, node
 
 
-class FlowCompiler(object):
+class FlowCompiler:
     """Recursive compiler of flows."""
 
     def __init__(self, deep_compiler_func):
@@ -162,9 +160,9 @@ class FlowCompiler(object):
             parent.add(tree_node)
         if flow.retry is not None:
             tree_node.add(tr.Node(flow.retry, kind=RETRY))
-        decomposed = dict(
-            (child, self._deep_compiler_func(child, parent=tree_node)[0])
-            for child in flow)
+        decomposed = {
+            child: self._deep_compiler_func(child, parent=tree_node)[0]
+            for child in flow}
         decomposed_graphs = list(decomposed.values())
         graph = gr.merge_graphs(graph, *decomposed_graphs,
                                 overlap_detector=_overlap_occurrence_detector)
@@ -223,7 +221,7 @@ class FlowCompiler(object):
         return graph, tree_node
 
 
-class PatternCompiler(object):
+class PatternCompiler:
     """Compiles a flow pattern (or task) into a compilation unit.
 
     Let's dive into the basic idea for how this works:
diff -pruN 5.12.0-2/taskflow/engines/action_engine/completer.py 6.0.2-2/taskflow/engines/action_engine/completer.py
--- 5.12.0-2/taskflow/engines/action_engine/completer.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/completer.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ from taskflow import states as st
 LOG = logging.getLogger(__name__)
 
 
-class Strategy(object, metaclass=abc.ABCMeta):
+class Strategy(metaclass=abc.ABCMeta):
     """Failure resolution strategy base class."""
 
     strategy = None
@@ -56,7 +54,7 @@ class RevertAndRetry(Strategy):
     strategy = retry_atom.RETRY
 
     def __init__(self, runtime, retry):
-        super(RevertAndRetry, self).__init__(runtime)
+        super().__init__(runtime)
         self._retry = retry
 
     def apply(self):
@@ -73,7 +71,7 @@ class RevertAll(Strategy):
     strategy = retry_atom.REVERT_ALL
 
     def __init__(self, runtime):
-        super(RevertAll, self).__init__(runtime)
+        super().__init__(runtime)
 
     def apply(self):
         return self._runtime.reset_atoms(
@@ -87,7 +85,7 @@ class Revert(Strategy):
     strategy = retry_atom.REVERT
 
     def __init__(self, runtime, atom):
-        super(Revert, self).__init__(runtime)
+        super().__init__(runtime)
         self._atom = atom
 
     def apply(self):
@@ -98,7 +96,7 @@ class Revert(Strategy):
         return tweaked
 
 
-class Completer(object):
+class Completer:
     """Completes atoms using actions to complete them."""
 
     def __init__(self, runtime):
diff -pruN 5.12.0-2/taskflow/engines/action_engine/deciders.py 6.0.2-2/taskflow/engines/action_engine/deciders.py
--- 5.12.0-2/taskflow/engines/action_engine/deciders.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/deciders.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,7 +24,7 @@ from taskflow import states
 LOG = logging.getLogger(__name__)
 
 
-class Decider(object, metaclass=abc.ABCMeta):
+class Decider(metaclass=abc.ABCMeta):
     """Base class for deciders.
 
     Provides interface to be implemented by sub-classes.
diff -pruN 5.12.0-2/taskflow/engines/action_engine/engine.py 6.0.2-2/taskflow/engines/action_engine/engine.py
--- 5.12.0-2/taskflow/engines/action_engine/engine.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/engine.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -40,11 +38,6 @@ from taskflow import storage
 from taskflow.types import failure
 from taskflow.utils import misc
 
-try:
-    from taskflow.engines.action_engine import process_executor
-except ImportError:
-    process_executor = None
-
 LOG = logging.getLogger(__name__)
 
 
@@ -170,7 +163,7 @@ class ActionEngine(base.Engine):
     """
 
     def __init__(self, flow, flow_detail, backend, options):
-        super(ActionEngine, self).__init__(flow, flow_detail, backend, options)
+        super().__init__(flow, flow_detail, backend, options)
         self._runtime = None
         self._compiled = False
         self._compilation = None
@@ -479,8 +472,7 @@ class SerialActionEngine(ActionEngine):
     """Engine that runs tasks in serial manner."""
 
     def __init__(self, flow, flow_detail, backend, options):
-        super(SerialActionEngine, self).__init__(flow, flow_detail,
-                                                 backend, options)
+        super().__init__(flow, flow_detail, backend, options)
         self._task_executor = executor.SerialTaskExecutor()
 
 
@@ -548,7 +540,6 @@ String (case insensitive)    Executor us
       polling while a higher number will involve less polling but a slower time
       for an engine to notice a task has completed.
 
-    .. |pe|  replace:: process_executor
     .. |cfp| replace:: concurrent.futures.process
     .. |cft| replace:: concurrent.futures.thread
     .. |cf| replace:: concurrent.futures
@@ -563,16 +554,9 @@ String (case insensitive)    Executor us
     _executor_cls_matchers = [
         _ExecutorTypeMatch((futures.ThreadPoolExecutor,),
                            executor.ParallelThreadTaskExecutor),
-    ]
-    if process_executor is not None:
-        _executor_cls_matchers.append(
-            _ExecutorTypeMatch((futures.ProcessPoolExecutor,),
-                               process_executor.ParallelProcessTaskExecutor)
-        )
-    _executor_cls_matchers.append(
         _ExecutorTypeMatch((futures.Executor,),
                            executor.ParallelThreadTaskExecutor),
-    )
+    ]
 
     # One of these should match when a string/text is provided for the
     # 'executor' option (a mixed case equivalent is allowed since the match
@@ -584,18 +568,12 @@ String (case insensitive)    Executor us
                                       'greenthreaded']),
                            executor.ParallelGreenThreadTaskExecutor),
     ]
-    if process_executor is not None:
-        _executor_str_matchers.append(
-            _ExecutorTextMatch(frozenset(['processes', 'process']),
-                               process_executor.ParallelProcessTaskExecutor)
-        )
 
     # Used when no executor is provided (either a string or object)...
     _default_executor_cls = executor.ParallelThreadTaskExecutor
 
     def __init__(self, flow, flow_detail, backend, options):
-        super(ParallelActionEngine, self).__init__(flow, flow_detail,
-                                                   backend, options)
+        super().__init__(flow, flow_detail, backend, options)
         # This ensures that any provided executor will be validated before
         # we get to far in the compilation/execution pipeline...
         self._task_executor = self._fetch_task_executor(self._options)
diff -pruN 5.12.0-2/taskflow/engines/action_engine/executor.py 6.0.2-2/taskflow/engines/action_engine/executor.py
--- 5.12.0-2/taskflow/engines/action_engine/executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/executor.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -78,7 +76,7 @@ def _revert_task(task, arguments, result
     return (REVERTED, result)
 
 
-class SerialRetryExecutor(object):
+class SerialRetryExecutor:
     """Executes and reverts retries."""
 
     def __init__(self):
@@ -105,7 +103,7 @@ class SerialRetryExecutor(object):
         return fut
 
 
-class TaskExecutor(object, metaclass=abc.ABCMeta):
+class TaskExecutor(metaclass=abc.ABCMeta):
     """Executes and reverts tasks.
 
     This class takes task and its arguments and executes or reverts it.
diff -pruN 5.12.0-2/taskflow/engines/action_engine/process_executor.py 6.0.2-2/taskflow/engines/action_engine/process_executor.py
--- 5.12.0-2/taskflow/engines/action_engine/process_executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/process_executor.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,720 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import asyncore
-import binascii
-import collections
-import errno
-import functools
-import hashlib
-import hmac
-import math
-import os
-import pickle
-import socket
-import struct
-import time
-
-import futurist
-from oslo_utils import excutils
-
-from taskflow.engines.action_engine import executor as base
-from taskflow import logging
-from taskflow import task as ta
-from taskflow.types import notifier as nt
-from taskflow.utils import iter_utils
-from taskflow.utils import misc
-from taskflow.utils import schema_utils as su
-from taskflow.utils import threading_utils
-
-LOG = logging.getLogger(__name__)
-
-# Internal parent <-> child process protocol schema, message constants...
-MAGIC_HEADER = 0xDECAF
-CHALLENGE = 'identify_yourself'
-CHALLENGE_RESPONSE = 'worker_reporting_in'
-ACK = 'ack'
-EVENT = 'event'
-SCHEMAS = {
-    # Basic jsonschemas for verifying that the data we get back and
-    # forth from parent <-> child observes at least a basic expected
-    # format.
-    CHALLENGE: {
-        "type": "string",
-        "minLength": 1,
-    },
-    ACK: {
-        "type": "string",
-        "minLength": 1,
-    },
-    CHALLENGE_RESPONSE: {
-        "type": "string",
-        "minLength": 1,
-    },
-    EVENT: {
-        "type": "object",
-        "properties": {
-            'event_type': {
-                "type": "string",
-            },
-            'sent_on': {
-                "type": "number",
-            },
-        },
-        "required": ['event_type', 'sent_on'],
-        "additionalProperties": True,
-    },
-}
-
-
-class UnknownSender(Exception):
-    """Exception raised when message from unknown sender is recvd."""
-
-
-class ChallengeIgnored(Exception):
-    """Exception raised when challenge has not been responded to."""
-
-
-class Reader(object):
-    """Reader machine that streams & parses messages that it then dispatches.
-
-    TODO(harlowja): Use python-suitcase in the future when the following
-    are addressed/resolved and released:
-
-    - https://github.com/digidotcom/python-suitcase/issues/28
-    - https://github.com/digidotcom/python-suitcase/issues/29
-
-    Binary format format is the following (no newlines in actual format)::
-
-        <magic-header> (4 bytes)
-        <mac-header-length> (4 bytes)
-        <mac> (1 or more variable bytes)
-        <identity-header-length> (4 bytes)
-        <identity> (1 or more variable bytes)
-        <msg-header-length> (4 bytes)
-        <msg> (1 or more variable bytes)
-    """
-
-    #: Per state memory initializers.
-    _INITIALIZERS = {
-        'magic_header_left': 4,
-        'mac_header_left': 4,
-        'identity_header_left': 4,
-        'msg_header_left': 4,
-    }
-
-    #: Linear steps/transitions (order matters here).
-    _TRANSITIONS = tuple([
-        'magic_header_left',
-        'mac_header_left',
-        'mac_left',
-        'identity_header_left',
-        'identity_left',
-        'msg_header_left',
-        'msg_left',
-    ])
-
-    def __init__(self, auth_key, dispatch_func, msg_limit=-1):
-        if not callable(dispatch_func):
-            raise ValueError("Expected provided dispatch function"
-                             " to be callable")
-        self.auth_key = auth_key
-        self.dispatch_func = dispatch_func
-        msg_limiter = iter_utils.iter_forever(msg_limit)
-        self.msg_count = next(msg_limiter)
-        self._msg_limiter = msg_limiter
-        self._buffer = misc.BytesIO()
-        self._state = None
-        # Local machine variables and such are stored in here.
-        self._memory = {}
-        self._transitions = collections.deque(self._TRANSITIONS)
-        # This is the per state callback handler set. The first entry reads
-        # the data and the second entry is called after reading is completed,
-        # typically to save that data into object memory, or to validate
-        # it.
-        self._handlers = {
-            'magic_header_left': (self._read_field_data,
-                                  self._save_and_validate_magic),
-            'mac_header_left': (self._read_field_data,
-                                functools.partial(self._save_pos_integer,
-                                                  'mac_left')),
-            'mac_left': (functools.partial(self._read_data, 'mac'),
-                         functools.partial(self._save_data, 'mac')),
-            'identity_header_left': (self._read_field_data,
-                                     functools.partial(self._save_pos_integer,
-                                                       'identity_left')),
-            'identity_left': (functools.partial(self._read_data, 'identity'),
-                              functools.partial(self._save_data, 'identity')),
-            'msg_header_left': (self._read_field_data,
-                                functools.partial(self._save_pos_integer,
-                                                  'msg_left')),
-            'msg_left': (functools.partial(self._read_data, 'msg'),
-                         self._dispatch_and_reset),
-        }
-        # Force transition into first state...
-        self._transition()
-
-    def _save_pos_integer(self, key_name, data):
-        key_val = struct.unpack("!i", data)[0]
-        if key_val <= 0:
-            raise IOError("Invalid %s length received for key '%s', expected"
-                          " greater than zero length" % (key_val, key_name))
-        self._memory[key_name] = key_val
-        return True
-
-    def _save_data(self, key_name, data):
-        self._memory[key_name] = data
-        return True
-
-    def _dispatch_and_reset(self, data):
-        self.dispatch_func(
-            self._memory['identity'],
-            # Lazy evaluate so the message can be thrown out as needed
-            # (instead of the receiver discarding it after the fact)...
-            functools.partial(_decode_message, self.auth_key, data,
-                              self._memory['mac']))
-        self.msg_count = next(self._msg_limiter)
-        self._memory.clear()
-
-    def _transition(self):
-        try:
-            self._state = self._transitions.popleft()
-        except IndexError:
-            self._transitions.extend(self._TRANSITIONS)
-            self._state = self._transitions.popleft()
-        try:
-            self._memory[self._state] = self._INITIALIZERS[self._state]
-        except KeyError:
-            pass
-        self._handle_func, self._post_handle_func = self._handlers[self._state]
-
-    def _save_and_validate_magic(self, data):
-        magic_header = struct.unpack("!i", data)[0]
-        if magic_header != MAGIC_HEADER:
-            raise IOError("Invalid magic header received, expected 0x%x but"
-                          " got 0x%x for message %s" % (MAGIC_HEADER,
-                                                        magic_header,
-                                                        self.msg_count + 1))
-        self._memory['magic'] = magic_header
-        return True
-
-    def _read_data(self, save_key_name, data):
-        data_len_left = self._memory[self._state]
-        self._buffer.write(data[0:data_len_left])
-        if len(data) < data_len_left:
-            data_len_left -= len(data)
-            self._memory[self._state] = data_len_left
-            return ''
-        else:
-            self._memory[self._state] = 0
-            buf_data = self._buffer.getvalue()
-            self._buffer.reset()
-            self._post_handle_func(buf_data)
-            self._transition()
-            return data[data_len_left:]
-
-    def _read_field_data(self, data):
-        return self._read_data(self._state, data)
-
-    @property
-    def bytes_needed(self):
-        return self._memory.get(self._state, 0)
-
-    def feed(self, data):
-        while len(data):
-            data = self._handle_func(data)
-
-
-class BadHmacValueError(ValueError):
-    """Value error raised when an invalid hmac is discovered."""
-
-
-def _create_random_string(desired_length):
-    if desired_length <= 0:
-        return b''
-    data_length = int(math.ceil(desired_length / 2.0))
-    data = os.urandom(data_length)
-    hex_data = binascii.hexlify(data)
-    return hex_data[0:desired_length]
-
-
-def _calculate_hmac(auth_key, body):
-    mac = hmac.new(auth_key, body, hashlib.md5).hexdigest()
-    if isinstance(mac, str):
-        mac = mac.encode("ascii")
-    return mac
-
-
-def _encode_message(auth_key, message, identity, reverse=False):
-    message = pickle.dumps(message, 2)
-    message_mac = _calculate_hmac(auth_key, message)
-    pieces = [
-        struct.pack("!i", MAGIC_HEADER),
-        struct.pack("!i", len(message_mac)),
-        message_mac,
-        struct.pack("!i", len(identity)),
-        identity,
-        struct.pack("!i", len(message)),
-        message,
-    ]
-    if reverse:
-        pieces.reverse()
-    return tuple(pieces)
-
-
-def _decode_message(auth_key, message, message_mac):
-    tmp_message_mac = _calculate_hmac(auth_key, message)
-    if tmp_message_mac != message_mac:
-        raise BadHmacValueError('Invalid message hmac')
-    return pickle.loads(message)
-
-
-class Channel(object):
-    """Object that workers use to communicate back to their creator."""
-
-    def __init__(self, port, identity, auth_key):
-        self.identity = identity
-        self.port = port
-        self.auth_key = auth_key
-        self.dead = False
-        self._sent = self._received = 0
-        self._socket = None
-        self._read_pipe = None
-        self._write_pipe = None
-
-    def close(self):
-        if self._socket is not None:
-            self._socket.close()
-            self._socket = None
-            self._read_pipe = None
-            self._write_pipe = None
-
-    def _ensure_connected(self):
-        if self._socket is None:
-            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            s.setblocking(1)
-            try:
-                s.connect(("", self.port))
-            except socket.error as e:
-                with excutils.save_and_reraise_exception():
-                    s.close()
-                    if e.errno in (errno.ECONNREFUSED, errno.ENOTCONN,
-                                   errno.ECONNRESET):
-                        # Don't bother with further connections...
-                        self.dead = True
-            read_pipe = s.makefile("rb", 0)
-            write_pipe = s.makefile("wb", 0)
-            try:
-                msg = self._do_recv(read_pipe=read_pipe)
-                su.schema_validate(msg, SCHEMAS[CHALLENGE])
-                if msg != CHALLENGE:
-                    raise IOError("Challenge expected not received")
-                else:
-                    pieces = _encode_message(self.auth_key,
-                                             CHALLENGE_RESPONSE,
-                                             self.identity)
-                    self._do_send_and_ack(pieces, write_pipe=write_pipe,
-                                          read_pipe=read_pipe)
-            except Exception:
-                with excutils.save_and_reraise_exception():
-                    s.close()
-            else:
-                self._socket = s
-                self._read_pipe = read_pipe
-                self._write_pipe = write_pipe
-
-    def recv(self):
-        self._ensure_connected()
-        return self._do_recv()
-
-    def _do_recv(self, read_pipe=None):
-        if read_pipe is None:
-            read_pipe = self._read_pipe
-        msg_capture = collections.deque(maxlen=1)
-        msg_capture_func = (lambda _from_who, msg_decoder_func:
-                            msg_capture.append(msg_decoder_func()))
-        reader = Reader(self.auth_key, msg_capture_func, msg_limit=1)
-        try:
-            maybe_msg_num = self._received + 1
-            bytes_needed = reader.bytes_needed
-            while True:
-                blob = read_pipe.read(bytes_needed)
-                if len(blob) != bytes_needed:
-                    raise EOFError("Read pipe closed while reading %s"
-                                   " bytes for potential message %s"
-                                   % (bytes_needed, maybe_msg_num))
-                reader.feed(blob)
-                bytes_needed = reader.bytes_needed
-        except StopIteration:
-            pass
-        msg = msg_capture[0]
-        self._received += 1
-        return msg
-
-    def _do_send(self, pieces, write_pipe=None):
-        if write_pipe is None:
-            write_pipe = self._write_pipe
-        for piece in pieces:
-            write_pipe.write(piece)
-        write_pipe.flush()
-
-    def _do_send_and_ack(self, pieces, write_pipe=None, read_pipe=None):
-        self._do_send(pieces, write_pipe=write_pipe)
-        self._sent += 1
-        msg = self._do_recv(read_pipe=read_pipe)
-        su.schema_validate(msg, SCHEMAS[ACK])
-        if msg != ACK:
-            raise IOError("Failed receiving ack for sent"
-                          " message %s" % self._metrics['sent'])
-
-    def send(self, message):
-        self._ensure_connected()
-        self._do_send_and_ack(_encode_message(self.auth_key, message,
-                                              self.identity))
-
-
-class EventSender(object):
-    """Sends event information from a child worker process to its creator."""
-
-    def __init__(self, channel):
-        self._channel = channel
-        self._pid = None
-
-    def __call__(self, event_type, details):
-        if not self._channel.dead:
-            if self._pid is None:
-                self._pid = os.getpid()
-            message = {
-                'event_type': event_type,
-                'details': details,
-                'sent_on': time.time(),
-            }
-            LOG.trace("Sending %s (from child %s)", message, self._pid)
-            self._channel.send(message)
-
-
-class DispatcherHandler(asyncore.dispatcher):
-    """Dispatches from a single connection into a target."""
-
-    #: Read/write chunk size.
-    CHUNK_SIZE = 8192
-
-    def __init__(self, sock, addr, dispatcher):
-        super(DispatcherHandler, self).__init__(map=dispatcher.map,
-                                                sock=sock)
-        self.blobs_to_write = list(dispatcher.challenge_pieces)
-        self.reader = Reader(dispatcher.auth_key, self._dispatch)
-        self.targets = dispatcher.targets
-        self.tied_to = None
-        self.challenge_responded = False
-        self.ack_pieces = _encode_message(dispatcher.auth_key, ACK,
-                                          dispatcher.identity,
-                                          reverse=True)
-        self.addr = addr
-
-    def handle_close(self):
-        self.close()
-
-    def writable(self):
-        return bool(self.blobs_to_write)
-
-    def handle_write(self):
-        try:
-            blob = self.blobs_to_write.pop()
-        except IndexError:
-            pass
-        else:
-            sent = self.send(blob[0:self.CHUNK_SIZE])
-            if sent < len(blob):
-                self.blobs_to_write.append(blob[sent:])
-
-    def _send_ack(self):
-        self.blobs_to_write.extend(self.ack_pieces)
-
-    def _dispatch(self, from_who, msg_decoder_func):
-        if not self.challenge_responded:
-            msg = msg_decoder_func()
-            su.schema_validate(msg, SCHEMAS[CHALLENGE_RESPONSE])
-            if msg != CHALLENGE_RESPONSE:
-                raise ChallengeIgnored("Discarding connection from %s"
-                                       " challenge was not responded to"
-                                       % self.addr)
-            else:
-                LOG.trace("Peer %s (%s) has passed challenge sequence",
-                          self.addr, from_who)
-                self.challenge_responded = True
-                self.tied_to = from_who
-                self._send_ack()
-        else:
-            if self.tied_to != from_who:
-                raise UnknownSender("Sender %s previously identified as %s"
-                                    " changed there identity to %s after"
-                                    " challenge sequence" % (self.addr,
-                                                             self.tied_to,
-                                                             from_who))
-            try:
-                task = self.targets[from_who]
-            except KeyError:
-                raise UnknownSender("Unknown message from %s (%s) not matched"
-                                    " to any known target" % (self.addr,
-                                                              from_who))
-            msg = msg_decoder_func()
-            su.schema_validate(msg, SCHEMAS[EVENT])
-            if LOG.isEnabledFor(logging.TRACE):
-                msg_delay = max(0, time.time() - msg['sent_on'])
-                LOG.trace("Dispatching message from %s (%s) (it took %0.3f"
-                          " seconds for it to arrive for processing after"
-                          " being sent)", self.addr, from_who, msg_delay)
-            task.notifier.notify(msg['event_type'], msg.get('details'))
-            self._send_ack()
-
-    def handle_read(self):
-        data = self.recv(self.CHUNK_SIZE)
-        if len(data) == 0:
-            self.handle_close()
-        else:
-            try:
-                self.reader.feed(data)
-            except (IOError, UnknownSender):
-                LOG.warning("Invalid received message", exc_info=True)
-                self.handle_close()
-            except (pickle.PickleError, TypeError):
-                LOG.warning("Badly formatted message", exc_info=True)
-                self.handle_close()
-            except (ValueError, su.ValidationError):
-                LOG.warning("Failed validating message", exc_info=True)
-                self.handle_close()
-            except ChallengeIgnored:
-                LOG.warning("Failed challenge sequence", exc_info=True)
-                self.handle_close()
-
-
-class Dispatcher(asyncore.dispatcher):
-    """Accepts messages received from child worker processes."""
-
-    #: See https://docs.python.org/2/library/socket.html#socket.socket.listen
-    MAX_BACKLOG = 5
-
-    def __init__(self, map, auth_key, identity):
-        super(Dispatcher, self).__init__(map=map)
-        self.identity = identity
-        self.challenge_pieces = _encode_message(auth_key, CHALLENGE,
-                                                identity, reverse=True)
-        self.auth_key = auth_key
-        self.targets = {}
-
-    @property
-    def port(self):
-        if self.socket is not None:
-            return self.socket.getsockname()[1]
-        else:
-            return None
-
-    def setup(self):
-        self.targets.clear()
-        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
-        self.bind(("", 0))
-        LOG.trace("Accepting dispatch requests on port %s", self.port)
-        self.listen(self.MAX_BACKLOG)
-
-    def writable(self):
-        return False
-
-    @property
-    def map(self):
-        return self._map
-
-    def handle_close(self):
-        if self.socket is not None:
-            self.close()
-
-    def handle_accept(self):
-        pair = self.accept()
-        if pair is not None:
-            sock, addr = pair
-            addr = "%s:%s" % (addr[0], addr[1])
-            LOG.trace("Potentially accepted new connection from %s", addr)
-            DispatcherHandler(sock, addr, self)
-
-
-class ParallelProcessTaskExecutor(base.ParallelTaskExecutor):
-    """Executes tasks in parallel using a process pool executor.
-
-    NOTE(harlowja): this executor executes tasks in external processes, so that
-    implies that tasks that are sent to that external process are pickleable
-    since this is how the multiprocessing works (sending pickled objects back
-    and forth) and that the bound handlers (for progress updating in
-    particular) are proxied correctly from that external process to the one
-    that is alive in the parent process to ensure that callbacks registered in
-    the parent are executed on events in the child.
-    """
-
-    #: Default timeout used by asyncore io loop (and eventually select/poll).
-    WAIT_TIMEOUT = 0.01
-
-    constructor_options = [
-        ('max_workers', lambda v: v if v is None else int(v)),
-        ('wait_timeout', lambda v: v if v is None else float(v)),
-    ]
-    """
-    Optional constructor keyword arguments this executor supports. These will
-    typically be passed via engine options (by a engine user) and converted
-    into the correct type before being sent into this
-    classes ``__init__`` method.
-    """
-
-    def __init__(self, executor=None,
-                 max_workers=None, wait_timeout=None):
-        super(ParallelProcessTaskExecutor, self).__init__(
-            executor=executor, max_workers=max_workers)
-        LOG.warning('Process task executor is deprecated. It is now disabled '
-                    'in Python 3.12 or later and will be removed.')
-        self._auth_key = _create_random_string(32)
-        self._dispatcher = Dispatcher({}, self._auth_key,
-                                      _create_random_string(32))
-        if wait_timeout is None:
-            self._wait_timeout = self.WAIT_TIMEOUT
-        else:
-            if wait_timeout <= 0:
-                raise ValueError("Provided wait timeout must be greater"
-                                 " than zero and not '%s'" % wait_timeout)
-            self._wait_timeout = wait_timeout
-        # Only created after starting...
-        self._worker = None
-
-    def _create_executor(self, max_workers=None):
-        return futurist.ProcessPoolExecutor(max_workers=max_workers)
-
-    def start(self):
-        if threading_utils.is_alive(self._worker):
-            raise RuntimeError("Worker thread must be stopped via stop()"
-                               " before starting/restarting")
-        super(ParallelProcessTaskExecutor, self).start()
-        self._dispatcher.setup()
-        self._worker = threading_utils.daemon_thread(
-            asyncore.loop, map=self._dispatcher.map,
-            timeout=self._wait_timeout)
-        self._worker.start()
-
-    def stop(self):
-        super(ParallelProcessTaskExecutor, self).stop()
-        self._dispatcher.close()
-        if threading_utils.is_alive(self._worker):
-            self._worker.join()
-            self._worker = None
-
-    def _submit_task(self, func, task, *args, **kwargs):
-        """Submit a function to run the given task (with given args/kwargs).
-
-        NOTE(harlowja): Adjust all events to be proxies instead since we want
-        those callbacks to be activated in this process, not in the child,
-        also since typically callbacks are functors (or callables) we can
-        not pickle those in the first place...
-
-        To make sure people understand how this works, the following is a
-        lengthy description of what is going on here, read at will:
-
-        So to ensure that we are proxying task triggered events that occur
-        in the executed subprocess (which will be created and used by the
-        thing using the multiprocessing based executor) we need to establish
-        a link between that process and this process that ensures that when a
-        event is triggered in that task in that process that a corresponding
-        event is triggered on the original task that was requested to be ran
-        in this process.
-
-        To accomplish this we have to create a copy of the task (without
-        any listeners) and then reattach a new set of listeners that will
-        now instead of calling the desired listeners just place messages
-        for this process (a dispatcher thread that is created in this class)
-        to dispatch to the original task (using a common accepting socket and
-        per task sender socket that is used and associated to know
-        which task to proxy back too, since it is possible that there many
-        be *many* subprocess running at the same time).
-
-        Once the subprocess task has finished execution, the executor will
-        then trigger a callback that will remove the task + target from the
-        dispatcher (which will stop any further proxying back to the original
-        task).
-        """
-        progress_callback = kwargs.pop('progress_callback', None)
-        clone = task.copy(retain_listeners=False)
-        identity = _create_random_string(32)
-        channel = Channel(self._dispatcher.port, identity, self._auth_key)
-
-        def rebind_task():
-            # Creates and binds proxies for all events the task could receive
-            # so that when the clone runs in another process that this task
-            # can receive the same notifications (thus making it look like the
-            # the notifications are transparently happening in this process).
-            proxy_event_types = set()
-            for (event_type, listeners) in task.notifier.listeners_iter():
-                if listeners:
-                    proxy_event_types.add(event_type)
-            if progress_callback is not None:
-                proxy_event_types.add(ta.EVENT_UPDATE_PROGRESS)
-            if nt.Notifier.ANY in proxy_event_types:
-                # NOTE(harlowja): If ANY is present, just have it be
-                # the **only** event registered, as all other events will be
-                # sent if ANY is registered (due to the nature of ANY sending
-                # all the things); if we also include the other event types
-                # in this set if ANY is present we will receive duplicate
-                # messages in this process (the one where the local
-                # task callbacks are being triggered). For example the
-                # emissions of the tasks notifier (that is running out
-                # of process) will for specific events send messages for
-                # its ANY event type **and** the specific event
-                # type (2 messages, when we just want one) which will
-                # cause > 1 notify() call on the local tasks notifier, which
-                # causes more local callback triggering than we want
-                # to actually happen.
-                proxy_event_types = set([nt.Notifier.ANY])
-            if proxy_event_types:
-                # This sender acts as our forwarding proxy target, it
-                # will be sent pickled to the process that will execute
-                # the needed task and it will do the work of using the
-                # channel object to send back messages to this process for
-                # dispatch into the local task.
-                sender = EventSender(channel)
-                for event_type in proxy_event_types:
-                    clone.notifier.register(event_type, sender)
-            return bool(proxy_event_types)
-
-        def register():
-            if progress_callback is not None:
-                task.notifier.register(ta.EVENT_UPDATE_PROGRESS,
-                                       progress_callback)
-            self._dispatcher.targets[identity] = task
-
-        def deregister(fut=None):
-            if progress_callback is not None:
-                task.notifier.deregister(ta.EVENT_UPDATE_PROGRESS,
-                                         progress_callback)
-            self._dispatcher.targets.pop(identity, None)
-
-        should_register = rebind_task()
-        if should_register:
-            register()
-        try:
-            fut = self._executor.submit(func, clone, *args, **kwargs)
-        except RuntimeError:
-            with excutils.save_and_reraise_exception():
-                if should_register:
-                    deregister()
-
-        fut.atom = task
-        if should_register:
-            fut.add_done_callback(deregister)
-        return fut
diff -pruN 5.12.0-2/taskflow/engines/action_engine/runtime.py 6.0.2-2/taskflow/engines/action_engine/runtime.py
--- 5.12.0-2/taskflow/engines/action_engine/runtime.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/runtime.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -43,7 +41,7 @@ _EdgeDecider = collections.namedtuple('_
 LOG = logging.getLogger(__name__)
 
 
-class Runtime(object):
+class Runtime:
     """A aggregate of runtime objects, properties, ... used during execution.
 
     This object contains various utility methods and properties that represent
diff -pruN 5.12.0-2/taskflow/engines/action_engine/scheduler.py 6.0.2-2/taskflow/engines/action_engine/scheduler.py
--- 5.12.0-2/taskflow/engines/action_engine/scheduler.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/scheduler.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -21,7 +19,7 @@ from taskflow import states as st
 from taskflow.types import failure
 
 
-class RetryScheduler(object):
+class RetryScheduler:
     """Schedules retry atoms."""
 
     def __init__(self, runtime):
@@ -52,7 +50,7 @@ class RetryScheduler(object):
                                         " intention: %s" % intention)
 
 
-class TaskScheduler(object):
+class TaskScheduler:
     """Schedules task atoms."""
 
     def __init__(self, runtime):
@@ -75,7 +73,7 @@ class TaskScheduler(object):
                                         " intention: %s" % intention)
 
 
-class Scheduler(object):
+class Scheduler:
     """Safely schedules atoms using a runtime ``fetch_scheduler`` routine."""
 
     def __init__(self, runtime):
diff -pruN 5.12.0-2/taskflow/engines/action_engine/scopes.py 6.0.2-2/taskflow/engines/action_engine/scopes.py
--- 5.12.0-2/taskflow/engines/action_engine/scopes.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/scopes.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -21,7 +19,7 @@ from taskflow import logging
 LOG = logging.getLogger(__name__)
 
 
-class ScopeWalker(object):
+class ScopeWalker:
     """Walks through the scopes of a atom using a engines compilation.
 
     NOTE(harlowja): for internal usage only.
@@ -79,9 +77,9 @@ class ScopeWalker(object):
         """
         graph = self._execution_graph
         if self._predecessors is None:
-            predecessors = set(
+            predecessors = {
                 node for node in graph.bfs_predecessors_iter(self._atom)
-                if graph.nodes[node]['kind'] in co.ATOMS)
+                if graph.nodes[node]['kind'] in co.ATOMS}
             self._predecessors = predecessors.copy()
         else:
             predecessors = self._predecessors.copy()
diff -pruN 5.12.0-2/taskflow/engines/action_engine/selector.py 6.0.2-2/taskflow/engines/action_engine/selector.py
--- 5.12.0-2/taskflow/engines/action_engine/selector.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/selector.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -27,7 +25,7 @@ from taskflow.utils import iter_utils
 LOG = logging.getLogger(__name__)
 
 
-class Selector(object):
+class Selector:
     """Selector that uses a compilation and aids in execution processes.
 
     Its primary purpose is to get the next atoms for execution or reversion
diff -pruN 5.12.0-2/taskflow/engines/action_engine/traversal.py 6.0.2-2/taskflow/engines/action_engine/traversal.py
--- 5.12.0-2/taskflow/engines/action_engine/traversal.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/action_engine/traversal.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/base.py 6.0.2-2/taskflow/engines/base.py
--- 5.12.0-2/taskflow/engines/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/base.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -21,7 +19,7 @@ from taskflow.types import notifier
 from taskflow.utils import misc
 
 
-class Engine(object, metaclass=abc.ABCMeta):
+class Engine(metaclass=abc.ABCMeta):
     """Base for all engines implementations.
 
     :ivar Engine.notifier: A notification object that will dispatch
diff -pruN 5.12.0-2/taskflow/engines/helpers.py 6.0.2-2/taskflow/engines/helpers.py
--- 5.12.0-2/taskflow/engines/helpers.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/helpers.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/worker_based/dispatcher.py 6.0.2-2/taskflow/engines/worker_based/dispatcher.py
--- 5.12.0-2/taskflow/engines/worker_based/dispatcher.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/dispatcher.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -23,7 +21,7 @@ from taskflow.utils import kombu_utils a
 LOG = logging.getLogger(__name__)
 
 
-class Handler(object):
+class Handler:
     """Component(s) that will be called on reception of messages."""
 
     __slots__ = ['_process_message', '_validator']
@@ -53,7 +51,7 @@ class Handler(object):
         return self._validator
 
 
-class TypeDispatcher(object):
+class TypeDispatcher:
     """Receives messages and dispatches to type specific handlers."""
 
     def __init__(self, type_handlers=None, requeue_filters=None):
diff -pruN 5.12.0-2/taskflow/engines/worker_based/endpoint.py 6.0.2-2/taskflow/engines/worker_based/endpoint.py
--- 5.12.0-2/taskflow/engines/worker_based/endpoint.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/endpoint.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +17,7 @@ from oslo_utils import reflection
 from taskflow.engines.action_engine import executor
 
 
-class Endpoint(object):
+class Endpoint:
     """Represents a single task with execute/revert methods."""
 
     def __init__(self, task_cls):
diff -pruN 5.12.0-2/taskflow/engines/worker_based/engine.py 6.0.2-2/taskflow/engines/worker_based/engine.py
--- 5.12.0-2/taskflow/engines/worker_based/engine.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/engine.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -53,8 +51,7 @@ class WorkerBasedActionEngine(engine.Act
     """
 
     def __init__(self, flow, flow_detail, backend, options):
-        super(WorkerBasedActionEngine, self).__init__(flow, flow_detail,
-                                                      backend, options)
+        super().__init__(flow, flow_detail, backend, options)
         # This ensures that any provided executor will be validated before
         # we get to far in the compilation/execution pipeline...
         self._task_executor = self._fetch_task_executor(self._options,
diff -pruN 5.12.0-2/taskflow/engines/worker_based/executor.py 6.0.2-2/taskflow/engines/worker_based/executor.py
--- 5.12.0-2/taskflow/engines/worker_based/executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/executor.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/worker_based/protocol.py 6.0.2-2/taskflow/engines/worker_based/protocol.py
--- 5.12.0-2/taskflow/engines/worker_based/protocol.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/protocol.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -147,7 +145,7 @@ def failure_to_dict(failure):
         return failure.to_dict(include_args=False)
 
 
-class Message(object, metaclass=abc.ABCMeta):
+class Message(metaclass=abc.ABCMeta):
     """Base class for all message types."""
 
     def __repr__(self):
diff -pruN 5.12.0-2/taskflow/engines/worker_based/proxy.py 6.0.2-2/taskflow/engines/worker_based/proxy.py
--- 5.12.0-2/taskflow/engines/worker_based/proxy.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/proxy.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -40,7 +38,7 @@ _TransportDetails = collections.namedtup
                                             'driver_name', 'driver_version'])
 
 
-class Proxy(object):
+class Proxy:
     """A proxy processes messages from/to the named exchange.
 
     For **internal** usage only (not for public consumption).
@@ -145,7 +143,7 @@ class Proxy(object):
 
     def _make_queue(self, routing_key, exchange, channel=None):
         """Make a named queue for the given exchange."""
-        queue_name = "%s_%s" % (self._exchange_name, routing_key)
+        queue_name = "{}_{}".format(self._exchange_name, routing_key)
         return kombu.Queue(name=queue_name,
                            routing_key=routing_key, durable=False,
                            exchange=exchange, auto_delete=True,
diff -pruN 5.12.0-2/taskflow/engines/worker_based/server.py 6.0.2-2/taskflow/engines/worker_based/server.py
--- 5.12.0-2/taskflow/engines/worker_based/server.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/server.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -31,7 +29,7 @@ from taskflow.utils import misc
 LOG = logging.getLogger(__name__)
 
 
-class Server(object):
+class Server:
     """Server implementation that waits for incoming tasks requests."""
 
     def __init__(self, topic, exchange, executor, endpoints,
@@ -53,8 +51,8 @@ class Server(object):
                                   transport_options=transport_options,
                                   retry_options=retry_options)
         self._topic = topic
-        self._endpoints = dict([(endpoint.name, endpoint)
-                                for endpoint in endpoints])
+        self._endpoints = {endpoint.name: endpoint
+                           for endpoint in endpoints}
 
     def _delayed_process(self, func):
         """Runs the function using the instances executor (eventually).
diff -pruN 5.12.0-2/taskflow/engines/worker_based/types.py 6.0.2-2/taskflow/engines/worker_based/types.py
--- 5.12.0-2/taskflow/engines/worker_based/types.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/types.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,7 +28,7 @@ LOG = logging.getLogger(__name__)
 # TODO(harlowja): this needs to be made better, once
 # https://blueprints.launchpad.net/taskflow/+spec/wbe-worker-info is finally
 # implemented we can go about using that instead.
-class TopicWorker(object):
+class TopicWorker:
     """A (read-only) worker and its relevant information + useful methods."""
 
     _NO_IDENTITY = object()
@@ -72,14 +70,15 @@ class TopicWorker(object):
     def __repr__(self):
         r = reflection.get_class_name(self, fully_qualified=False)
         if self.identity is not self._NO_IDENTITY:
-            r += "(identity=%s, tasks=%s, topic=%s)" % (self.identity,
-                                                        self.tasks, self.topic)
+            r += "(identity={}, tasks={}, topic={})".format(
+                self.identity, self.tasks, self.topic)
         else:
-            r += "(identity=*, tasks=%s, topic=%s)" % (self.tasks, self.topic)
+            r += "(identity=*, tasks={}, topic={})".format(
+                self.tasks, self.topic)
         return r
 
 
-class ProxyWorkerFinder(object):
+class ProxyWorkerFinder:
     """Requests and receives responses about workers topic+task details."""
 
     def __init__(self, uuid, proxy, topics,
diff -pruN 5.12.0-2/taskflow/engines/worker_based/worker.py 6.0.2-2/taskflow/engines/worker_based/worker.py
--- 5.12.0-2/taskflow/engines/worker_based/worker.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/engines/worker_based/worker.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -33,7 +31,7 @@ from taskflow.utils import threading_uti
 LOG = logging.getLogger(__name__)
 
 
-class Worker(object):
+class Worker:
     """Worker that can be started on a remote host for handling tasks requests.
 
     :param url: broker url
@@ -88,13 +86,13 @@ class Worker(object):
         connection_details = self._server.connection_details
         transport = connection_details.transport
         if transport.driver_version:
-            transport_driver = "%s v%s" % (transport.driver_name,
-                                           transport.driver_version)
+            transport_driver = "{} v{}".format(transport.driver_name,
+                                               transport.driver_version)
         else:
             transport_driver = transport.driver_name
         try:
             hostname = socket.getfqdn()
-        except socket.error:
+        except OSError:
             hostname = "???"
         try:
             pid = os.getpid()
diff -pruN 5.12.0-2/taskflow/examples/99_bottles.py 6.0.2-2/taskflow/examples/99_bottles.py
--- 5.12.0-2/taskflow/examples/99_bottles.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/99_bottles.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/alphabet_soup.py 6.0.2-2/taskflow/examples/alphabet_soup.py
--- 5.12.0-2/taskflow/examples/alphabet_soup.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/alphabet_soup.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/build_a_car.py 6.0.2-2/taskflow/examples/build_a_car.py
--- 5.12.0-2/taskflow/examples/build_a_car.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/build_a_car.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -111,7 +109,7 @@ def flow_watch(state, details):
 
 
 def task_watch(state, details):
-    print('Task %s => %s' % (details.get('task_name'), state))
+    print('Task {} => {}'.format(details.get('task_name'), state))
 
 
 flow = lf.Flow("make-auto").add(
diff -pruN 5.12.0-2/taskflow/examples/buildsystem.py 6.0.2-2/taskflow/examples/buildsystem.py
--- 5.12.0-2/taskflow/examples/buildsystem.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/buildsystem.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -56,7 +54,7 @@ class LinkTask(task.Task):
     default_provides = 'executable'
 
     def __init__(self, executable_path, *args, **kwargs):
-        super(LinkTask, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._executable_path = executable_path
 
     def execute(self, **kwargs):
diff -pruN 5.12.0-2/taskflow/examples/calculate_in_parallel.py 6.0.2-2/taskflow/examples/calculate_in_parallel.py
--- 5.12.0-2/taskflow/examples/calculate_in_parallel.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/calculate_in_parallel.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -46,7 +44,7 @@ from taskflow import task
 # more uniform manner).
 class Provider(task.Task):
     def __init__(self, name, *args, **kwargs):
-        super(Provider, self).__init__(name=name, **kwargs)
+        super().__init__(name=name, **kwargs)
         self._provide = args
 
     def execute(self):
diff -pruN 5.12.0-2/taskflow/examples/calculate_linear.py 6.0.2-2/taskflow/examples/calculate_linear.py
--- 5.12.0-2/taskflow/examples/calculate_linear.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/calculate_linear.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -54,7 +52,7 @@ from taskflow import task
 class Provider(task.Task):
 
     def __init__(self, name, *args, **kwargs):
-        super(Provider, self).__init__(name=name, **kwargs)
+        super().__init__(name=name, **kwargs)
         self._provide = args
 
     def execute(self):
@@ -79,8 +77,8 @@ class Adder(task.Task):
 # this function needs to undo if some later operation fails.
 class Multiplier(task.Task):
     def __init__(self, name, multiplier, provides=None, rebind=None):
-        super(Multiplier, self).__init__(name=name, provides=provides,
-                                         rebind=rebind)
+        super().__init__(name=name, provides=provides,
+                         rebind=rebind)
         self._multiplier = multiplier
 
     def execute(self, z):
diff -pruN 5.12.0-2/taskflow/examples/create_parallel_volume.py 6.0.2-2/taskflow/examples/create_parallel_volume.py
--- 5.12.0-2/taskflow/examples/create_parallel_volume.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/create_parallel_volume.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -45,7 +43,7 @@ def show_time(name):
     start = time.time()
     yield
     end = time.time()
-    print(" -- %s took %0.3f seconds" % (name, end - start))
+    print(" -- {} took {:0.3f} seconds".format(name, end - start))
 
 
 # This affects how many volumes to create and how much time to *simulate*
@@ -85,8 +83,7 @@ class VolumeCreator(task.Task):
         # volume create can be resumed/revert, and is much easier to use for
         # audit and tracking purposes.
         base_name = reflection.get_callable_name(self)
-        super(VolumeCreator, self).__init__(name="%s-%s" % (base_name,
-                                                            volume_id))
+        super().__init__(name="{}-{}".format(base_name, volume_id))
         self._volume_id = volume_id
 
     def execute(self):
diff -pruN 5.12.0-2/taskflow/examples/delayed_return.py 6.0.2-2/taskflow/examples/delayed_return.py
--- 5.12.0-2/taskflow/examples/delayed_return.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/delayed_return.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -44,7 +42,7 @@ from taskflow.types import notifier
 
 class PokeFutureListener(base.Listener):
     def __init__(self, engine, future, task_name):
-        super(PokeFutureListener, self).__init__(
+        super().__init__(
             engine,
             task_listen_for=(notifier.Notifier.ANY,),
             flow_listen_for=[])
diff -pruN 5.12.0-2/taskflow/examples/distance_calculator.py 6.0.2-2/taskflow/examples/distance_calculator.py
--- 5.12.0-2/taskflow/examples/distance_calculator.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/distance_calculator.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Hewlett-Packard Development Company, L.P.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -62,31 +60,24 @@ if __name__ == '__main__':
     any_distance = linear_flow.Flow("origin").add(DistanceTask())
     results = engines.run(any_distance)
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           0.0,
-                                           is_near(results['distance'], 0.0)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 0.0, is_near(results['distance'], 0.0)))
 
     results = engines.run(any_distance, store={'a': Point(1, 1)})
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           1.4142,
-                                           is_near(results['distance'],
-                                                   1.4142)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 1.4142, is_near(results['distance'], 1.4142)))
 
     results = engines.run(any_distance, store={'a': Point(10, 10)})
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           14.14199,
-                                           is_near(results['distance'],
-                                                   14.14199)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 14.14199, is_near(results['distance'], 14.14199)))
 
     results = engines.run(any_distance,
                           store={'a': Point(5, 5), 'b': Point(10, 10)})
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           7.07106,
-                                           is_near(results['distance'],
-                                                   7.07106)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 7.07106, is_near(results['distance'], 7.07106)))
 
     # For this we use the ability to override at task creation time the
     # optional arguments so that we don't need to continue to send them
@@ -97,13 +88,10 @@ if __name__ == '__main__':
     ten_distance.add(DistanceTask(inject={'a': Point(10, 10)}))
     results = engines.run(ten_distance, store={'b': Point(10, 10)})
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           0.0,
-                                           is_near(results['distance'], 0.0)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 0.0, is_near(results['distance'], 0.0)))
 
     results = engines.run(ten_distance)
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           14.14199,
-                                           is_near(results['distance'],
-                                                   14.14199)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 14.14199, is_near(results['distance'], 14.14199)))
diff -pruN 5.12.0-2/taskflow/examples/dump_memory_backend.py 6.0.2-2/taskflow/examples/dump_memory_backend.py
--- 5.12.0-2/taskflow/examples/dump_memory_backend.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/dump_memory_backend.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -67,6 +65,6 @@ print("---------")
 for path in backend.memory.ls_r(backend.memory.root_path, absolute=True):
     value = backend.memory[path]
     if value:
-        print("%s -> %s" % (path, value))
+        print("{} -> {}".format(path, value))
     else:
         print("%s" % (path))
diff -pruN 5.12.0-2/taskflow/examples/echo_listener.py 6.0.2-2/taskflow/examples/echo_listener.py
--- 5.12.0-2/taskflow/examples/echo_listener.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/echo_listener.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/example_utils.py 6.0.2-2/taskflow/examples/example_utils.py
--- 5.12.0-2/taskflow/examples/example_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/example_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -52,7 +50,7 @@ def rm_path(persist_path):
         raise ValueError("Unknown how to `rm` path: %s" % (persist_path))
     try:
         rm_func(persist_path)
-    except (IOError, OSError):
+    except OSError:
         pass
 
 
diff -pruN 5.12.0-2/taskflow/examples/fake_billing.py 6.0.2-2/taskflow/examples/fake_billing.py
--- 5.12.0-2/taskflow/examples/fake_billing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/fake_billing.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -47,12 +45,12 @@ from taskflow.utils import misc
 # complete to 100% complete.
 
 
-class DB(object):
+class DB:
     def query(self, sql):
         print("Querying with: %s" % (sql))
 
 
-class UrlCaller(object):
+class UrlCaller:
     def __init__(self):
         self._send_time = 0.5
         self._chunks = 25
@@ -73,7 +71,7 @@ class UrlCaller(object):
 # that require access to a set of resources it is a common pattern to provide
 # a object (in this case this object) on construction of those tasks via the
 # task constructor.
-class ResourceFetcher(object):
+class ResourceFetcher:
     def __init__(self):
         self._db_handle = None
         self._url_handle = None
@@ -93,7 +91,7 @@ class ResourceFetcher(object):
 
 class ExtractInputRequest(task.Task):
     def __init__(self, resources):
-        super(ExtractInputRequest, self).__init__(provides="parsed_request")
+        super().__init__(provides="parsed_request")
         self._resources = resources
 
     def execute(self, request):
@@ -106,7 +104,7 @@ class ExtractInputRequest(task.Task):
 
 class MakeDBEntry(task.Task):
     def __init__(self, resources):
-        super(MakeDBEntry, self).__init__()
+        super().__init__()
         self._resources = resources
 
     def execute(self, parsed_request):
@@ -120,7 +118,7 @@ class MakeDBEntry(task.Task):
 
 class ActivateDriver(task.Task):
     def __init__(self, resources):
-        super(ActivateDriver, self).__init__(provides='sent_to')
+        super().__init__(provides='sent_to')
         self._resources = resources
         self._url = "http://blahblah.com"
 
@@ -138,8 +136,8 @@ class ActivateDriver(task.Task):
 
     def update_progress(self, progress, **kwargs):
         # Override the parent method to also print out the status.
-        super(ActivateDriver, self).update_progress(progress, **kwargs)
-        print("%s is %0.2f%% done" % (self.name, progress * 100))
+        super().update_progress(progress, **kwargs)
+        print("{} is {:0.2f}% done".format(self.name, progress * 100))
 
 
 class DeclareSuccess(task.Task):
@@ -148,7 +146,7 @@ class DeclareSuccess(task.Task):
         print("All data processed and sent to %s" % (sent_to))
 
 
-class DummyUser(object):
+class DummyUser:
     def __init__(self, user, id_):
         self.user = user
         self.id = id_
diff -pruN 5.12.0-2/taskflow/examples/graph_flow.py 6.0.2-2/taskflow/examples/graph_flow.py
--- 5.12.0-2/taskflow/examples/graph_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/graph_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -99,7 +97,7 @@ print("Single threaded engine result %s"
 for (name, value) in expected:
     actual = result.get(name)
     if actual != value:
-        sys.stderr.write("%s != %s\n" % (actual, value))
+        sys.stderr.write("{} != {}\n".format(actual, value))
         unexpected += 1
 
 result = taskflow.engines.run(
@@ -109,7 +107,7 @@ print("Multi threaded engine result %s"
 for (name, value) in expected:
     actual = result.get(name)
     if actual != value:
-        sys.stderr.write("%s != %s\n" % (actual, value))
+        sys.stderr.write("{} != {}\n".format(actual, value))
         unexpected += 1
 
 if unexpected:
diff -pruN 5.12.0-2/taskflow/examples/hello_world.py 6.0.2-2/taskflow/examples/hello_world.py
--- 5.12.0-2/taskflow/examples/hello_world.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/hello_world.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -38,12 +36,12 @@ from taskflow import task
 
 class PrinterTask(task.Task):
     def __init__(self, name, show_name=True, inject=None):
-        super(PrinterTask, self).__init__(name, inject=inject)
+        super().__init__(name, inject=inject)
         self._show_name = show_name
 
     def execute(self, output):
         if self._show_name:
-            print("%s: %s" % (self.name, output))
+            print("{}: {}".format(self.name, output))
         else:
             print(output)
 
diff -pruN 5.12.0-2/taskflow/examples/jobboard_produce_consume_colors.py 6.0.2-2/taskflow/examples/jobboard_produce_consume_colors.py
--- 5.12.0-2/taskflow/examples/jobboard_produce_consume_colors.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/jobboard_produce_consume_colors.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -84,9 +82,9 @@ def dispatch_work(job):
 def safe_print(name, message, prefix=""):
     with STDOUT_LOCK:
         if prefix:
-            print("%s %s: %s" % (prefix, name, message))
+            print("{} {}: {}".format(prefix, name, message))
         else:
-            print("%s: %s" % (name, message))
+            print("{}: {}".format(name, message))
 
 
 def worker(ident, client, consumed):
@@ -138,7 +136,7 @@ def producer(ident, client):
     safe_print(name, "started")
     with backends.backend(name, SHARED_CONF.copy(), client=client) as board:
         for i in range(0, PRODUCER_UNITS):
-            job_name = "%s-%s" % (name, i)
+            job_name = "{}-{}".format(name, i)
             details = {
                 'color': random.choice(['red', 'blue']),
             }
diff -pruN 5.12.0-2/taskflow/examples/parallel_table_multiply.py 6.0.2-2/taskflow/examples/parallel_table_multiply.py
--- 5.12.0-2/taskflow/examples/parallel_table_multiply.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/parallel_table_multiply.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -47,7 +45,7 @@ class RowMultiplier(task.Task):
     """Performs a modification of an input row, creating a output row."""
 
     def __init__(self, name, index, row, multiplier):
-        super(RowMultiplier, self).__init__(name=name)
+        super().__init__(name=name)
         self.index = index
         self.multiplier = multiplier
         self.row = row
diff -pruN 5.12.0-2/taskflow/examples/persistence_example.py 6.0.2-2/taskflow/examples/persistence_example.py
--- 5.12.0-2/taskflow/examples/persistence_example.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/persistence_example.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -58,7 +56,7 @@ class HiTask(task.Task):
 
 class ByeTask(task.Task):
     def __init__(self, blowup):
-        super(ByeTask, self).__init__()
+        super().__init__()
         self._blowup = blowup
 
     def execute(self):
diff -pruN 5.12.0-2/taskflow/examples/pseudo_scoping.py 6.0.2-2/taskflow/examples/pseudo_scoping.py
--- 5.12.0-2/taskflow/examples/pseudo_scoping.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/pseudo_scoping.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Ivan Melnikov <iv at altlinux dot org>
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -67,7 +65,7 @@ class CallTask(task.Task):
     """Task that calls person by number."""
 
     def execute(self, person, number):
-        print('Calling %s %s.' % (person, number))
+        print('Calling {} {}.'.format(person, number))
 
 # This is how it works for one person:
 
@@ -84,7 +82,7 @@ taskflow.engines.run(simple_flow, store=
 # we use `rebind` argument of task constructor.
 def subflow_factory(prefix):
     def pr(what):
-        return '%s-%s' % (prefix, what)
+        return '{}-{}'.format(prefix, what)
 
     return lf.Flow(pr('flow')).add(
         FetchNumberTask(pr('fetch'),
diff -pruN 5.12.0-2/taskflow/examples/resume_from_backend.py 6.0.2-2/taskflow/examples/resume_from_backend.py
--- 5.12.0-2/taskflow/examples/resume_from_backend.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/resume_from_backend.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -62,7 +60,7 @@ import example_utils as eu  # noqa
 
 def print_task_states(flowdetail, msg):
     eu.print_wrapped(msg)
-    print("Flow '%s' state: %s" % (flowdetail.name, flowdetail.state))
+    print("Flow '{}' state: {}".format(flowdetail.name, flowdetail.state))
     # Sort by these so that our test validation doesn't get confused by the
     # order in which the items in the flow detail can be in.
     items = sorted((td.name, td.version, td.state, td.results)
diff -pruN 5.12.0-2/taskflow/examples/resume_many_flows/my_flows.py 6.0.2-2/taskflow/examples/resume_many_flows/my_flows.py
--- 5.12.0-2/taskflow/examples/resume_many_flows/my_flows.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/resume_many_flows/my_flows.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/resume_many_flows/resume_all.py 6.0.2-2/taskflow/examples/resume_many_flows/resume_all.py
--- 5.12.0-2/taskflow/examples/resume_many_flows/resume_all.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/resume_many_flows/resume_all.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -39,7 +37,7 @@ FINISHED_STATES = (states.SUCCESS, state
 
 
 def resume(flowdetail, backend):
-    print('Resuming flow %s %s' % (flowdetail.name, flowdetail.uuid))
+    print('Resuming flow {} {}'.format(flowdetail.name, flowdetail.uuid))
     engine = taskflow.engines.load_from_detail(flow_detail=flowdetail,
                                                backend=backend)
     engine.run()
diff -pruN 5.12.0-2/taskflow/examples/resume_many_flows/run_flow.py 6.0.2-2/taskflow/examples/resume_many_flows/run_flow.py
--- 5.12.0-2/taskflow/examples/resume_many_flows/run_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/resume_many_flows/run_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -38,6 +36,6 @@ import my_flows  # noqa
 with example_utils.get_backend() as backend:
     engine = taskflow.engines.load_from_factory(my_flows.flow_factory,
                                                 backend=backend)
-    print('Running flow %s %s' % (engine.storage.flow_name,
-                                  engine.storage.flow_uuid))
+    print('Running flow {} {}'.format(engine.storage.flow_name,
+                                      engine.storage.flow_uuid))
     engine.run()
diff -pruN 5.12.0-2/taskflow/examples/resume_many_flows.py 6.0.2-2/taskflow/examples/resume_many_flows.py
--- 5.12.0-2/taskflow/examples/resume_many_flows.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/resume_many_flows.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/resume_vm_boot.py 6.0.2-2/taskflow/examples/resume_vm_boot.py
--- 5.12.0-2/taskflow/examples/resume_vm_boot.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/resume_vm_boot.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -63,7 +61,7 @@ class PrintText(task.Task):
     """Just inserts some text print outs in a workflow."""
     def __init__(self, print_what, no_slow=False):
         content_hash = hashlib.md5(print_what.encode('utf-8')).hexdigest()[0:8]
-        super(PrintText, self).__init__(name="Print: %s" % (content_hash))
+        super().__init__(name="Print: %s" % (content_hash))
         self._text = print_what
         self._no_slow = no_slow
 
@@ -78,7 +76,7 @@ class PrintText(task.Task):
 class DefineVMSpec(task.Task):
     """Defines a vm specification to be."""
     def __init__(self, name):
-        super(DefineVMSpec, self).__init__(provides='vm_spec', name=name)
+        super().__init__(provides='vm_spec', name=name)
 
     def execute(self):
         return {
@@ -93,8 +91,7 @@ class DefineVMSpec(task.Task):
 class LocateImages(task.Task):
     """Locates where the vm images are."""
     def __init__(self, name):
-        super(LocateImages, self).__init__(provides='image_locations',
-                                           name=name)
+        super().__init__(provides='image_locations', name=name)
 
     def execute(self, vm_spec):
         image_locations = {}
@@ -107,13 +104,13 @@ class LocateImages(task.Task):
 class DownloadImages(task.Task):
     """Downloads all the vm images."""
     def __init__(self, name):
-        super(DownloadImages, self).__init__(provides='download_paths',
-                                             name=name)
+        super().__init__(provides='download_paths',
+                         name=name)
 
     def execute(self, image_locations):
         for src, loc in image_locations.items():
             with slow_down(1):
-                print("Downloading from %s => %s" % (src, loc))
+                print("Downloading from {} => {}".format(src, loc))
         return sorted(image_locations.values())
 
 
@@ -125,8 +122,8 @@ IPADDR=%s
 ONBOOT=yes"""
 
     def __init__(self, name):
-        super(CreateNetworkTpl, self).__init__(provides='network_settings',
-                                               name=name)
+        super().__init__(provides='network_settings',
+                         name=name)
 
     def execute(self, ips):
         settings = []
@@ -138,7 +135,7 @@ ONBOOT=yes"""
 class AllocateIP(task.Task):
     """Allocates the ips for the given vm."""
     def __init__(self, name):
-        super(AllocateIP, self).__init__(provides='ips', name=name)
+        super().__init__(provides='ips', name=name)
 
     def execute(self, vm_spec):
         ips = []
@@ -152,7 +149,7 @@ class WriteNetworkSettings(task.Task):
     def execute(self, download_paths, network_settings):
         for j, path in enumerate(download_paths):
             with slow_down(1):
-                print("Mounting %s to /tmp/%s" % (path, j))
+                print("Mounting {} to /tmp/{}".format(path, j))
             for i, setting in enumerate(network_settings):
                 filename = ("/tmp/etc/sysconfig/network-scripts/"
                             "ifcfg-eth%s" % (i))
@@ -263,8 +260,8 @@ with eu.get_backend() as backend:
                                            backend=backend, book=book,
                                            engine='parallel',
                                            executor=executor)
-        print("!! Your tracking id is: '%s+%s'" % (book.uuid,
-                                                   engine.storage.flow_uuid))
+        print("!! Your tracking id is: '{}+{}'".format(
+            book.uuid, engine.storage.flow_uuid))
         print("!! Please submit this on later runs for tracking purposes")
     else:
         # Attempt to load from a previously partially completed flow.
diff -pruN 5.12.0-2/taskflow/examples/resume_volume_create.py 6.0.2-2/taskflow/examples/resume_volume_create.py
--- 5.12.0-2/taskflow/examples/resume_volume_create.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/resume_volume_create.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -72,7 +70,7 @@ def find_flow_detail(backend, book_id, f
 class PrintText(task.Task):
     def __init__(self, print_what, no_slow=False):
         content_hash = hashlib.md5(print_what.encode('utf-8')).hexdigest()[0:8]
-        super(PrintText, self).__init__(name="Print: %s" % (content_hash))
+        super().__init__(name="Print: %s" % (content_hash))
         self._text = print_what
         self._no_slow = no_slow
 
@@ -141,8 +139,8 @@ with example_utils.get_backend() as back
         book.add(flow_detail)
         with contextlib.closing(backend.get_connection()) as conn:
             conn.save_logbook(book)
-        print("!! Your tracking id is: '%s+%s'" % (book.uuid,
-                                                   flow_detail.uuid))
+        print("!! Your tracking id is: '{}+{}'".format(book.uuid,
+                                                       flow_detail.uuid))
         print("!! Please submit this on later runs for tracking purposes")
     else:
         flow_detail = find_flow_detail(backend, book_id, flow_id)
diff -pruN 5.12.0-2/taskflow/examples/retry_flow.py 6.0.2-2/taskflow/examples/retry_flow.py
--- 5.12.0-2/taskflow/examples/retry_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/retry_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/reverting_linear.py 6.0.2-2/taskflow/examples/reverting_linear.py
--- 5.12.0-2/taskflow/examples/reverting_linear.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/reverting_linear.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -59,7 +57,7 @@ class CallJoe(task.Task):
 
 class CallSuzzie(task.Task):
     def execute(self, suzzie_number, *args, **kwargs):
-        raise IOError("Suzzie not home right now.")
+        raise OSError("Suzzie not home right now.")
 
 
 # Create your flow and associated tasks (the work to be done).
diff -pruN 5.12.0-2/taskflow/examples/run_by_iter.py 6.0.2-2/taskflow/examples/run_by_iter.py
--- 5.12.0-2/taskflow/examples/run_by_iter.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/run_by_iter.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/run_by_iter_enumerate.py 6.0.2-2/taskflow/examples/run_by_iter_enumerate.py
--- 5.12.0-2/taskflow/examples/run_by_iter_enumerate.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/run_by_iter_enumerate.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -51,4 +49,4 @@ e.compile()
 e.prepare()
 
 for i, st in enumerate(e.run_iter(), 1):
-    print("Transition %s: %s" % (i, st))
+    print("Transition {}: {}".format(i, st))
diff -pruN 5.12.0-2/taskflow/examples/share_engine_thread.py 6.0.2-2/taskflow/examples/share_engine_thread.py
--- 5.12.0-2/taskflow/examples/share_engine_thread.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/share_engine_thread.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -43,11 +41,11 @@ from taskflow.utils import threading_uti
 
 class DelayedTask(task.Task):
     def __init__(self, name):
-        super(DelayedTask, self).__init__(name=name)
+        super().__init__(name=name)
         self._wait_for = random.random()
 
     def execute(self):
-        print("Running '%s' in thread '%s'" % (self.name, tu.get_ident()))
+        print("Running '{}' in thread '{}'".format(self.name, tu.get_ident()))
         time.sleep(self._wait_for)
 
 
diff -pruN 5.12.0-2/taskflow/examples/simple_linear.py 6.0.2-2/taskflow/examples/simple_linear.py
--- 5.12.0-2/taskflow/examples/simple_linear.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/simple_linear.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/simple_linear_listening.py 6.0.2-2/taskflow/examples/simple_linear_listening.py
--- 5.12.0-2/taskflow/examples/simple_linear_listening.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/simple_linear_listening.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -71,7 +69,7 @@ def flow_watch(state, details):
 
 
 def task_watch(state, details):
-    print('Task %s => %s' % (details.get('task_name'), state))
+    print('Task {} => {}'.format(details.get('task_name'), state))
 
 
 # Wrap your functions into a task type that knows how to treat your functions
diff -pruN 5.12.0-2/taskflow/examples/simple_linear_pass.py 6.0.2-2/taskflow/examples/simple_linear_pass.py
--- 5.12.0-2/taskflow/examples/simple_linear_pass.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/simple_linear_pass.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/simple_map_reduce.py 6.0.2-2/taskflow/examples/simple_map_reduce.py
--- 5.12.0-2/taskflow/examples/simple_map_reduce.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/simple_map_reduce.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/switch_graph_flow.py 6.0.2-2/taskflow/examples/switch_graph_flow.py
--- 5.12.0-2/taskflow/examples/switch_graph_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/switch_graph_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -64,7 +62,7 @@ while entries:
     path = entries.pop()
     value = backend.memory[path]
     if value:
-        print("%s -> %s" % (path, value))
+        print("{} -> {}".format(path, value))
     else:
         print("%s" % (path))
     entries.extend(os.path.join(path, child)
diff -pruN 5.12.0-2/taskflow/examples/timing_listener.py 6.0.2-2/taskflow/examples/timing_listener.py
--- 5.12.0-2/taskflow/examples/timing_listener.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/timing_listener.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -45,7 +43,7 @@ from taskflow import task
 
 class VariableTask(task.Task):
     def __init__(self, name):
-        super(VariableTask, self).__init__(name)
+        super().__init__(name)
         self._sleepy_time = random.random()
 
     def execute(self):
diff -pruN 5.12.0-2/taskflow/examples/tox_conductor.py 6.0.2-2/taskflow/examples/tox_conductor.py
--- 5.12.0-2/taskflow/examples/tox_conductor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/tox_conductor.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -60,7 +58,7 @@ from taskflow.utils import threading_uti
 RUN_TIME = 5
 REVIEW_CREATION_DELAY = 0.5
 SCAN_DELAY = 0.1
-NAME = "%s_%s" % (socket.getfqdn(), os.getpid())
+NAME = "{}_{}".format(socket.getfqdn(), os.getpid())
 
 # This won't really use zookeeper but will use a local version of it using
 # the zake library that mimics an actual zookeeper cluster using threads and
@@ -74,7 +72,7 @@ class RunReview(task.Task):
     # A dummy task that clones the review and runs tox...
 
     def _clone_review(self, review, temp_dir):
-        print("Cloning review '%s' into %s" % (review['id'], temp_dir))
+        print("Cloning review '{}' into {}".format(review['id'], temp_dir))
 
     def _run_tox(self, temp_dir):
         print("Running tox in %s" % temp_dir)
@@ -177,7 +175,7 @@ def generate_reviewer(client, saver, nam
                         'review': review,
                     },
                 }
-                job_name = "%s_%s" % (real_name, review['id'])
+                job_name = "{}_{}".format(real_name, review['id'])
                 print("Posting review '%s'" % review['id'])
                 jb.post(job_name,
                         book=make_save_book(saver, review['id']),
diff -pruN 5.12.0-2/taskflow/examples/wbe_event_sender.py 6.0.2-2/taskflow/examples/wbe_event_sender.py
--- 5.12.0-2/taskflow/examples/wbe_event_sender.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/wbe_event_sender.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/wbe_mandelbrot.py 6.0.2-2/taskflow/examples/wbe_mandelbrot.py
--- 5.12.0-2/taskflow/examples/wbe_mandelbrot.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/wbe_mandelbrot.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/wbe_simple_linear.py 6.0.2-2/taskflow/examples/wbe_simple_linear.py
--- 5.12.0-2/taskflow/examples/wbe_simple_linear.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/wbe_simple_linear.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/wrapped_exception.py 6.0.2-2/taskflow/examples/wrapped_exception.py
--- 5.12.0-2/taskflow/examples/wrapped_exception.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/examples/wrapped_exception.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/exceptions.py 6.0.2-2/taskflow/exceptions.py
--- 5.12.0-2/taskflow/exceptions.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/exceptions.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -67,7 +65,7 @@ class TaskFlowException(Exception):
                   this is not yet implemented/supported natively.
     """
     def __init__(self, message, cause=None):
-        super(TaskFlowException, self).__init__(message)
+        super().__init__(message)
         self._cause = cause
 
     @property
@@ -192,7 +190,7 @@ class MissingDependencies(DependencyFail
         message = self.MESSAGE_TPL % {'who': who, 'requirements': requirements}
         if method:
             message = (self.METHOD_TPL % {'method': method}) + message
-        super(MissingDependencies, self).__init__(message, cause=cause)
+        super().__init__(message, cause=cause)
         self.missing_requirements = requirements
 
 
@@ -228,7 +226,7 @@ class DisallowedAccess(TaskFlowException
     """Raised when storage access is not possible due to state limitations."""
 
     def __init__(self, message, cause=None, state=None):
-        super(DisallowedAccess, self).__init__(message, cause=cause)
+        super().__init__(message, cause=cause)
         self.state = state
 
 
@@ -261,7 +259,7 @@ class WrappedFailure(Exception):
     """
 
     def __init__(self, causes):
-        super(WrappedFailure, self).__init__()
+        super().__init__()
         self._causes = []
         for cause in causes:
             if cause.check(type(self)) and cause.exception:
@@ -306,8 +304,8 @@ class WrappedFailure(Exception):
 
     def __str__(self):
         buf = io.StringIO()
-        buf.write(u'WrappedFailure: [')
+        buf.write('WrappedFailure: [')
         causes_gen = (str(cause) for cause in self._causes)
-        buf.write(u", ".join(causes_gen))
-        buf.write(u']')
+        buf.write(", ".join(causes_gen))
+        buf.write(']')
         return buf.getvalue()
diff -pruN 5.12.0-2/taskflow/flow.py 6.0.2-2/taskflow/flow.py
--- 5.12.0-2/taskflow/flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -42,7 +40,7 @@ _CHOP_PAT_LEN = len(_CHOP_PAT)
 LINK_DECIDER_DEPTH = 'decider_depth'
 
 
-class Flow(object, metaclass=abc.ABCMeta):
+class Flow(metaclass=abc.ABCMeta):
     """The base abstract class of all flow implementations.
 
     A flow is a structure that defines relationships between tasks. You can
diff -pruN 5.12.0-2/taskflow/formatters.py 6.0.2-2/taskflow/formatters.py
--- 5.12.0-2/taskflow/formatters.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/formatters.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -14,6 +12,7 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+import copy
 import functools
 
 from taskflow.engines.action_engine import compiler
@@ -58,17 +57,29 @@ def _fetch_predecessor_tree(graph, atom)
     return root
 
 
-class FailureFormatter(object):
+class FailureFormatter:
     """Formats a failure and connects it to associated atoms & engine."""
 
     _BUILDERS = {
         states.EXECUTE: (_fetch_predecessor_tree, 'predecessors'),
     }
 
-    def __init__(self, engine, hide_inputs_outputs_of=()):
+    def __init__(self, engine, hide_inputs_outputs_of=(),
+                 mask_inputs_keys=(), mask_outputs_keys=()):
         self._hide_inputs_outputs_of = hide_inputs_outputs_of
+        self._mask_inputs_keys = mask_inputs_keys
+        self._mask_outputs_keys = mask_outputs_keys
         self._engine = engine
 
+    def _mask_keys(self, data, mask_keys):
+        if not data or not isinstance(data, dict):
+            return data
+        result = copy.deepcopy(data)
+        for k in mask_keys:
+            if k in result:
+                result[k] = '***'
+        return result
+
     def _format_node(self, storage, cache, node):
         """Formats a single tree node into a string version."""
         if node.metadata['kind'] == compiler.FLOW:
@@ -100,14 +111,16 @@ class FailureFormatter(object):
                                                        atom_name,
                                                        fetch_mapped_args)
                 if requires_found:
-                    atom_attrs['requires'] = requires
+                    atom_attrs['requires'] = self._mask_keys(
+                        requires, self._mask_inputs_keys)
                 provides, provides_found = _cached_get(
                     cache, 'provides', atom_name,
                     storage.get_execute_result, atom_name)
                 if provides_found:
-                    atom_attrs['provides'] = provides
+                    atom_attrs['provides'] = self._mask_keys(
+                        provides, self._mask_outputs_keys)
             if atom_attrs:
-                return "Atom '%s' %s" % (atom_name, atom_attrs)
+                return "Atom '{}' {}".format(atom_name, atom_attrs)
             else:
                 return "Atom '%s'" % (atom_name)
         else:
@@ -156,7 +169,8 @@ class FailureFormatter(object):
             builder, kind = self._BUILDERS[atom_intention]
             rooted_tree = builder(graph, atom)
             child_count = rooted_tree.child_count(only_direct=False)
-            buff.write_nl('%s %s (most recent first):' % (child_count, kind))
+            buff.write_nl(
+                '{} {} (most recent first):'.format(child_count, kind))
             formatter = functools.partial(self._format_node, storage, cache)
             direct_child_count = rooted_tree.child_count(only_direct=True)
             for i, child in enumerate(rooted_tree, 1):
diff -pruN 5.12.0-2/taskflow/jobs/backends/__init__.py 6.0.2-2/taskflow/jobs/backends/__init__.py
--- 5.12.0-2/taskflow/jobs/backends/__init__.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/jobs/backends/__init__.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/jobs/backends/impl_redis.py 6.0.2-2/taskflow/jobs/backends/impl_redis.py
--- 5.12.0-2/taskflow/jobs/backends/impl_redis.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/jobs/backends/impl_redis.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -17,7 +15,6 @@
 import contextlib
 import datetime
 import functools
-import re
 import string
 import threading
 import time
@@ -26,6 +23,7 @@ import fasteners
 import msgpack
 from oslo_serialization import msgpackutils
 from oslo_utils import excutils
+from oslo_utils import netutils
 from oslo_utils import strutils
 from oslo_utils import timeutils
 from oslo_utils import uuidutils
@@ -69,10 +67,10 @@ class RedisJob(base.Job):
                  created_on=None, backend=None,
                  book=None, book_data=None,
                  priority=base.JobPriority.NORMAL):
-        super(RedisJob, self).__init__(board, name,
-                                       uuid=uuid, details=details,
-                                       backend=backend,
-                                       book=book, book_data=book_data)
+        super().__init__(board, name,
+                         uuid=uuid, details=details,
+                         backend=backend,
+                         book=book, book_data=book_data)
         self._created_on = created_on
         self._client = board._client
         self._redis_version = board._redis_version
@@ -561,15 +559,17 @@ return cmsgpack.pack(result)
 
     @classmethod
     def _parse_sentinel(cls, sentinel):
-        # IPv6 (eg. [::1]:6379 )
-        match = re.search(r'^\[(\S+)\]:(\d+)$', sentinel)
-        if match:
-            return (match[1], int(match[2]))
-        # IPv4 or hostname (eg. 127.0.0.1:6379 or localhost:6379)
-        match = re.search(r'^(\S+):(\d+)$', sentinel)
-        if match:
-            return (match[1], int(match[2]))
-        raise ValueError('Malformed sentinel server format')
+        host, port = netutils.parse_host_port(sentinel)
+        if host is None or port is None:
+            raise ValueError('Malformed sentinel server format')
+        return (host, port)
+
+    @classmethod
+    def _filter_ssl_options(cls, opts):
+        if not opts.get('ssl', False):
+            return {k: v for (k, v) in opts.items()
+                    if not k.startswith('ssl_')}
+        return opts
 
     @classmethod
     def _make_client(cls, conf):
@@ -584,8 +584,12 @@ return cmsgpack.pack(result)
             sentinels = [(client_conf.pop('host'), client_conf.pop('port'))]
             for fallback in conf.get('sentinel_fallbacks', []):
                 sentinels.append(cls._parse_sentinel(fallback))
+            client_conf = cls._filter_ssl_options(client_conf)
+            sentinel_kwargs = conf.get('sentinel_kwargs')
+            if sentinel_kwargs is not None:
+                sentinel_kwargs = cls._filter_ssl_options(sentinel_kwargs)
             s = sentinel.Sentinel(sentinels,
-                                  sentinel_kwargs=conf.get('sentinel_kwargs'),
+                                  sentinel_kwargs=sentinel_kwargs,
                                   **client_conf)
             return s.master_for(conf['sentinel'])
         else:
@@ -593,7 +597,7 @@ return cmsgpack.pack(result)
 
     def __init__(self, name, conf,
                  client=None, persistence=None):
-        super(RedisJobBoard, self).__init__(name, conf)
+        super().__init__(name, conf)
         self._closed = True
         if client is not None:
             self._client = client
diff -pruN 5.12.0-2/taskflow/jobs/backends/impl_zookeeper.py 6.0.2-2/taskflow/jobs/backends/impl_zookeeper.py
--- 5.12.0-2/taskflow/jobs/backends/impl_zookeeper.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/jobs/backends/impl_zookeeper.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -51,10 +49,10 @@ class ZookeeperJob(base.Job):
                  uuid=None, details=None, book=None, book_data=None,
                  created_on=None, backend=None,
                  priority=base.JobPriority.NORMAL):
-        super(ZookeeperJob, self).__init__(board, name,
-                                           uuid=uuid, details=details,
-                                           backend=backend,
-                                           book=book, book_data=book_data)
+        super().__init__(board, name,
+                         uuid=uuid, details=details,
+                         backend=backend,
+                         book=book, book_data=book_data)
         self._client = client
         self._path = k_paths.normpath(path)
         self._lock_path = self._path + board.LOCK_POSTFIX
@@ -281,7 +279,7 @@ class ZookeeperJobBoard(base.NotifyingJo
 
     def __init__(self, name, conf,
                  client=None, persistence=None, emit_notifications=True):
-        super(ZookeeperJobBoard, self).__init__(name, conf)
+        super().__init__(name, conf)
         if client is not None:
             self._client = client
             self._owned = False
@@ -552,7 +550,8 @@ class ZookeeperJobBoard(base.NotifyingJo
             except Exception:
                 owner = None
             if owner:
-                message = "Job %s already claimed by '%s'" % (job.uuid, owner)
+                message = "Job {} already claimed by '{}'".format(
+                    job.uuid, owner)
             else:
                 message = "Job %s already claimed" % (job.uuid)
             excp.raise_with_cause(excp.UnclaimableJob,
diff -pruN 5.12.0-2/taskflow/jobs/base.py 6.0.2-2/taskflow/jobs/base.py
--- 5.12.0-2/taskflow/jobs/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/jobs/base.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
@@ -110,7 +108,7 @@ class JobPriority(enum.Enum):
                 return tuple(values)
 
 
-class Job(object, metaclass=abc.ABCMeta):
+class Job(metaclass=abc.ABCMeta):
     """A abstraction that represents a named and trackable unit of work.
 
     A job connects a logbook, a owner, a priority, last modified and created
@@ -277,12 +275,12 @@ class Job(object, metaclass=abc.ABCMeta)
     def __str__(self):
         """Pretty formats the job into something *more* meaningful."""
         cls_name = type(self).__name__
-        return "%s: %s (priority=%s, uuid=%s, details=%s)" % (
+        return "{}: {} (priority={}, uuid={}, details={})".format(
             cls_name, self.name, self.priority,
             self.uuid, self.details)
 
 
-class JobBoardIterator(object):
+class JobBoardIterator:
     """Iterator over a jobboard that iterates over potential jobs.
 
     It provides the following attributes:
@@ -355,7 +353,7 @@ class JobBoardIterator(object):
             return job
 
 
-class JobBoard(object, metaclass=abc.ABCMeta):
+class JobBoard(metaclass=abc.ABCMeta):
     """A place where jobs can be posted, reposted, claimed and transferred.
 
     There can be multiple implementations of this job board, depending on the
@@ -565,7 +563,7 @@ class NotifyingJobBoard(JobBoard):
     registered are thread safe (and block for as little time as possible).
     """
     def __init__(self, name, conf):
-        super(NotifyingJobBoard, self).__init__(name, conf)
+        super().__init__(name, conf)
         self.notifier = notifier.Notifier()
 
 
diff -pruN 5.12.0-2/taskflow/listeners/base.py 6.0.2-2/taskflow/listeners/base.py
--- 5.12.0-2/taskflow/listeners/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/listeners/base.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -77,7 +75,7 @@ def _bulk_register(watch_states, notifie
         return registered
 
 
-class Listener(object):
+class Listener:
     """Base class for listeners.
 
     A listener can be attached to an engine to do various actions on flow and
diff -pruN 5.12.0-2/taskflow/listeners/capturing.py 6.0.2-2/taskflow/listeners/capturing.py
--- 5.12.0-2/taskflow/listeners/capturing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/listeners/capturing.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -65,7 +63,7 @@ class CaptureListener(base.Listener):
                  # Provide your own list (or previous list) to accumulate
                  # into...
                  values=None):
-        super(CaptureListener, self).__init__(
+        super().__init__(
             engine,
             task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for,
diff -pruN 5.12.0-2/taskflow/listeners/claims.py 6.0.2-2/taskflow/listeners/claims.py
--- 5.12.0-2/taskflow/listeners/claims.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/listeners/claims.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -49,7 +47,7 @@ class CheckingClaimListener(base.Listene
     """
 
     def __init__(self, engine, job, board, owner, on_job_loss=None):
-        super(CheckingClaimListener, self).__init__(engine)
+        super().__init__(engine)
         self._job = job
         self._board = board
         self._owner = owner
diff -pruN 5.12.0-2/taskflow/listeners/logging.py 6.0.2-2/taskflow/listeners/logging.py
--- 5.12.0-2/taskflow/listeners/logging.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/listeners/logging.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -46,7 +44,7 @@ class LoggingListener(base.DumpingListen
                  retry_listen_for=base.DEFAULT_LISTEN_FOR,
                  log=None,
                  level=logging.DEBUG):
-        super(LoggingListener, self).__init__(
+        super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
         self._logger = misc.pick_first_not_none(log, self._LOGGER, LOG)
@@ -110,8 +108,10 @@ class DynamicLoggingListener(base.Listen
                  retry_listen_for=base.DEFAULT_LISTEN_FOR,
                  log=None, failure_level=logging.WARNING,
                  level=logging.DEBUG, hide_inputs_outputs_of=(),
-                 fail_formatter=None):
-        super(DynamicLoggingListener, self).__init__(
+                 fail_formatter=None,
+                 mask_inputs_keys=(),
+                 mask_outputs_keys=()):
+        super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
         self._failure_level = failure_level
@@ -127,11 +127,15 @@ class DynamicLoggingListener(base.Listen
             states.REVERTED: self._failure_level,
         }
         self._hide_inputs_outputs_of = frozenset(hide_inputs_outputs_of)
+        self._mask_inputs_keys = frozenset(mask_inputs_keys)
+        self._mask_outputs_keys = frozenset(mask_outputs_keys)
         self._logger = misc.pick_first_not_none(log, self._LOGGER, LOG)
         if fail_formatter is None:
             self._fail_formatter = formatters.FailureFormatter(
                 self._engine,
-                hide_inputs_outputs_of=self._hide_inputs_outputs_of)
+                hide_inputs_outputs_of=self._hide_inputs_outputs_of,
+                mask_inputs_keys=self._mask_inputs_keys,
+                mask_outputs_keys=self._mask_outputs_keys)
         else:
             self._fail_formatter = fail_formatter
 
diff -pruN 5.12.0-2/taskflow/listeners/printing.py 6.0.2-2/taskflow/listeners/printing.py
--- 5.12.0-2/taskflow/listeners/printing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/listeners/printing.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -27,7 +25,7 @@ class PrintingListener(base.DumpingListe
                  flow_listen_for=base.DEFAULT_LISTEN_FOR,
                  retry_listen_for=base.DEFAULT_LISTEN_FOR,
                  stderr=False):
-        super(PrintingListener, self).__init__(
+        super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
         if stderr:
diff -pruN 5.12.0-2/taskflow/listeners/timing.py 6.0.2-2/taskflow/listeners/timing.py
--- 5.12.0-2/taskflow/listeners/timing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/listeners/timing.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,7 +24,7 @@ from taskflow import logging
 from taskflow import states
 
 STARTING_STATES = frozenset((states.RUNNING, states.REVERTING))
-FINISHED_STATES = frozenset((base.FINISH_STATES + (states.REVERTED,)))
+FINISHED_STATES = frozenset(base.FINISH_STATES + (states.REVERTED,))
 WATCH_STATES = frozenset(itertools.chain(FINISHED_STATES, STARTING_STATES,
                                          [states.PENDING]))
 
@@ -48,13 +46,13 @@ class DurationListener(base.Listener):
     to task metadata with key ``'duration'``.
     """
     def __init__(self, engine):
-        super(DurationListener, self).__init__(engine,
-                                               task_listen_for=WATCH_STATES,
-                                               flow_listen_for=WATCH_STATES)
+        super().__init__(engine,
+                         task_listen_for=WATCH_STATES,
+                         flow_listen_for=WATCH_STATES)
         self._timers = {co.TASK: {}, co.FLOW: {}}
 
     def deregister(self):
-        super(DurationListener, self).deregister()
+        super().deregister()
         # There should be none that still exist at deregistering time, so log a
         # warning if there were any that somehow still got left behind...
         for item_type, timers in self._timers.items():
@@ -105,23 +103,22 @@ class PrintingDurationListener(DurationL
     """Listener that prints the duration as well as recording it."""
 
     def __init__(self, engine, printer=None):
-        super(PrintingDurationListener, self).__init__(engine)
+        super().__init__(engine)
         if printer is None:
             self._printer = _printer
         else:
             self._printer = printer
 
     def _record_ending(self, timer, item_type, item_name, state):
-        super(PrintingDurationListener, self)._record_ending(
+        super()._record_ending(
             timer, item_type, item_name, state)
         self._printer("It took %s '%s' %0.2f seconds to"
                       " finish." % (item_type, item_name, timer.elapsed()))
 
     def _receiver(self, item_type, item_name, state):
-        super(PrintingDurationListener, self)._receiver(item_type,
-                                                        item_name, state)
+        super()._receiver(item_type, item_name, state)
         if state in STARTING_STATES:
-            self._printer("'%s' %s started." % (item_name, item_type))
+            self._printer("'{}' {} started.".format(item_name, item_type))
 
 
 class EventTimeListener(base.Listener):
@@ -139,7 +136,7 @@ class EventTimeListener(base.Listener):
                  task_listen_for=base.DEFAULT_LISTEN_FOR,
                  flow_listen_for=base.DEFAULT_LISTEN_FOR,
                  retry_listen_for=base.DEFAULT_LISTEN_FOR):
-        super(EventTimeListener, self).__init__(
+        super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
 
diff -pruN 5.12.0-2/taskflow/logging.py 6.0.2-2/taskflow/logging.py
--- 5.12.0-2/taskflow/logging.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/logging.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/patterns/graph_flow.py 6.0.2-2/taskflow/patterns/graph_flow.py
--- 5.12.0-2/taskflow/patterns/graph_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/patterns/graph_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -66,7 +64,7 @@ class Flow(flow.Flow):
     """
 
     def __init__(self, name, retry=None):
-        super(Flow, self).__init__(name, retry)
+        super().__init__(name, retry)
         self._graph = gr.DiGraph(name=name)
         self._graph.freeze()
 
@@ -332,7 +330,7 @@ class TargetedFlow(Flow):
     """
 
     def __init__(self, *args, **kwargs):
-        super(TargetedFlow, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._subgraph = None
         self._target = None
 
diff -pruN 5.12.0-2/taskflow/patterns/linear_flow.py 6.0.2-2/taskflow/patterns/linear_flow.py
--- 5.12.0-2/taskflow/patterns/linear_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/patterns/linear_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -36,7 +34,7 @@ class Flow(flow.Flow):
     """
 
     def __init__(self, name, retry=None):
-        super(Flow, self).__init__(name, retry)
+        super().__init__(name, retry)
         self._graph = gr.OrderedDiGraph(name=name)
         self._last_item = self._no_last_item
 
@@ -55,8 +53,7 @@ class Flow(flow.Flow):
         return len(self._graph)
 
     def __iter__(self):
-        for item in self._graph.nodes:
-            yield item
+        yield from self._graph.nodes
 
     @property
     def requires(self):
@@ -71,9 +68,7 @@ class Flow(flow.Flow):
         return frozenset(requires)
 
     def iter_nodes(self):
-        for (n, n_data) in self._graph.nodes(data=True):
-            yield (n, n_data)
+        yield from self._graph.nodes(data=True)
 
     def iter_links(self):
-        for (u, v, e_data) in self._graph.edges(data=True):
-            yield (u, v, e_data)
+        yield from self._graph.edges(data=True)
diff -pruN 5.12.0-2/taskflow/patterns/unordered_flow.py 6.0.2-2/taskflow/patterns/unordered_flow.py
--- 5.12.0-2/taskflow/patterns/unordered_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/patterns/unordered_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,7 +24,7 @@ class Flow(flow.Flow):
     """
 
     def __init__(self, name, retry=None):
-        super(Flow, self).__init__(name, retry)
+        super().__init__(name, retry)
         self._graph = gr.Graph(name=name)
 
     def add(self, *items):
@@ -40,16 +38,13 @@ class Flow(flow.Flow):
         return len(self._graph)
 
     def __iter__(self):
-        for item in self._graph:
-            yield item
+        yield from self._graph
 
     def iter_links(self):
-        for (u, v, e_data) in self._graph.edges(data=True):
-            yield (u, v, e_data)
+        yield from self._graph.edges(data=True)
 
     def iter_nodes(self):
-        for n, n_data in self._graph.nodes(data=True):
-            yield (n, n_data)
+        yield from self._graph.nodes(data=True)
 
     @property
     def requires(self):
diff -pruN 5.12.0-2/taskflow/persistence/backends/__init__.py 6.0.2-2/taskflow/persistence/backends/__init__.py
--- 5.12.0-2/taskflow/persistence/backends/__init__.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/__init__.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -64,7 +62,7 @@ def fetch(conf, namespace=BACKEND_NAMESP
                                    invoke_kwds=kwargs)
         return mgr.driver
     except RuntimeError as e:
-        raise exc.NotFound("Could not find backend %s: %s" % (backend, e))
+        raise exc.NotFound("Could not find backend {}: {}".format(backend, e))
 
 
 @contextlib.contextmanager
diff -pruN 5.12.0-2/taskflow/persistence/backends/impl_dir.py 6.0.2-2/taskflow/persistence/backends/impl_dir.py
--- 5.12.0-2/taskflow/persistence/backends/impl_dir.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/impl_dir.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
@@ -17,7 +15,6 @@
 
 import contextlib
 import errno
-import io
 import os
 import shutil
 
@@ -69,7 +66,7 @@ class DirBackend(path_based.PathBasedBac
     """
 
     def __init__(self, conf):
-        super(DirBackend, self).__init__(conf)
+        super().__init__(conf)
         max_cache_size = self._conf.get('max_cache_size')
         if max_cache_size is not None:
             max_cache_size = int(max_cache_size)
@@ -100,7 +97,7 @@ class Connection(path_based.PathBasedCon
         mtime = os.path.getmtime(filename)
         cache_info = self.backend.file_cache.setdefault(filename, {})
         if not cache_info or mtime > cache_info.get('mtime', 0):
-            with io.open(filename, 'r', encoding=self.backend.encoding) as fp:
+            with open(filename, encoding=self.backend.encoding) as fp:
                 cache_info['data'] = fp.read()
                 cache_info['mtime'] = mtime
         return cache_info['data']
@@ -108,7 +105,7 @@ class Connection(path_based.PathBasedCon
     def _write_to(self, filename, contents):
         contents = misc.binary_encode(contents,
                                       encoding=self.backend.encoding)
-        with io.open(filename, 'wb') as fp:
+        with open(filename, 'wb') as fp:
             fp.write(contents)
         self.backend.file_cache.pop(filename, None)
 
diff -pruN 5.12.0-2/taskflow/persistence/backends/impl_memory.py 6.0.2-2/taskflow/persistence/backends/impl_memory.py
--- 5.12.0-2/taskflow/persistence/backends/impl_memory.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/impl_memory.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
@@ -31,10 +29,10 @@ class FakeInode(tree.Node):
     """A in-memory filesystem inode-like object."""
 
     def __init__(self, item, path, value=None):
-        super(FakeInode, self).__init__(item, path=path, value=value)
+        super().__init__(item, path=path, value=value)
 
 
-class FakeFilesystem(object):
+class FakeFilesystem:
     """An in-memory filesystem-like structure.
 
     This filesystem uses posix style paths **only** so users must be careful
@@ -249,8 +247,7 @@ class FakeFilesystem(object):
             parts = path.split(pp.sep)[1:]
         if include_root:
             parts.insert(0, self._root.item)
-        for piece in parts:
-            yield piece
+        yield from parts
 
     def __delitem__(self, path):
         self.delete(path, recursive=True)
@@ -258,7 +255,7 @@ class FakeFilesystem(object):
     @staticmethod
     def _stringify_node(node):
         if 'target' in node.metadata:
-            return "%s (link to %s)" % (node.item, node.metadata['target'])
+            return "{} (link to {})".format(node.item, node.metadata['target'])
         else:
             return str(node.item)
 
@@ -309,7 +306,7 @@ class MemoryBackend(path_based.PathBased
     DEFAULT_PATH = pp.sep
 
     def __init__(self, conf=None):
-        super(MemoryBackend, self).__init__(conf)
+        super().__init__(conf)
         self.memory = FakeFilesystem(deep_copy=self._conf.get('deep_copy',
                                                               True))
         self.lock = fasteners.ReaderWriterLock()
@@ -323,7 +320,7 @@ class MemoryBackend(path_based.PathBased
 
 class Connection(path_based.PathBasedConnection):
     def __init__(self, backend):
-        super(Connection, self).__init__(backend)
+        super().__init__(backend)
         self.upgrade()
 
     @contextlib.contextmanager
diff -pruN 5.12.0-2/taskflow/persistence/backends/impl_sqlalchemy.py 6.0.2-2/taskflow/persistence/backends/impl_sqlalchemy.py
--- 5.12.0-2/taskflow/persistence/backends/impl_sqlalchemy.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/impl_sqlalchemy.py	2025-08-14 03:01:40.000000000 +0000
@@ -179,7 +179,7 @@ def _ping_listener(dbapi_conn, connectio
             raise
 
 
-class _Alchemist(object):
+class _Alchemist:
     """Internal <-> external row <-> objects + other helper functions.
 
     NOTE(harlowja): for internal usage only.
@@ -235,7 +235,7 @@ class SQLAlchemyBackend(base.Backend):
         }
     """
     def __init__(self, conf, engine=None):
-        super(SQLAlchemyBackend, self).__init__(conf)
+        super().__init__(conf)
         if engine is not None:
             self._engine = engine
             self._owns_engine = False
@@ -581,8 +581,7 @@ class Connection(base.Connection):
             exc.raise_with_cause(exc.StorageFailure,
                                  "Failed getting flow details in"
                                  " logbook '%s'" % book_uuid)
-        for flow_details in gathered:
-            yield flow_details
+        yield from gathered
 
     def get_flow_details(self, fd_uuid, lazy=False):
         try:
@@ -631,8 +630,7 @@ class Connection(base.Connection):
             exc.raise_with_cause(exc.StorageFailure,
                                  "Failed getting atom details in flow"
                                  " detail '%s'" % fd_uuid)
-        for atom_details in gathered:
-            yield atom_details
+        yield from gathered
 
     def close(self):
         pass
diff -pruN 5.12.0-2/taskflow/persistence/backends/impl_zookeeper.py 6.0.2-2/taskflow/persistence/backends/impl_zookeeper.py
--- 5.12.0-2/taskflow/persistence/backends/impl_zookeeper.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/impl_zookeeper.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 AT&T Labs All Rights Reserved.
 #    Copyright (C) 2015 Rackspace Hosting All Rights Reserved.
 #
@@ -56,7 +54,7 @@ class ZkBackend(path_based.PathBasedBack
     DEFAULT_PATH = '/taskflow'
 
     def __init__(self, conf, client=None):
-        super(ZkBackend, self).__init__(conf)
+        super().__init__(conf)
         if not paths.isabs(self._path):
             raise ValueError("Zookeeper path must be absolute")
         if client is not None:
@@ -87,7 +85,7 @@ class ZkBackend(path_based.PathBasedBack
 
 class ZkConnection(path_based.PathBasedConnection):
     def __init__(self, backend, client, conf):
-        super(ZkConnection, self).__init__(backend)
+        super().__init__(backend)
         self._conf = conf
         self._client = client
         with self._exc_wrapper():
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/env.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/env.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/env.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/env.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py	2025-08-14 03:01:40.000000000 +0000
@@ -0,0 +1,74 @@
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+"""Add unique into all indexes
+
+Revision ID: 00af93df9d77
+Revises: 40fc8c914bd2
+Create Date: 2025-02-28 15:44:37.066720
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '00af93df9d77'
+down_revision = '40fc8c914bd2'
+
+from alembic import op
+
+
+def upgrade():
+    bind = op.get_bind()
+    engine = bind.engine
+    if engine.name == 'mysql':
+        with op.batch_alter_table("logbooks") as batch_op:
+            batch_op.drop_index("logbook_uuid_idx")
+            batch_op.create_index(
+                index_name="logbook_uuid_idx",
+                columns=['uuid'],
+                unique=True)
+
+        with op.batch_alter_table("flowdetails") as batch_op:
+            batch_op.drop_index("flowdetails_uuid_idx")
+            batch_op.create_index(
+                index_name="flowdetails_uuid_idx",
+                columns=['uuid'],
+                unique=True)
+
+        with op.batch_alter_table("atomdetails") as batch_op:
+            batch_op.drop_index("taskdetails_uuid_idx")
+            batch_op.create_index(
+                index_name="taskdetails_uuid_idx",
+                columns=['uuid'],
+                unique=True)
+
+
+def downgrade():
+    bind = op.get_bind()
+    engine = bind.engine
+    if engine.name == 'mysql':
+        with op.batch_alter_table("logbooks") as batch_op:
+            batch_op.drop_index("logbook_uuid_idx")
+            batch_op.create_index(
+                index_name="logbook_uuid_idx",
+                columns=['uuid'])
+
+        with op.batch_alter_table("flowdetails") as batch_op:
+            batch_op.drop_index("flowdetails_uuid_idx")
+            batch_op.create_index(
+                index_name="flowdetails_uuid_idx",
+                columns=['uuid'])
+
+        with op.batch_alter_table("atomdetails") as batch_op:
+            batch_op.drop_index("taskdetails_uuid_idx")
+            batch_op.create_index(
+                index_name="taskdetails_uuid_idx",
+                columns=['uuid'])
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/tables.py 6.0.2-2/taskflow/persistence/backends/sqlalchemy/tables.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/tables.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/backends/sqlalchemy/tables.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/base.py 6.0.2-2/taskflow/persistence/base.py
--- 5.12.0-2/taskflow/persistence/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/base.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +17,7 @@ import abc
 from taskflow.persistence import models
 
 
-class Backend(object, metaclass=abc.ABCMeta):
+class Backend(metaclass=abc.ABCMeta):
     """Base class for persistence backends."""
 
     def __init__(self, conf):
@@ -39,7 +37,7 @@ class Backend(object, metaclass=abc.ABCM
         """Closes any resources this backend has open."""
 
 
-class Connection(object, metaclass=abc.ABCMeta):
+class Connection(metaclass=abc.ABCMeta):
     """Base class for backend connections."""
 
     @property
diff -pruN 5.12.0-2/taskflow/persistence/models.py 6.0.2-2/taskflow/persistence/models.py
--- 5.12.0-2/taskflow/persistence/models.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/models.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
@@ -43,7 +41,7 @@ def _format_meta(metadata, indent):
         # in percent format.
         if k == 'progress' and isinstance(v, misc.NUMERIC_TYPES):
             v = "%0.2f%%" % (v * 100.0)
-        lines.append("%s+ %s = %s" % (" " * (indent + 2), k, v))
+        lines.append("{}+ {} = {}".format(" " * (indent + 2), k, v))
     return lines
 
 
@@ -55,8 +53,8 @@ def _format_shared(obj, indent):
     for attr_name in ("uuid", "state"):
         if not hasattr(obj, attr_name):
             continue
-        lines.append("%s- %s = %s" % (" " * indent, attr_name,
-                                      getattr(obj, attr_name)))
+        lines.append("{}- {} = {}".format(" " * indent, attr_name,
+                                          getattr(obj, attr_name)))
     return lines
 
 
@@ -98,7 +96,7 @@ def _fix_meta(data):
     return meta
 
 
-class LogBook(object):
+class LogBook:
     """A collection of flow details and associated metadata.
 
     Typically this class contains a collection of flow detail entries
@@ -143,7 +141,7 @@ class LogBook(object):
          - created_at = ...
         """
         cls_name = self.__class__.__name__
-        lines = ["%s%s: '%s'" % (" " * indent, cls_name, self.name)]
+        lines = ["{}{}: '{}'".format(" " * indent, cls_name, self.name)]
         lines.extend(_format_shared(self, indent=indent + 1))
         lines.extend(_format_meta(self.meta, indent=indent + 1))
         if self.created_at is not None:
@@ -258,8 +256,7 @@ class LogBook(object):
         return self._name
 
     def __iter__(self):
-        for fd in self._flowdetails_by_id.values():
-            yield fd
+        yield from self._flowdetails_by_id.values()
 
     def __len__(self):
         return len(self._flowdetails_by_id)
@@ -288,7 +285,7 @@ class LogBook(object):
         return clone
 
 
-class FlowDetail(object):
+class FlowDetail:
     """A collection of atom details and associated metadata.
 
     Typically this class contains a collection of atom detail entries that
@@ -345,7 +342,7 @@ class FlowDetail(object):
          - state = ...
         """
         cls_name = self.__class__.__name__
-        lines = ["%s%s: '%s'" % (" " * indent, cls_name, self.name)]
+        lines = ["{}{}: '{}'".format(" " * indent, cls_name, self.name)]
         lines.extend(_format_shared(self, indent=indent + 1))
         lines.extend(_format_meta(self.meta, indent=indent + 1))
         for atom_detail in self:
@@ -463,14 +460,13 @@ class FlowDetail(object):
         return self._name
 
     def __iter__(self):
-        for ad in self._atomdetails_by_id.values():
-            yield ad
+        yield from self._atomdetails_by_id.values()
 
     def __len__(self):
         return len(self._atomdetails_by_id)
 
 
-class AtomDetail(object, metaclass=abc.ABCMeta):
+class AtomDetail(metaclass=abc.ABCMeta):
     """A collection of atom specific runtime information and metadata.
 
     This is a base **abstract** class that contains attributes that are used
@@ -688,14 +684,14 @@ class AtomDetail(object, metaclass=abc.A
     def pformat(self, indent=0, linesep=os.linesep):
         """Pretty formats this atom detail into a string."""
         cls_name = self.__class__.__name__
-        lines = ["%s%s: '%s'" % (" " * (indent), cls_name, self.name)]
+        lines = ["{}{}: '{}'".format(" " * (indent), cls_name, self.name)]
         lines.extend(_format_shared(self, indent=indent + 1))
         lines.append("%s- version = %s"
                      % (" " * (indent + 1), misc.get_version_string(self)))
         lines.append("%s- results = %s"
                      % (" " * (indent + 1), self.results))
-        lines.append("%s- failure = %s" % (" " * (indent + 1),
-                                           bool(self.failure)))
+        lines.append("{}- failure = {}".format(" " * (indent + 1),
+                                               bool(self.failure)))
         lines.extend(_format_meta(self.meta, indent=indent + 1))
         return linesep.join(lines)
 
@@ -793,7 +789,7 @@ class TaskDetail(AtomDetail):
                                           " task details")
         if other is self:
             return self
-        super(TaskDetail, self).merge(other, deep_copy=deep_copy)
+        super().merge(other, deep_copy=deep_copy)
         self.results = other.results
         self.revert_results = other.revert_results
         return self
@@ -834,7 +830,7 @@ class RetryDetail(AtomDetail):
     """
 
     def __init__(self, name, uuid):
-        super(RetryDetail, self).__init__(name, uuid)
+        super().__init__(name, uuid)
         self.results = []
 
     def reset(self, state):
@@ -983,7 +979,7 @@ class RetryDetail(AtomDetail):
                 new_results.append((data, new_failures))
             return new_results
 
-        obj = super(RetryDetail, cls).from_dict(data)
+        obj = super().from_dict(data)
         obj.results = decode_results(obj.results)
         return obj
 
@@ -1001,7 +997,7 @@ class RetryDetail(AtomDetail):
                 new_results.append((data, new_failures))
             return new_results
 
-        base = super(RetryDetail, self).to_dict()
+        base = super().to_dict()
         base['results'] = encode_results(base.get('results'))
         return base
 
@@ -1033,7 +1029,7 @@ class RetryDetail(AtomDetail):
                                           " retry details")
         if other is self:
             return self
-        super(RetryDetail, self).merge(other, deep_copy=deep_copy)
+        super().merge(other, deep_copy=deep_copy)
         results = []
         # NOTE(imelnikov): we can't just deep copy Failures, as they
         # contain tracebacks, which are not copyable.
@@ -1053,8 +1049,7 @@ _DETAIL_TO_NAME = {
     RetryDetail: 'RETRY_DETAIL',
     TaskDetail: 'TASK_DETAIL',
 }
-_NAME_TO_DETAIL = dict((name, cls)
-                       for (cls, name) in _DETAIL_TO_NAME.items())
+_NAME_TO_DETAIL = {name: cls for (cls, name) in _DETAIL_TO_NAME.items()}
 ATOM_TYPES = list(_NAME_TO_DETAIL.keys())
 
 
diff -pruN 5.12.0-2/taskflow/persistence/path_based.py 6.0.2-2/taskflow/persistence/path_based.py
--- 5.12.0-2/taskflow/persistence/path_based.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/persistence/path_based.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -36,7 +34,7 @@ class PathBasedBackend(base.Backend, met
     DEFAULT_PATH = None
 
     def __init__(self, conf):
-        super(PathBasedBackend, self).__init__(conf)
+        super().__init__(conf)
         self._path = self._conf.get('path', None)
         if not self._path:
             self._path = self.DEFAULT_PATH
diff -pruN 5.12.0-2/taskflow/retry.py 6.0.2-2/taskflow/retry.py
--- 5.12.0-2/taskflow/retry.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/retry.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
@@ -71,7 +69,7 @@ EXECUTE_REVERT_HISTORY = 'history'
 REVERT_FLOW_FAILURES = 'flow_failures'
 
 
-class History(object):
+class History:
     """Helper that simplifies interactions with retry historical contents."""
 
     def __init__(self, contents, failure=None):
@@ -99,8 +97,7 @@ class History(object):
                 self._contents[index],
             ]
         for (provided, outcomes) in contents:
-            for (owner, outcome) in outcomes.items():
-                yield (owner, outcome)
+            yield from outcomes.items()
 
     def __len__(self):
         return len(self._contents)
@@ -154,10 +151,10 @@ class Retry(atom.Atom, metaclass=abc.ABC
 
     def __init__(self, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None):
-        super(Retry, self).__init__(name=name, provides=provides,
-                                    requires=requires, rebind=rebind,
-                                    auto_extract=auto_extract,
-                                    ignore_list=[EXECUTE_REVERT_HISTORY])
+        super().__init__(name=name, provides=provides,
+                         requires=requires, rebind=rebind,
+                         auto_extract=auto_extract,
+                         ignore_list=[EXECUTE_REVERT_HISTORY])
 
     @property
     def name(self):
@@ -262,8 +259,7 @@ class Times(Retry):
 
     def __init__(self, attempts=1, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None, revert_all=False):
-        super(Times, self).__init__(name, provides, requires,
-                                    auto_extract, rebind)
+        super().__init__(name, provides, requires, auto_extract, rebind)
         self._attempts = attempts
 
         if revert_all:
@@ -285,8 +281,7 @@ class ForEachBase(Retry):
 
     def __init__(self, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None, revert_all=False):
-        super(ForEachBase, self).__init__(name, provides, requires,
-                                          auto_extract, rebind)
+        super().__init__(name, provides, requires, auto_extract, rebind)
 
         if revert_all:
             self._revert_action = REVERT_ALL
@@ -336,8 +331,8 @@ class ForEach(ForEachBase):
 
     def __init__(self, values, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None, revert_all=False):
-        super(ForEach, self).__init__(name, provides, requires,
-                                      auto_extract, rebind, revert_all)
+        super().__init__(name, provides, requires, auto_extract, rebind,
+                         revert_all)
         self._values = values
 
     def on_failure(self, history, *args, **kwargs):
@@ -368,9 +363,8 @@ class ParameterizedForEach(ForEachBase):
 
     def __init__(self, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None, revert_all=False):
-        super(ParameterizedForEach, self).__init__(name, provides, requires,
-                                                   auto_extract, rebind,
-                                                   revert_all)
+        super().__init__(name, provides, requires, auto_extract, rebind,
+                         revert_all)
 
     def on_failure(self, values, history, *args, **kwargs):
         return self._on_failure(values, history)
diff -pruN 5.12.0-2/taskflow/states.py 6.0.2-2/taskflow/states.py
--- 5.12.0-2/taskflow/states.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/states.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/storage.py 6.0.2-2/taskflow/storage.py
--- 5.12.0-2/taskflow/storage.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/storage.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -87,7 +85,7 @@ META_PROGRESS = 'progress'
 META_PROGRESS_DETAILS = 'progress_details'
 
 
-class _ProviderLocator(object):
+class _ProviderLocator:
     """Helper to start to better decouple the finding logic from storage.
 
     WIP: part of the larger effort to cleanup/refactor the finding of named
@@ -149,7 +147,7 @@ class _ProviderLocator(object):
                 return (searched_providers, providers_and_results)
         if not atom_providers:
             return (searched_providers, providers_and_results)
-        atom_providers_by_name = dict((p.name, p) for p in atom_providers)
+        atom_providers_by_name = {p.name: p for p in atom_providers}
         for accessible_atom_names in iter(scope_walker):
             # *Always* retain the scope ordering (if any matches
             # happen); instead of retaining the possible provider match
@@ -199,7 +197,7 @@ class _ProviderLocator(object):
         _searched_providers, providers_and_results = self._find(
             looking_for, scope_walker=scope_walker,
             short_circuit=False, find_potentials=True)
-        return set(p for (p, _provider_results) in providers_and_results)
+        return {p for (p, _provider_results) in providers_and_results}
 
     def find(self, looking_for, scope_walker=None, short_circuit=True):
         """Returns the accessible providers."""
@@ -208,7 +206,7 @@ class _ProviderLocator(object):
                           find_potentials=False)
 
 
-class _Provider(object):
+class _Provider:
     """A named symbol provider that produces a output at the given index."""
 
     def __init__(self, name, index):
@@ -270,7 +268,7 @@ def _item_from_first_of(providers, looki
         " extraction" % (looking_for, providers))
 
 
-class Storage(object):
+class Storage:
     """Interface between engines and logbook and its backend (if any).
 
     This class provides a simple interface to save atoms of a given flow and
@@ -326,8 +324,8 @@ class Storage(object):
                 fail_cache[states.REVERT] = ad.revert_failure
             self._failures[ad.name] = fail_cache
 
-        self._atom_name_to_uuid = dict((ad.name, ad.uuid)
-                                       for ad in self._flowdetail)
+        self._atom_name_to_uuid = {ad.name: ad.uuid
+                                   for ad in self._flowdetail}
         try:
             source, _clone = self._atomdetail_by_name(
                 self.injector_name, expected_type=models.TaskDetail)
@@ -336,7 +334,7 @@ class Storage(object):
         else:
             names_iter = source.results.keys()
             self._set_result_mapping(source.name,
-                                     dict((name, name) for name in names_iter))
+                                     {name: name for name in names_iter})
 
     def _with_connection(self, functor, *args, **kwargs):
         # Run the given functor with a backend connection as its first
@@ -911,7 +909,7 @@ class Storage(object):
             provider_name, names = save_persistent()
 
         self._set_result_mapping(provider_name,
-                                 dict((name, name) for name in names))
+                                 {name: name for name in names})
 
     def _fetch_providers(self, looking_for, providers=None):
         """Return pair of (default providers, atom providers)."""
diff -pruN 5.12.0-2/taskflow/task.py 6.0.2-2/taskflow/task.py
--- 5.12.0-2/taskflow/task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/task.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright 2015 Hewlett-Packard Development Company, L.P.
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
@@ -61,10 +59,10 @@ class Task(atom.Atom, metaclass=abc.ABCM
                  ignore_list=None, revert_rebind=None, revert_requires=None):
         if name is None:
             name = reflection.get_class_name(self)
-        super(Task, self).__init__(name, provides=provides, requires=requires,
-                                   auto_extract=auto_extract, rebind=rebind,
-                                   inject=inject, revert_rebind=revert_rebind,
-                                   revert_requires=revert_requires)
+        super().__init__(name, provides=provides, requires=requires,
+                         auto_extract=auto_extract, rebind=rebind,
+                         inject=inject, revert_rebind=revert_rebind,
+                         revert_requires=revert_requires)
         self._notifier = notifier.RestrictedNotifier(self.TASK_EVENTS)
 
     @property
@@ -131,8 +129,7 @@ class FunctorTask(Task):
                                  " be callable")
         if name is None:
             name = reflection.get_callable_name(execute)
-        super(FunctorTask, self).__init__(name, provides=provides,
-                                          inject=inject)
+        super().__init__(name, provides=provides, inject=inject)
         self._execute = execute
         self._revert = revert
         if version is not None:
@@ -190,12 +187,12 @@ class ReduceFunctorTask(Task):
 
         if name is None:
             name = reflection.get_callable_name(functor)
-        super(ReduceFunctorTask, self).__init__(name=name,
-                                                provides=provides,
-                                                inject=inject,
-                                                requires=requires,
-                                                rebind=rebind,
-                                                auto_extract=auto_extract)
+        super().__init__(name=name,
+                         provides=provides,
+                         inject=inject,
+                         requires=requires,
+                         rebind=rebind,
+                         auto_extract=auto_extract)
 
         self._functor = functor
 
@@ -235,10 +232,10 @@ class MapFunctorTask(Task):
 
         if name is None:
             name = reflection.get_callable_name(functor)
-        super(MapFunctorTask, self).__init__(name=name, provides=provides,
-                                             inject=inject, requires=requires,
-                                             rebind=rebind,
-                                             auto_extract=auto_extract)
+        super().__init__(name=name, provides=provides,
+                         inject=inject, requires=requires,
+                         rebind=rebind,
+                         auto_extract=auto_extract)
 
         self._functor = functor
 
diff -pruN 5.12.0-2/taskflow/test.py 6.0.2-2/taskflow/test.py
--- 5.12.0-2/taskflow/test.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/test.py	2025-08-14 03:01:40.000000000 +0000
@@ -27,7 +27,7 @@ from taskflow.tests import utils
 from taskflow.utils import misc
 
 
-class GreaterThanEqual(object):
+class GreaterThanEqual:
     """Matches if the item is geq than the matchers reference object."""
 
     def __init__(self, source):
@@ -36,10 +36,10 @@ class GreaterThanEqual(object):
     def match(self, other):
         if other >= self.source:
             return None
-        return matchers.Mismatch("%s was not >= %s" % (other, self.source))
+        return matchers.Mismatch("{} was not >= {}".format(other, self.source))
 
 
-class FailureRegexpMatcher(object):
+class FailureRegexpMatcher:
     """Matches if the failure was caused by the given exception and message.
 
     This will match if a given failure contains and exception of the given
@@ -60,7 +60,7 @@ class FailureRegexpMatcher(object):
                                  (failure, self.exc_class))
 
 
-class ItemsEqual(object):
+class ItemsEqual:
     """Matches the items in two sequences.
 
     This matcher will validate that the provided sequence has the same elements
@@ -166,7 +166,7 @@ class TestCase(base.BaseTestCase):
 class MockTestCase(TestCase):
 
     def setUp(self):
-        super(MockTestCase, self).setUp()
+        super().setUp()
         self.master_mock = mock.Mock(name='master_mock')
 
     def patch(self, target, autospec=True, **kwargs):
diff -pruN 5.12.0-2/taskflow/tests/test_examples.py 6.0.2-2/taskflow/tests/test_examples.py
--- 5.12.0-2/taskflow/tests/test_examples.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/test_examples.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_builder.py 6.0.2-2/taskflow/tests/unit/action_engine/test_builder.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_builder.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/action_engine/test_builder.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -298,7 +296,7 @@ class BuildersTest(test.TestCase):
             flow, initial_state=st.RUNNING)
         transitions = list(machine_runner.run_iter(builder.START))
 
-        occurrences = dict((t, transitions.count(t)) for t in transitions)
+        occurrences = {t: transitions.count(t) for t in transitions}
         self.assertEqual(10, occurrences.get((st.SCHEDULING, st.WAITING)))
         self.assertEqual(10, occurrences.get((st.WAITING, st.ANALYZING)))
         self.assertEqual(9, occurrences.get((st.ANALYZING, st.SCHEDULING)))
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_compile.py 6.0.2-2/taskflow/tests/unit/action_engine/test_compile.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_compile.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/action_engine/test_compile.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -110,7 +108,7 @@ class PatternCompileTest(test.TestCase):
             ('c', 'test[$]'),
             ('d', 'test[$]'),
         ])
-        self.assertEqual(set(['test']), set(g.no_predecessors_iter()))
+        self.assertEqual({'test'}, set(g.no_predecessors_iter()))
 
     def test_linear_nested(self):
         a, b, c, d = test_utils.make_many(4)
@@ -283,7 +281,7 @@ class PatternCompileTest(test.TestCase):
         self.assertEqual(4, len(g))
         self.assertCountEqual(g.edges(data=True), [
             ('test', 'a', {'invariant': True}),
-            ('a', 'b', {'reasons': set(['x'])}),
+            ('a', 'b', {'reasons': {'x'}}),
             ('b', 'test[$]', {'invariant': True}),
         ])
         self.assertCountEqual(['test'], g.no_predecessors_iter())
@@ -302,7 +300,7 @@ class PatternCompileTest(test.TestCase):
         self.assertCountEqual(g.edges(data=True), [
             ('test', 'a', {'invariant': True}),
             ('test2', 'b', {'invariant': True}),
-            ('a', 'test2', {'reasons': set(['x'])}),
+            ('a', 'test2', {'reasons': {'x'}}),
             ('b', 'c', {'invariant': True}),
             ('c', 'test2[$]', {'invariant': True}),
             ('test2[$]', 'test[$]', {'invariant': True}),
@@ -325,7 +323,7 @@ class PatternCompileTest(test.TestCase):
             ('a', 'test[$]', {'invariant': True}),
 
             # The 'x' requirement is produced out of test2...
-            ('test2[$]', 'a', {'reasons': set(['x'])}),
+            ('test2[$]', 'a', {'reasons': {'x'}}),
 
             ('test2', 'b', {'invariant': True}),
             ('b', 'c', {'invariant': True}),
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_creation.py 6.0.2-2/taskflow/tests/unit/action_engine/test_creation.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_creation.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/action_engine/test_creation.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,11 +24,6 @@ from taskflow.tests import utils
 from taskflow.utils import eventlet_utils as eu
 from taskflow.utils import persistence_utils as pu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 class ParallelCreationTest(test.TestCase):
     @staticmethod
@@ -48,26 +41,12 @@ class ParallelCreationTest(test.TestCase
             self.assertIsInstance(eng._task_executor,
                                   executor.ParallelThreadTaskExecutor)
 
-    @testtools.skipIf(pe is None, 'process_executor is not available')
-    def test_process_string_creation(self):
-        for s in ['process', 'processes']:
-            eng = self._create_engine(executor=s)
-            self.assertIsInstance(eng._task_executor,
-                                  pe.ParallelProcessTaskExecutor)
-
     def test_thread_executor_creation(self):
         with futurist.ThreadPoolExecutor(1) as e:
             eng = self._create_engine(executor=e)
             self.assertIsInstance(eng._task_executor,
                                   executor.ParallelThreadTaskExecutor)
 
-    @testtools.skipIf(pe is None, 'process_executor is not available')
-    def test_process_executor_creation(self):
-        with futurist.ProcessPoolExecutor(1) as e:
-            eng = self._create_engine(executor=e)
-            self.assertIsInstance(eng._task_executor,
-                                  pe.ParallelProcessTaskExecutor)
-
     @testtools.skipIf(not eu.EVENTLET_AVAILABLE, 'eventlet is not available')
     def test_green_executor_creation(self):
         with futurist.GreenThreadPoolExecutor(1) as e:
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_process_executor.py 6.0.2-2/taskflow/tests/unit/action_engine/test_process_executor.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_process_executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/action_engine/test_process_executor.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-import errno
-import socket
-import threading
-
-import testtools
-
-from taskflow import task
-from taskflow import test
-from taskflow.test import mock
-from taskflow.tests import utils as test_utils
-
-try:
-    import asyncore
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    asyncore = None
-    pe = None
-
-
-@testtools.skipIf(asyncore is None, 'process_executor is not available')
-class ProcessExecutorHelpersTest(test.TestCase):
-    def test_reader(self):
-        capture_buf = []
-
-        def do_capture(identity, message_capture_func):
-            capture_buf.append(message_capture_func())
-
-        r = pe.Reader(b"secret", do_capture)
-        for data in pe._encode_message(b"secret", ['hi'], b'me'):
-            self.assertEqual(len(data), r.bytes_needed)
-            r.feed(data)
-
-        self.assertEqual(1, len(capture_buf))
-        self.assertEqual(['hi'], capture_buf[0])
-
-    def test_bad_hmac_reader(self):
-        r = pe.Reader(b"secret-2", lambda ident, capture_func: capture_func())
-        in_data = b"".join(pe._encode_message(b"secret", ['hi'], b'me'))
-        self.assertRaises(pe.BadHmacValueError, r.feed, in_data)
-
-    @mock.patch("socket.socket")
-    def test_no_connect_channel(self, mock_socket_factory):
-        mock_sock = mock.MagicMock()
-        mock_socket_factory.return_value = mock_sock
-        mock_sock.connect.side_effect = socket.error(errno.ECONNREFUSED,
-                                                     'broken')
-        c = pe.Channel(2222, b"me", b"secret")
-        self.assertRaises(socket.error, c.send, "hi")
-        self.assertTrue(c.dead)
-        self.assertTrue(mock_sock.close.called)
-
-    def test_send_and_dispatch(self):
-        details_capture = []
-
-        t = test_utils.DummyTask("rcver")
-        t.notifier.register(
-            task.EVENT_UPDATE_PROGRESS,
-            lambda _event_type, details: details_capture.append(details))
-
-        d = pe.Dispatcher({}, b'secret', b'server-josh')
-        d.setup()
-        d.targets[b'child-josh'] = t
-
-        s = threading.Thread(target=asyncore.loop, kwargs={'map': d.map})
-        s.start()
-        self.addCleanup(s.join)
-
-        c = pe.Channel(d.port, b'child-josh', b'secret')
-        self.addCleanup(c.close)
-
-        send_what = [
-            {'progress': 0.1},
-            {'progress': 0.2},
-            {'progress': 0.3},
-            {'progress': 0.4},
-            {'progress': 0.5},
-            {'progress': 0.6},
-            {'progress': 0.7},
-            {'progress': 0.8},
-            {'progress': 0.9},
-        ]
-        e_s = pe.EventSender(c)
-        for details in send_what:
-            e_s(task.EVENT_UPDATE_PROGRESS, details)
-
-        # This forces the thread to shutdown (since the asyncore loop
-        # will exit when no more sockets exist to process...)
-        d.close()
-
-        self.assertEqual(len(send_what), len(details_capture))
-        self.assertEqual(send_what, details_capture)
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_scoping.py 6.0.2-2/taskflow/tests/unit/action_engine/test_scoping.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_scoping.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/action_engine/test_scoping.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -272,7 +270,7 @@ class MixedPatternScopingTest(test.TestC
         # This may be different after/if the following is resolved:
         #
         # https://github.com/networkx/networkx/issues/1181 (and a few others)
-        self.assertEqual(set(['customer', 'customer2']),
+        self.assertEqual({'customer', 'customer2'},
                          set(_get_scopes(c, washer)[0]))
         self.assertEqual([], _get_scopes(c, customer2))
         self.assertEqual([], _get_scopes(c, customer))
diff -pruN 5.12.0-2/taskflow/tests/unit/jobs/base.py 6.0.2-2/taskflow/tests/unit/jobs/base.py
--- 5.12.0-2/taskflow/tests/unit/jobs/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/jobs/base.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -37,7 +35,7 @@ def connect_close(*args):
             a.close()
 
 
-class BoardTestMixin(object):
+class BoardTestMixin:
 
     @contextlib.contextmanager
     def flush(self, client):
diff -pruN 5.12.0-2/taskflow/tests/unit/jobs/test_entrypoint.py 6.0.2-2/taskflow/tests/unit/jobs/test_entrypoint.py
--- 5.12.0-2/taskflow/tests/unit/jobs/test_entrypoint.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/jobs/test_entrypoint.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/jobs/test_redis_job.py 6.0.2-2/taskflow/tests/unit/jobs/test_redis_job.py
--- 5.12.0-2/taskflow/tests/unit/jobs/test_redis_job.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/jobs/test_redis_job.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -101,7 +99,7 @@ class RedisJobboardTest(test.TestCase, b
             self.assertEqual(0, len(possible_jobs))
 
     def setUp(self):
-        super(RedisJobboardTest, self).setUp()
+        super().setUp()
         self.client, self.board = self.create_board()
 
     def test__make_client(self):
diff -pruN 5.12.0-2/taskflow/tests/unit/jobs/test_zk_job.py 6.0.2-2/taskflow/tests/unit/jobs/test_zk_job.py
--- 5.12.0-2/taskflow/tests/unit/jobs/test_zk_job.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/jobs/test_zk_job.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -137,7 +135,7 @@ class ZookeeperJobboardTest(test.TestCas
         return (client, board)
 
     def setUp(self):
-        super(ZookeeperJobboardTest, self).setUp()
+        super().setUp()
         self.client, self.board = self.create_board()
 
 
@@ -152,7 +150,7 @@ class ZakeJobboardTest(test.TestCase, Zo
         return (client, board)
 
     def setUp(self):
-        super(ZakeJobboardTest, self).setUp()
+        super().setUp()
         self.client, self.board = self.create_board()
         self.bad_paths = [self.board.path, self.board.trash_path]
         self.bad_paths.extend(zake_utils.partition_path(self.board.path))
diff -pruN 5.12.0-2/taskflow/tests/unit/patterns/test_graph_flow.py 6.0.2-2/taskflow/tests/unit/patterns/test_graph_flow.py
--- 5.12.0-2/taskflow/tests/unit/patterns/test_graph_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/patterns/test_graph_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -76,8 +74,8 @@ class GraphFlowTest(test.TestCase):
         self.assertEqual(1, len(f))
         self.assertEqual([task], list(f))
         self.assertEqual([], list(f.iter_links()))
-        self.assertEqual(set(['a', 'b']), f.requires)
-        self.assertEqual(set(['c', 'd']), f.provides)
+        self.assertEqual({'a', 'b'}, f.requires)
+        self.assertEqual({'c', 'd'}, f.provides)
 
     def test_graph_flow_two_independent_tasks(self):
         task1 = _task(name='task1')
@@ -95,11 +93,11 @@ class GraphFlowTest(test.TestCase):
 
         self.assertEqual(2, len(f))
         self.assertCountEqual(f, [task1, task2])
-        self.assertEqual([(task1, task2, {'reasons': set(['a'])})],
+        self.assertEqual([(task1, task2, {'reasons': {'a'}})],
                          list(f.iter_links()))
 
         self.assertEqual(set(), f.requires)
-        self.assertEqual(set(['a']), f.provides)
+        self.assertEqual({'a'}, f.provides)
 
     def test_graph_flow_two_dependent_tasks_two_different_calls(self):
         task1 = _task(name='task1', provides=['a'])
@@ -108,7 +106,7 @@ class GraphFlowTest(test.TestCase):
 
         self.assertEqual(2, len(f))
         self.assertCountEqual(f, [task1, task2])
-        self.assertEqual([(task1, task2, {'reasons': set(['a'])})],
+        self.assertEqual([(task1, task2, {'reasons': {'a'}})],
                          list(f.iter_links()))
 
     def test_graph_flow_two_task_same_provide(self):
@@ -116,14 +114,14 @@ class GraphFlowTest(test.TestCase):
         task2 = _task(name='task2', provides=['a', 'c'])
         f = gf.Flow('test')
         f.add(task2, task1)
-        self.assertEqual(set(['a', 'b', 'c']), f.provides)
+        self.assertEqual({'a', 'b', 'c'}, f.provides)
 
     def test_graph_flow_ambiguous_provides(self):
         task1 = _task(name='task1', provides=['a', 'b'])
         task2 = _task(name='task2', provides=['a'])
         f = gf.Flow('test')
         f.add(task1, task2)
-        self.assertEqual(set(['a', 'b']), f.provides)
+        self.assertEqual({'a', 'b'}, f.provides)
         task3 = _task(name='task3', requires=['a'])
         self.assertRaises(exc.AmbiguousDependency, f.add, task3)
 
@@ -132,7 +130,7 @@ class GraphFlowTest(test.TestCase):
         task2 = _task(name='task2', requires=['a', 'b'])
         f = gf.Flow('test')
         f.add(task1, task2, resolve_requires=False)
-        self.assertEqual(set(['a', 'b']), f.requires)
+        self.assertEqual({'a', 'b'}, f.requires)
 
     def test_graph_flow_no_resolve_existing(self):
         task1 = _task(name='task1', requires=['a', 'b'])
@@ -140,7 +138,7 @@ class GraphFlowTest(test.TestCase):
         f = gf.Flow('test')
         f.add(task1)
         f.add(task2, resolve_existing=False)
-        self.assertEqual(set(['a', 'b']), f.requires)
+        self.assertEqual({'a', 'b'}, f.requires)
 
     def test_graph_flow_resolve_existing(self):
         task1 = _task(name='task1', requires=['a', 'b'])
@@ -148,7 +146,7 @@ class GraphFlowTest(test.TestCase):
         f = gf.Flow('test')
         f.add(task1)
         f.add(task2, resolve_existing=True)
-        self.assertEqual(set([]), f.requires)
+        self.assertEqual(set(), f.requires)
 
     def test_graph_flow_with_retry(self):
         ret = retry.AlwaysRevert(requires=['a'], provides=['b'])
@@ -156,11 +154,11 @@ class GraphFlowTest(test.TestCase):
         self.assertIs(f.retry, ret)
         self.assertEqual('test_retry', ret.name)
 
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['b']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'b'}, f.provides)
 
     def test_graph_flow_ordering(self):
-        task1 = _task('task1', provides=set(['a', 'b']))
+        task1 = _task('task1', provides={'a', 'b'})
         task2 = _task('task2', provides=['c'], requires=['a', 'b'])
         task3 = _task('task3', provides=[], requires=['c'])
         f = gf.Flow('test').add(task1, task2, task3)
@@ -168,8 +166,8 @@ class GraphFlowTest(test.TestCase):
         self.assertEqual(3, len(f))
 
         self.assertCountEqual(list(f.iter_links()), [
-            (task1, task2, {'reasons': set(['a', 'b'])}),
-            (task2, task3, {'reasons': set(['c'])})
+            (task1, task2, {'reasons': {'a', 'b'}}),
+            (task2, task3, {'reasons': {'c'}})
         ])
 
     def test_graph_flow_links(self):
@@ -190,7 +188,7 @@ class GraphFlowTest(test.TestCase):
         self.assertIs(linked, f)
         expected_meta = {
             'manual': True,
-            'reasons': set(['a'])
+            'reasons': {'a'}
         }
         self.assertCountEqual(list(f.iter_links()), [
             (task1, task2, expected_meta)
@@ -236,7 +234,7 @@ class GraphFlowTest(test.TestCase):
         task3 = _task('task3', provides=['c'])
         f1 = gf.Flow('nested')
         f1.add(task3)
-        tasks = set([task1, task2, f1])
+        tasks = {task1, task2, f1}
         f = gf.Flow('test').add(task1, task2, f1)
         for (n, data) in f.iter_nodes():
             self.assertIn(n, tasks)
@@ -248,7 +246,7 @@ class GraphFlowTest(test.TestCase):
         task3 = _task('task3')
         f1 = gf.Flow('nested')
         f1.add(task3)
-        tasks = set([task1, task2, f1])
+        tasks = {task1, task2, f1}
         f = gf.Flow('test').add(task1, task2, f1)
         for (u, v, data) in f.iter_links():
             self.assertIn(u, tasks)
diff -pruN 5.12.0-2/taskflow/tests/unit/patterns/test_linear_flow.py 6.0.2-2/taskflow/tests/unit/patterns/test_linear_flow.py
--- 5.12.0-2/taskflow/tests/unit/patterns/test_linear_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/patterns/test_linear_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -65,8 +63,8 @@ class LinearFlowTest(test.TestCase):
         self.assertEqual(1, len(f))
         self.assertEqual([task], list(f))
         self.assertEqual([], list(f.iter_links()))
-        self.assertEqual(set(['a', 'b']), f.requires)
-        self.assertEqual(set(['c', 'd']), f.provides)
+        self.assertEqual({'a', 'b'}, f.requires)
+        self.assertEqual({'c', 'd'}, f.provides)
 
     def test_linear_flow_two_independent_tasks(self):
         task1 = _task(name='task1')
@@ -89,7 +87,7 @@ class LinearFlowTest(test.TestCase):
                          list(f.iter_links()))
 
         self.assertEqual(set(), f.requires)
-        self.assertEqual(set(['a']), f.provides)
+        self.assertEqual({'a'}, f.provides)
 
     def test_linear_flow_two_dependent_tasks_two_different_calls(self):
         task1 = _task(name='task1', provides=['a'])
@@ -120,15 +118,15 @@ class LinearFlowTest(test.TestCase):
         self.assertIs(f.retry, ret)
         self.assertEqual('test_retry', ret.name)
 
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['b']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'b'}, f.provides)
 
     def test_iter_nodes(self):
         task1 = _task(name='task1')
         task2 = _task(name='task2')
         task3 = _task(name='task3')
         f = lf.Flow('test').add(task1, task2, task3)
-        tasks = set([task1, task2, task3])
+        tasks = {task1, task2, task3}
         for (node, data) in f.iter_nodes():
             self.assertIn(node, tasks)
             self.assertDictEqual({}, data)
@@ -138,7 +136,7 @@ class LinearFlowTest(test.TestCase):
         task2 = _task(name='task2')
         task3 = _task(name='task3')
         f = lf.Flow('test').add(task1, task2, task3)
-        tasks = set([task1, task2, task3])
+        tasks = {task1, task2, task3}
         for (u, v, data) in f.iter_links():
             self.assertIn(u, tasks)
             self.assertIn(v, tasks)
diff -pruN 5.12.0-2/taskflow/tests/unit/patterns/test_unordered_flow.py 6.0.2-2/taskflow/tests/unit/patterns/test_unordered_flow.py
--- 5.12.0-2/taskflow/tests/unit/patterns/test_unordered_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/patterns/test_unordered_flow.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -65,8 +63,8 @@ class UnorderedFlowTest(test.TestCase):
         self.assertEqual(1, len(f))
         self.assertEqual([task], list(f))
         self.assertEqual([], list(f.iter_links()))
-        self.assertEqual(set(['a', 'b']), f.requires)
-        self.assertEqual(set(['c', 'd']), f.provides)
+        self.assertEqual({'a', 'b'}, f.requires)
+        self.assertEqual({'c', 'd'}, f.provides)
 
     def test_unordered_flow_two_tasks(self):
         task1 = _task(name='task1')
@@ -74,7 +72,7 @@ class UnorderedFlowTest(test.TestCase):
         f = uf.Flow('test').add(task1, task2)
 
         self.assertEqual(2, len(f))
-        self.assertEqual(set([task1, task2]), set(f))
+        self.assertEqual({task1, task2}, set(f))
         self.assertEqual([], list(f.iter_links()))
 
     def test_unordered_flow_two_tasks_two_different_calls(self):
@@ -83,16 +81,16 @@ class UnorderedFlowTest(test.TestCase):
         f = uf.Flow('test').add(task1)
         f.add(task2)
         self.assertEqual(2, len(f))
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['a']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'a'}, f.provides)
 
     def test_unordered_flow_two_tasks_reverse_order(self):
         task1 = _task(name='task1', provides=['a'])
         task2 = _task(name='task2', requires=['a'])
         f = uf.Flow('test').add(task2).add(task1)
         self.assertEqual(2, len(f))
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['a']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'a'}, f.provides)
 
     def test_unordered_flow_two_task_same_provide(self):
         task1 = _task(name='task1', provides=['a', 'b'])
@@ -107,8 +105,8 @@ class UnorderedFlowTest(test.TestCase):
         self.assertIs(f.retry, ret)
         self.assertEqual('test_retry', ret.name)
 
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['b']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'b'}, f.provides)
 
     def test_unordered_flow_with_retry_fully_satisfies(self):
         ret = retry.AlwaysRevert(provides=['b', 'a'])
@@ -116,13 +114,13 @@ class UnorderedFlowTest(test.TestCase):
         f.add(_task(name='task1', requires=['a']))
         self.assertIs(f.retry, ret)
         self.assertEqual('test_retry', ret.name)
-        self.assertEqual(set([]), f.requires)
-        self.assertEqual(set(['b', 'a']), f.provides)
+        self.assertEqual(set(), f.requires)
+        self.assertEqual({'b', 'a'}, f.provides)
 
     def test_iter_nodes(self):
         task1 = _task(name='task1', provides=['a', 'b'])
         task2 = _task(name='task2', provides=['a', 'c'])
-        tasks = set([task1, task2])
+        tasks = {task1, task2}
         f = uf.Flow('test')
         f.add(task2, task1)
         for (node, data) in f.iter_nodes():
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/base.py 6.0.2-2/taskflow/tests/unit/persistence/base.py
--- 5.12.0-2/taskflow/tests/unit/persistence/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/persistence/base.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -24,7 +22,7 @@ from taskflow import states
 from taskflow.types import failure
 
 
-class PersistenceTestMixin(object):
+class PersistenceTestMixin:
     def _get_connection(self):
         raise NotImplementedError('_get_connection() implementation required')
 
@@ -73,7 +71,7 @@ class PersistenceTestMixin(object):
         lb_ids = {}
         for i in range(0, 10):
             lb_id = uuidutils.generate_uuid()
-            lb_name = 'lb-%s-%s' % (i, lb_id)
+            lb_name = 'lb-{}-{}'.format(i, lb_id)
             lb = models.LogBook(name=lb_name, uuid=lb_id)
             lb_ids[lb_id] = True
 
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/test_dir_persistence.py 6.0.2-2/taskflow/tests/unit/persistence/test_dir_persistence.py
--- 5.12.0-2/taskflow/tests/unit/persistence/test_dir_persistence.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/persistence/test_dir_persistence.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -45,7 +43,7 @@ class DirPersistenceTest(testscenarios.T
         return self.backend.get_connection()
 
     def setUp(self):
-        super(DirPersistenceTest, self).setUp()
+        super().setUp()
         self.path = tempfile.mkdtemp()
         self.backend = impl_dir.DirBackend({
             'path': self.path,
@@ -55,7 +53,7 @@ class DirPersistenceTest(testscenarios.T
             conn.upgrade()
 
     def tearDown(self):
-        super(DirPersistenceTest, self).tearDown()
+        super().tearDown()
         if self.path and os.path.isdir(self.path):
             shutil.rmtree(self.path)
         self.path = None
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/test_memory_persistence.py 6.0.2-2/taskflow/tests/unit/persistence/test_memory_persistence.py
--- 5.12.0-2/taskflow/tests/unit/persistence/test_memory_persistence.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/persistence/test_memory_persistence.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,7 +23,7 @@ from taskflow.tests.unit.persistence imp
 
 class MemoryPersistenceTest(test.TestCase, base.PersistenceTestMixin):
     def setUp(self):
-        super(MemoryPersistenceTest, self).setUp()
+        super().setUp()
         self._backend = impl_memory.MemoryBackend({})
 
     def _get_connection(self):
@@ -35,7 +33,7 @@ class MemoryPersistenceTest(test.TestCas
         conn = self._get_connection()
         conn.clear_all()
         self._backend = None
-        super(MemoryPersistenceTest, self).tearDown()
+        super().tearDown()
 
     def test_memory_backend_entry_point(self):
         conf = {'connection': 'memory:'}
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/test_sql_persistence.py 6.0.2-2/taskflow/tests/unit/persistence/test_sql_persistence.py
--- 5.12.0-2/taskflow/tests/unit/persistence/test_sql_persistence.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/persistence/test_sql_persistence.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -61,7 +59,7 @@ def _get_connect_string(backend, user, p
         raise Exception("Unrecognized backend: '%s'" % backend)
     if not database:
         database = ''
-    return "%s://%s:%s@localhost/%s" % (backend, user, passwd, database)
+    return "{}://{}:{}@localhost/{}".format(backend, user, passwd, database)
 
 
 def _mysql_exists():
@@ -108,7 +106,7 @@ class SqlitePersistenceTest(test.TestCas
         return impl_sqlalchemy.SQLAlchemyBackend(conf).get_connection()
 
     def setUp(self):
-        super(SqlitePersistenceTest, self).setUp()
+        super().setUp()
         self.db_location = tempfile.mktemp(suffix='.db')
         self.db_uri = "sqlite:///%s" % (self.db_location)
         # Ensure upgraded to the right schema
@@ -116,7 +114,7 @@ class SqlitePersistenceTest(test.TestCas
             conn.upgrade()
 
     def tearDown(self):
-        super(SqlitePersistenceTest, self).tearDown()
+        super().tearDown()
         if self.db_location and os.path.isfile(self.db_location):
             os.unlink(self.db_location)
             self.db_location = None
@@ -146,7 +144,7 @@ class BackendPersistenceTestMixin(base.P
         """Cleans up by removing the database once the tests are done."""
 
     def setUp(self):
-        super(BackendPersistenceTestMixin, self).setUp()
+        super().setUp()
         self.backend = None
         try:
             self.db_uri = self._init_db()
@@ -175,7 +173,7 @@ class MysqlPersistenceTest(BackendPersis
             db_uri = _get_connect_string('mysql', USER, PASSWD)
             engine = sa.create_engine(db_uri)
             with contextlib.closing(engine.connect()) as conn:
-                conn.execute("CREATE DATABASE %s" % DATABASE)
+                conn.execute(sa.text("CREATE DATABASE %s" % DATABASE))
         except Exception as e:
             raise Exception('Failed to initialize MySQL db: %s' % (e))
         finally:
@@ -192,7 +190,7 @@ class MysqlPersistenceTest(BackendPersis
         try:
             engine = sa.create_engine(self.db_uri)
             with contextlib.closing(engine.connect()) as conn:
-                conn.execute("DROP DATABASE IF EXISTS %s" % DATABASE)
+                conn.execute(sa.text("DROP DATABASE IF EXISTS %s" % DATABASE))
         except Exception as e:
             raise Exception('Failed to remove temporary database: %s' % (e))
         finally:
@@ -217,7 +215,7 @@ class PostgresPersistenceTest(BackendPer
             engine = sa.create_engine(db_uri)
             with contextlib.closing(engine.connect()) as conn:
                 conn.connection.set_isolation_level(0)
-                conn.execute("CREATE DATABASE %s" % DATABASE)
+                conn.execute(sa.text("CREATE DATABASE %s" % DATABASE))
                 conn.connection.set_isolation_level(1)
         except Exception as e:
             raise Exception('Failed to initialize PostgreSQL db: %s' % (e))
@@ -241,7 +239,7 @@ class PostgresPersistenceTest(BackendPer
             engine = sa.create_engine(db_uri)
             with contextlib.closing(engine.connect()) as conn:
                 conn.connection.set_isolation_level(0)
-                conn.execute("DROP DATABASE IF EXISTS %s" % DATABASE)
+                conn.execute(sa.text("DROP DATABASE IF EXISTS %s" % DATABASE))
                 conn.connection.set_isolation_level(1)
         except Exception as e:
             raise Exception('Failed to remove temporary database: %s' % (e))
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/test_zk_persistence.py 6.0.2-2/taskflow/tests/unit/persistence/test_zk_persistence.py
--- 5.12.0-2/taskflow/tests/unit/persistence/test_zk_persistence.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/persistence/test_zk_persistence.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 AT&T Labs All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -56,7 +54,7 @@ class ZkPersistenceTest(test.TestCase, b
         return self.backend.get_connection()
 
     def setUp(self):
-        super(ZkPersistenceTest, self).setUp()
+        super().setUp()
         conf = test_utils.ZK_TEST_CONFIG.copy()
         # Create a unique path just for this test (so that we don't overwrite
         # what other tests are doing).
@@ -84,7 +82,7 @@ class ZakePersistenceTest(test.TestCase,
         return self._backend.get_connection()
 
     def setUp(self):
-        super(ZakePersistenceTest, self).setUp()
+        super().setUp()
         conf = {
             "path": "/taskflow",
         }
diff -pruN 5.12.0-2/taskflow/tests/unit/test_arguments_passing.py 6.0.2-2/taskflow/tests/unit/test_arguments_passing.py
--- 5.12.0-2/taskflow/tests/unit/test_arguments_passing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_arguments_passing.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -23,11 +21,6 @@ from taskflow import test
 from taskflow.tests import utils
 from taskflow.utils import eventlet_utils as eu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 class ArgumentsPassingTest(utils.EngineTestBase):
 
@@ -55,9 +48,9 @@ class ArgumentsPassingTest(utils.EngineT
         }, engine.storage.fetch_all())
 
     def test_save_dict(self):
-        flow = utils.TaskMultiDict(provides=set(['badger',
-                                                 'mushroom',
-                                                 'snake']))
+        flow = utils.TaskMultiDict(provides={'badger',
+                                             'mushroom',
+                                             'snake'})
         engine = self._make_engine(flow)
         engine.run()
         self.assertEqual({
@@ -224,18 +217,3 @@ class ParallelEngineWithEventletTest(Arg
                                      backend=self.backend,
                                      engine='parallel',
                                      executor=executor)
-
-
-@testtools.skipIf(pe is None, 'process_executor is not available')
-class ParallelEngineWithProcessTest(ArgumentsPassingTest, test.TestCase):
-    _EXECUTOR_WORKERS = 2
-
-    def _make_engine(self, flow, flow_detail=None, executor=None):
-        if executor is None:
-            executor = 'processes'
-        return taskflow.engines.load(flow,
-                                     flow_detail=flow_detail,
-                                     backend=self.backend,
-                                     engine='parallel',
-                                     executor=executor,
-                                     max_workers=self._EXECUTOR_WORKERS)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_check_transition.py 6.0.2-2/taskflow/tests/unit/test_check_transition.py
--- 5.12.0-2/taskflow/tests/unit/test_check_transition.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_check_transition.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -50,7 +48,7 @@ class TransitionTest(test.TestCase):
 class CheckFlowTransitionTest(TransitionTest):
 
     def setUp(self):
-        super(CheckFlowTransitionTest, self).setUp()
+        super().setUp()
         self.check_transition = states.check_flow_transition
         self.transition_exc_regexp = '^Flow transition.*not allowed'
 
@@ -73,7 +71,7 @@ class CheckFlowTransitionTest(Transition
 class CheckTaskTransitionTest(TransitionTest):
 
     def setUp(self):
-        super(CheckTaskTransitionTest, self).setUp()
+        super().setUp()
         self.check_transition = states.check_task_transition
         self.transition_exc_regexp = '^Task transition.*not allowed'
 
@@ -122,7 +120,7 @@ class CheckTaskTransitionTest(Transition
 class CheckRetryTransitionTest(CheckTaskTransitionTest):
 
     def setUp(self):
-        super(CheckRetryTransitionTest, self).setUp()
+        super().setUp()
         self.check_transition = states.check_retry_transition
         self.transition_exc_regexp = '^Retry transition.*not allowed'
 
diff -pruN 5.12.0-2/taskflow/tests/unit/test_conductors.py 6.0.2-2/taskflow/tests/unit/test_conductors.py
--- 5.12.0-2/taskflow/tests/unit/test_conductors.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_conductors.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_deciders.py 6.0.2-2/taskflow/tests/unit/test_deciders.py
--- 5.12.0-2/taskflow/tests/unit/test_deciders.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_deciders.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -55,4 +53,4 @@ class TestDeciders(test.TestCase):
     def test_bad_pick_widest(self):
         self.assertRaises(ValueError, deciders.pick_widest, [])
         self.assertRaises(ValueError, deciders.pick_widest, ["a"])
-        self.assertRaises(ValueError, deciders.pick_widest, set(['b']))
+        self.assertRaises(ValueError, deciders.pick_widest, {'b'})
diff -pruN 5.12.0-2/taskflow/tests/unit/test_engine_helpers.py 6.0.2-2/taskflow/tests/unit/test_engine_helpers.py
--- 5.12.0-2/taskflow/tests/unit/test_engine_helpers.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_engine_helpers.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_engines.py 6.0.2-2/taskflow/tests/unit/test_engines.py
--- 5.12.0-2/taskflow/tests/unit/test_engines.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_engines.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -41,11 +39,6 @@ from taskflow.utils import eventlet_util
 from taskflow.utils import persistence_utils as p_utils
 from taskflow.utils import threading_utils as tu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 # Expected engine transitions when empty workflows are ran...
 _EMPTY_TRANSITIONS = [
@@ -54,7 +47,7 @@ _EMPTY_TRANSITIONS = [
 ]
 
 
-class EngineTaskNotificationsTest(object):
+class EngineTaskNotificationsTest:
     def test_run_capture_task_notifications(self):
         captured = collections.defaultdict(list)
 
@@ -89,7 +82,7 @@ class EngineTaskNotificationsTest(object
             self.assertEqual(expected, captured[name])
 
 
-class EngineTaskTest(object):
+class EngineTaskTest:
 
     def test_run_task_as_flow(self):
         flow = utils.ProgressingTask(name='task1')
@@ -583,8 +576,8 @@ class EngineParallelFlowTest(utils.Engin
         engine = self._make_engine(flow)
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
-        expected = set(['task2.t SUCCESS(5)', 'task2.t RUNNING',
-                        'task1.t RUNNING', 'task1.t SUCCESS(5)'])
+        expected = {'task2.t SUCCESS(5)', 'task2.t RUNNING',
+                    'task1.t RUNNING', 'task1.t SUCCESS(5)'}
         self.assertEqual(expected, set(capturer.values))
 
     def test_parallel_revert(self):
@@ -858,8 +851,8 @@ class EngineGraphFlowTest(utils.EngineTe
         engine = self._make_engine(flow)
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
-        expected = set(['task2.t SUCCESS(5)', 'task2.t RUNNING',
-                        'task1.t RUNNING', 'task1.t SUCCESS(5)'])
+        expected = {'task2.t SUCCESS(5)', 'task2.t RUNNING',
+                    'task1.t RUNNING', 'task1.t SUCCESS(5)'}
         self.assertEqual(expected, set(capturer.values))
         self.assertEqual(2, len(flow))
 
@@ -1223,7 +1216,7 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING',
             'task1.t SUCCESS(5)',
 
@@ -1232,7 +1225,7 @@ class EngineGraphConditionalFlowTest(uti
 
             'task3.t RUNNING',
             'task3.t SUCCESS(5)',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
 
     def test_graph_flow_conditional_ignore_reset(self):
@@ -1251,7 +1244,7 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING',
             'task1.t SUCCESS(5)',
 
@@ -1259,7 +1252,7 @@ class EngineGraphConditionalFlowTest(uti
             'task2.t SUCCESS(5)',
 
             'task3.t IGNORE',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
         self.assertEqual(states.IGNORE,
                          engine.storage.get_atom_state('task3'))
@@ -1271,7 +1264,7 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING',
             'task1.t SUCCESS(5)',
 
@@ -1280,7 +1273,7 @@ class EngineGraphConditionalFlowTest(uti
 
             'task3.t RUNNING',
             'task3.t SUCCESS(5)',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
 
     def test_graph_flow_diamond_ignored(self):
@@ -1301,7 +1294,7 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING',
             'task1.t SUCCESS(5)',
 
@@ -1312,7 +1305,7 @@ class EngineGraphConditionalFlowTest(uti
             'task3.t SUCCESS(5)',
 
             'task4.t IGNORE',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
         self.assertEqual(states.IGNORE,
                          engine.storage.get_atom_state('task4'))
@@ -1350,12 +1343,12 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING', 'task1.t SUCCESS(2)',
             'task3.t IGNORE', 'task3_3.t IGNORE',
             'task2.t RUNNING', 'task2.t SUCCESS(5)',
             'task2_2.t RUNNING', 'task2_2.t SUCCESS(5)',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
 
         engine = self._make_engine(flow)
@@ -1363,12 +1356,12 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING', 'task1.t SUCCESS(1)',
             'task2.t IGNORE', 'task2_2.t IGNORE',
             'task3.t RUNNING', 'task3.t SUCCESS(5)',
             'task3_3.t RUNNING', 'task3_3.t SUCCESS(5)',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
 
 
@@ -1499,82 +1492,6 @@ class ParallelEngineWithEventletTest(Eng
                                      store=store, **kwargs)
 
 
-@testtools.skipIf(pe is None, 'process_executor is not available')
-class ParallelEngineWithProcessTest(EngineTaskTest,
-                                    EngineMultipleResultsTest,
-                                    EngineLinearFlowTest,
-                                    EngineParallelFlowTest,
-                                    EngineLinearAndUnorderedExceptionsTest,
-                                    EngineOptionalRequirementsTest,
-                                    EngineGraphFlowTest,
-                                    EngineResetTests,
-                                    EngineMissingDepsTest,
-                                    EngineGraphConditionalFlowTest,
-                                    EngineDeciderDepthTest,
-                                    EngineTaskNotificationsTest,
-                                    test.TestCase):
-    _EXECUTOR_WORKERS = 2
-
-    def test_correct_load(self):
-        engine = self._make_engine(utils.TaskNoRequiresNoReturns)
-        self.assertIsInstance(engine, eng.ParallelActionEngine)
-
-    def _make_engine(self, flow,
-                     flow_detail=None, executor=None, store=None,
-                     **kwargs):
-        if executor is None:
-            executor = 'processes'
-        return taskflow.engines.load(flow, flow_detail=flow_detail,
-                                     backend=self.backend,
-                                     engine='parallel',
-                                     executor=executor,
-                                     store=store,
-                                     max_workers=self._EXECUTOR_WORKERS,
-                                     **kwargs)
-
-    def test_update_progress_notifications_proxied(self):
-        captured = collections.defaultdict(list)
-
-        def notify_me(event_type, details):
-            captured[event_type].append(details)
-
-        a = utils.MultiProgressingTask('a')
-        a.notifier.register(a.notifier.ANY, notify_me)
-        progress_chunks = list(x / 10.0 for x in range(1, 10))
-        e = self._make_engine(a, store={'progress_chunks': progress_chunks})
-        e.run()
-
-        self.assertEqual(11, len(captured[task.EVENT_UPDATE_PROGRESS]))
-
-    def test_custom_notifications_proxied(self):
-        captured = collections.defaultdict(list)
-
-        def notify_me(event_type, details):
-            captured[event_type].append(details)
-
-        a = utils.EmittingTask('a')
-        a.notifier.register(a.notifier.ANY, notify_me)
-        e = self._make_engine(a)
-        e.run()
-
-        self.assertEqual(1, len(captured['hi']))
-        self.assertEqual(2, len(captured[task.EVENT_UPDATE_PROGRESS]))
-
-    def test_just_custom_notifications_proxied(self):
-        captured = collections.defaultdict(list)
-
-        def notify_me(event_type, details):
-            captured[event_type].append(details)
-
-        a = utils.EmittingTask('a')
-        a.notifier.register('hi', notify_me)
-        e = self._make_engine(a)
-        e.run()
-
-        self.assertEqual(1, len(captured['hi']))
-        self.assertEqual(0, len(captured[task.EVENT_UPDATE_PROGRESS]))
-
-
 class WorkerBasedEngineTest(EngineTaskTest,
                             EngineMultipleResultsTest,
                             EngineLinearFlowTest,
@@ -1589,7 +1506,7 @@ class WorkerBasedEngineTest(EngineTaskTe
                             EngineTaskNotificationsTest,
                             test.TestCase):
     def setUp(self):
-        super(WorkerBasedEngineTest, self).setUp()
+        super().setUp()
         shared_conf = {
             'exchange': 'test',
             'transport': 'memory',
diff -pruN 5.12.0-2/taskflow/tests/unit/test_exceptions.py 6.0.2-2/taskflow/tests/unit/test_exceptions.py
--- 5.12.0-2/taskflow/tests/unit/test_exceptions.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_exceptions.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -44,7 +42,7 @@ class TestExceptions(test.TestCase):
     def test_raise_with(self):
         capture = None
         try:
-            raise IOError('broken')
+            raise OSError('broken')
         except Exception:
             try:
                 exc.raise_with_cause(exc.TaskFlowException, 'broken')
@@ -73,8 +71,8 @@ class TestExceptions(test.TestCase):
         try:
             try:
                 try:
-                    raise IOError("Didn't work")
-                except IOError:
+                    raise OSError("Didn't work")
+                except OSError:
                     exc.raise_with_cause(exc.TaskFlowException,
                                          "It didn't go so well")
             except exc.TaskFlowException:
@@ -109,7 +107,7 @@ class TestExceptions(test.TestCase):
     def test_raise_with_cause(self):
         capture = None
         try:
-            raise IOError('broken')
+            raise OSError('broken')
         except Exception:
             try:
                 exc.raise_with_cause(exc.TaskFlowException, 'broken')
diff -pruN 5.12.0-2/taskflow/tests/unit/test_failure.py 6.0.2-2/taskflow/tests/unit/test_failure.py
--- 5.12.0-2/taskflow/tests/unit/test_failure.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_failure.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -37,7 +35,7 @@ def _make_exc_info(msg):
         return sys.exc_info()
 
 
-class GeneralFailureObjTestsMixin(object):
+class GeneralFailureObjTestsMixin:
 
     def test_captures_message(self):
         self.assertEqual('Woot!', self.fail_obj.exception_str)
@@ -72,7 +70,7 @@ class GeneralFailureObjTestsMixin(object
 class CaptureFailureTestCase(test.TestCase, GeneralFailureObjTestsMixin):
 
     def setUp(self):
-        super(CaptureFailureTestCase, self).setUp()
+        super().setUp()
         self.fail_obj = _captured_failure('Woot!')
 
     def test_captures_value(self):
@@ -91,7 +89,7 @@ class CaptureFailureTestCase(test.TestCa
 class ReCreatedFailureTestCase(test.TestCase, GeneralFailureObjTestsMixin):
 
     def setUp(self):
-        super(ReCreatedFailureTestCase, self).setUp()
+        super().setUp()
         fail_obj = _captured_failure('Woot!')
         self.fail_obj = failure.Failure(exception_str=fail_obj.exception_str,
                                         traceback_str=fail_obj.traceback_str,
@@ -124,7 +122,7 @@ class ReCreatedFailureTestCase(test.Test
 class FromExceptionTestCase(test.TestCase, GeneralFailureObjTestsMixin):
 
     def setUp(self):
-        super(FromExceptionTestCase, self).setUp()
+        super().setUp()
         self.fail_obj = failure.Failure.from_exception(RuntimeError('Woot!'))
 
     def test_pformat_no_traceback(self):
@@ -333,24 +331,24 @@ class NonAsciiExceptionsTestCase(test.Te
         excp = ValueError(bad_string)
         fail = failure.Failure.from_exception(excp)
         self.assertEqual(str(excp), fail.exception_str)
-        expected = u'Failure: ValueError: \xc8'
+        expected = 'Failure: ValueError: \xc8'
         self.assertEqual(expected, str(fail))
 
     def test_exception_non_ascii_unicode(self):
-        hi_ru = u'привет'
+        hi_ru = 'привет'
         fail = failure.Failure.from_exception(ValueError(hi_ru))
         self.assertEqual(hi_ru, fail.exception_str)
         self.assertIsInstance(fail.exception_str, str)
-        self.assertEqual(u'Failure: ValueError: %s' % hi_ru,
+        self.assertEqual('Failure: ValueError: %s' % hi_ru,
                          str(fail))
 
     def test_wrapped_failure_non_ascii_unicode(self):
-        hi_cn = u'嗨'
+        hi_cn = '嗨'
         fail = ValueError(hi_cn)
         self.assertEqual(hi_cn, str(fail))
         fail = failure.Failure.from_exception(fail)
         wrapped_fail = exceptions.WrappedFailure([fail])
-        expected_result = (u"WrappedFailure: "
+        expected_result = ("WrappedFailure: "
                            "[Failure: ValueError: %s]" % (hi_cn))
         self.assertEqual(expected_result, str(wrapped_fail))
 
@@ -361,7 +359,7 @@ class NonAsciiExceptionsTestCase(test.Te
         self.assertEqual(fail, copied)
 
     def test_failure_equality_non_ascii_unicode(self):
-        hi_ru = u'привет'
+        hi_ru = 'привет'
         fail = failure.Failure.from_exception(ValueError(hi_ru))
         copied = fail.copy()
         self.assertEqual(fail, copied)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_flow_dependencies.py 6.0.2-2/taskflow/tests/unit/test_flow_dependencies.py
--- 5.12.0-2/taskflow/tests/unit/test_flow_dependencies.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_flow_dependencies.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -32,28 +30,28 @@ class FlowDependenciesTest(test.TestCase
 
     def test_task_requires_default_values(self):
         flow = utils.TaskMultiArg()
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides, )
 
     def test_task_requires_rebinded_mapped(self):
         flow = utils.TaskMultiArg(rebind={'x': 'a', 'y': 'b', 'z': 'c'})
-        self.assertEqual(set(['a', 'b', 'c']), flow.requires)
+        self.assertEqual({'a', 'b', 'c'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_task_requires_additional_values(self):
         flow = utils.TaskMultiArg(requires=['a', 'b'])
-        self.assertEqual(set(['a', 'b', 'x', 'y', 'z']), flow.requires)
+        self.assertEqual({'a', 'b', 'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_task_provides_values(self):
         flow = utils.TaskMultiReturn(provides=['a', 'b', 'c'])
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['a', 'b', 'c']), flow.provides)
+        self.assertEqual({'a', 'b', 'c'}, flow.provides)
 
     def test_task_provides_and_requires_values(self):
         flow = utils.TaskMultiArgMultiReturn(provides=['a', 'b', 'c'])
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
-        self.assertEqual(set(['a', 'b', 'c']), flow.provides)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
+        self.assertEqual({'a', 'b', 'c'}, flow.provides)
 
     def test_linear_flow_without_dependencies(self):
         flow = lf.Flow('lf').add(
@@ -66,14 +64,14 @@ class FlowDependenciesTest(test.TestCase
         flow = lf.Flow('lf').add(
             utils.TaskOneArg('task1'),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_linear_flow_requires_rebind_values(self):
         flow = lf.Flow('lf').add(
             utils.TaskOneArg('task1', rebind=['q']),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z', 'q']), flow.requires)
+        self.assertEqual({'x', 'y', 'z', 'q'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_linear_flow_provides_values(self):
@@ -81,14 +79,14 @@ class FlowDependenciesTest(test.TestCase
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskMultiReturn('task2', provides=['a', 'b', 'c']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'a', 'b', 'c']), flow.provides)
+        self.assertEqual({'x', 'a', 'b', 'c'}, flow.provides)
 
     def test_linear_flow_provides_required_values(self):
         flow = lf.Flow('lf').add(
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskOneArg('task2'))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_linear_flow_multi_provides_and_requires_values(self):
         flow = lf.Flow('lf').add(
@@ -97,8 +95,8 @@ class FlowDependenciesTest(test.TestCase
                                           provides=['x', 'y', 'q']),
             utils.TaskMultiArgMultiReturn('task2',
                                           provides=['i', 'j', 'k']))
-        self.assertEqual(set(['a', 'b', 'c', 'z']), flow.requires)
-        self.assertEqual(set(['x', 'y', 'q', 'i', 'j', 'k']), flow.provides)
+        self.assertEqual({'a', 'b', 'c', 'z'}, flow.requires)
+        self.assertEqual({'x', 'y', 'q', 'i', 'j', 'k'}, flow.provides)
 
     def test_unordered_flow_without_dependencies(self):
         flow = uf.Flow('uf').add(
@@ -111,14 +109,14 @@ class FlowDependenciesTest(test.TestCase
         flow = uf.Flow('uf').add(
             utils.TaskOneArg('task1'),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_unordered_flow_requires_rebind_values(self):
         flow = uf.Flow('uf').add(
             utils.TaskOneArg('task1', rebind=['q']),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z', 'q']), flow.requires)
+        self.assertEqual({'x', 'y', 'z', 'q'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_unordered_flow_provides_values(self):
@@ -126,7 +124,7 @@ class FlowDependenciesTest(test.TestCase
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskMultiReturn('task2', provides=['a', 'b', 'c']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'a', 'b', 'c']), flow.provides)
+        self.assertEqual({'x', 'a', 'b', 'c'}, flow.provides)
 
     def test_unordered_flow_provides_required_values(self):
         flow = uf.Flow('uf')
@@ -134,23 +132,23 @@ class FlowDependenciesTest(test.TestCase
                  utils.TaskOneArg('task2'))
         flow.add(utils.TaskOneReturn('task1', provides='x'),
                  utils.TaskOneArg('task2'))
-        self.assertEqual(set(['x']), flow.provides)
-        self.assertEqual(set(['x']), flow.requires)
+        self.assertEqual({'x'}, flow.provides)
+        self.assertEqual({'x'}, flow.requires)
 
     def test_unordered_flow_requires_provided_value_other_call(self):
         flow = uf.Flow('uf')
         flow.add(utils.TaskOneReturn('task1', provides='x'))
         flow.add(utils.TaskOneArg('task2'))
-        self.assertEqual(set(['x']), flow.provides)
-        self.assertEqual(set(['x']), flow.requires)
+        self.assertEqual({'x'}, flow.provides)
+        self.assertEqual({'x'}, flow.requires)
 
     def test_unordered_flow_provides_required_value_other_call(self):
         flow = uf.Flow('uf')
         flow.add(utils.TaskOneArg('task2'))
         flow.add(utils.TaskOneReturn('task1', provides='x'))
         self.assertEqual(2, len(flow))
-        self.assertEqual(set(['x']), flow.provides)
-        self.assertEqual(set(['x']), flow.requires)
+        self.assertEqual({'x'}, flow.provides)
+        self.assertEqual({'x'}, flow.requires)
 
     def test_unordered_flow_multi_provides_and_requires_values(self):
         flow = uf.Flow('uf').add(
@@ -159,19 +157,19 @@ class FlowDependenciesTest(test.TestCase
                                           provides=['d', 'e', 'f']),
             utils.TaskMultiArgMultiReturn('task2',
                                           provides=['i', 'j', 'k']))
-        self.assertEqual(set(['a', 'b', 'c', 'x', 'y', 'z']), flow.requires)
-        self.assertEqual(set(['d', 'e', 'f', 'i', 'j', 'k']), flow.provides)
+        self.assertEqual({'a', 'b', 'c', 'x', 'y', 'z'}, flow.requires)
+        self.assertEqual({'d', 'e', 'f', 'i', 'j', 'k'}, flow.provides)
 
     def test_unordered_flow_provides_same_values(self):
         flow = uf.Flow('uf').add(utils.TaskOneReturn(provides='x'))
         flow.add(utils.TaskOneReturn(provides='x'))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_unordered_flow_provides_same_values_one_add(self):
         flow = uf.Flow('uf')
         flow.add(utils.TaskOneReturn(provides='x'),
                  utils.TaskOneReturn(provides='x'))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_nested_flows_requirements(self):
         flow = uf.Flow('uf').add(
@@ -184,21 +182,21 @@ class FlowDependenciesTest(test.TestCase
                                           rebind=['b'], provides=['z']),
                 utils.TaskOneArgOneReturn('task4', rebind=['c'],
                                           provides=['q'])))
-        self.assertEqual(set(['a', 'b', 'c']), flow.requires)
-        self.assertEqual(set(['x', 'y', 'z', 'q']), flow.provides)
+        self.assertEqual({'a', 'b', 'c'}, flow.requires)
+        self.assertEqual({'x', 'y', 'z', 'q'}, flow.provides)
 
     def test_graph_flow_requires_values(self):
         flow = gf.Flow('gf').add(
             utils.TaskOneArg('task1'),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_graph_flow_requires_rebind_values(self):
         flow = gf.Flow('gf').add(
             utils.TaskOneArg('task1', rebind=['q']),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z', 'q']), flow.requires)
+        self.assertEqual({'x', 'y', 'z', 'q'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_graph_flow_provides_values(self):
@@ -206,20 +204,20 @@ class FlowDependenciesTest(test.TestCase
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskMultiReturn('task2', provides=['a', 'b', 'c']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'a', 'b', 'c']), flow.provides)
+        self.assertEqual({'x', 'a', 'b', 'c'}, flow.provides)
 
     def test_graph_flow_provides_required_values(self):
         flow = gf.Flow('gf').add(
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskOneArg('task2'))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_graph_flow_provides_provided_value_other_call(self):
         flow = gf.Flow('gf')
         flow.add(utils.TaskOneReturn('task1', provides='x'))
         flow.add(utils.TaskOneReturn('task2', provides='x'))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_graph_flow_multi_provides_and_requires_values(self):
         flow = gf.Flow('gf').add(
@@ -228,8 +226,8 @@ class FlowDependenciesTest(test.TestCase
                                           provides=['d', 'e', 'f']),
             utils.TaskMultiArgMultiReturn('task2',
                                           provides=['i', 'j', 'k']))
-        self.assertEqual(set(['a', 'b', 'c', 'x', 'y', 'z']), flow.requires)
-        self.assertEqual(set(['d', 'e', 'f', 'i', 'j', 'k']), flow.provides)
+        self.assertEqual({'a', 'b', 'c', 'x', 'y', 'z'}, flow.requires)
+        self.assertEqual({'d', 'e', 'f', 'i', 'j', 'k'}, flow.provides)
 
     def test_graph_cyclic_dependency(self):
         flow = gf.Flow('g-3-cyclic')
@@ -255,27 +253,27 @@ class FlowDependenciesTest(test.TestCase
 
     def test_retry_in_linear_flow_with_requirements(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt', requires=['x', 'y']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
+        self.assertEqual({'x', 'y'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_retry_in_linear_flow_with_provides(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt', provides=['x', 'y']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_retry_in_linear_flow_requires_and_provides(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt',
                                                 requires=['x', 'y'],
                                                 provides=['a', 'b']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
-        self.assertEqual(set(['a', 'b']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.requires)
+        self.assertEqual({'a', 'b'}, flow.provides)
 
     def test_retry_requires_and_provides_same_value(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt',
                                                 requires=['x', 'y'],
                                                 provides=['x', 'y']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.requires)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_retry_in_unordered_flow_no_requirements_no_provides(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt'))
@@ -284,20 +282,20 @@ class FlowDependenciesTest(test.TestCase
 
     def test_retry_in_unordered_flow_with_requirements(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', requires=['x', 'y']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
+        self.assertEqual({'x', 'y'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_retry_in_unordered_flow_with_provides(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', provides=['x', 'y']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_retry_in_unordered_flow_requires_and_provides(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt',
                                                 requires=['x', 'y'],
                                                 provides=['a', 'b']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
-        self.assertEqual(set(['a', 'b']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.requires)
+        self.assertEqual({'a', 'b'}, flow.provides)
 
     def test_retry_in_graph_flow_no_requirements_no_provides(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt'))
@@ -306,20 +304,20 @@ class FlowDependenciesTest(test.TestCase
 
     def test_retry_in_graph_flow_with_requirements(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt', requires=['x', 'y']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
+        self.assertEqual({'x', 'y'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_retry_in_graph_flow_with_provides(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt', provides=['x', 'y']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_retry_in_graph_flow_requires_and_provides(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt',
                                                 requires=['x', 'y'],
                                                 provides=['a', 'b']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
-        self.assertEqual(set(['a', 'b']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.requires)
+        self.assertEqual({'a', 'b'}, flow.provides)
 
     def test_linear_flow_retry_and_task(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt',
@@ -328,8 +326,8 @@ class FlowDependenciesTest(test.TestCase
         flow.add(utils.TaskMultiArgOneReturn(rebind=['a', 'x', 'c'],
                                              provides=['z']))
 
-        self.assertEqual(set(['x', 'y', 'c']), flow.requires)
-        self.assertEqual(set(['a', 'b', 'z']), flow.provides)
+        self.assertEqual({'x', 'y', 'c'}, flow.requires)
+        self.assertEqual({'a', 'b', 'z'}, flow.provides)
 
     def test_unordered_flow_retry_and_task(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt',
@@ -338,25 +336,25 @@ class FlowDependenciesTest(test.TestCase
         flow.add(utils.TaskMultiArgOneReturn(rebind=['a', 'x', 'c'],
                                              provides=['z']))
 
-        self.assertEqual(set(['x', 'y', 'c']), flow.requires)
-        self.assertEqual(set(['a', 'b', 'z']), flow.provides)
+        self.assertEqual({'x', 'y', 'c'}, flow.requires)
+        self.assertEqual({'a', 'b', 'z'}, flow.provides)
 
     def test_unordered_flow_retry_and_task_same_requires_provides(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', requires=['x']))
         flow.add(utils.TaskOneReturn(provides=['x']))
-        self.assertEqual(set(['x']), flow.requires)
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.requires)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_unordered_flow_retry_and_task_provide_same_value(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', provides=['x']))
         flow.add(utils.TaskOneReturn('t1', provides=['x']))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_unordered_flow_retry_two_tasks_provide_same_value(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', provides=['y']))
         flow.add(utils.TaskOneReturn('t1', provides=['x']),
                  utils.TaskOneReturn('t2', provides=['x']))
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_graph_flow_retry_and_task(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt',
@@ -365,19 +363,19 @@ class FlowDependenciesTest(test.TestCase
         flow.add(utils.TaskMultiArgOneReturn(rebind=['a', 'x', 'c'],
                                              provides=['z']))
 
-        self.assertEqual(set(['x', 'y', 'c']), flow.requires)
-        self.assertEqual(set(['a', 'b', 'z']), flow.provides)
+        self.assertEqual({'x', 'y', 'c'}, flow.requires)
+        self.assertEqual({'a', 'b', 'z'}, flow.provides)
 
     def test_graph_flow_retry_and_task_dependency_provide_require(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt', requires=['x']))
         flow.add(utils.TaskOneReturn(provides=['x']))
-        self.assertEqual(set(['x']), flow.provides)
-        self.assertEqual(set(['x']), flow.requires)
+        self.assertEqual({'x'}, flow.provides)
+        self.assertEqual({'x'}, flow.requires)
 
     def test_graph_flow_retry_and_task_provide_same_value(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt', provides=['x']))
         flow.add(utils.TaskOneReturn('t1', provides=['x']))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_builtin_retry_args(self):
 
@@ -389,4 +387,4 @@ class FlowDependenciesTest(test.TestCase
                 pass
 
         flow = lf.Flow('lf', retry=FullArgsRetry(requires='a'))
-        self.assertEqual(set(['a']), flow.requires)
+        self.assertEqual({'a'}, flow.requires)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_formatters.py 6.0.2-2/taskflow/tests/unit/test_formatters.py
--- 5.12.0-2/taskflow/tests/unit/test_formatters.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_formatters.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_functor_task.py 6.0.2-2/taskflow/tests/unit/test_functor_task.py
--- 5.12.0-2/taskflow/tests/unit/test_functor_task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_functor_task.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -24,7 +22,7 @@ def add(a, b):
     return a + b
 
 
-class BunchOfFunctions(object):
+class BunchOfFunctions:
 
     def __init__(self, values):
         self.values = values
diff -pruN 5.12.0-2/taskflow/tests/unit/test_listeners.py 6.0.2-2/taskflow/tests/unit/test_listeners.py
--- 5.12.0-2/taskflow/tests/unit/test_listeners.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_listeners.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -52,7 +50,7 @@ _LOG_LEVELS = frozenset([
 
 class SleepyTask(task.Task):
     def __init__(self, name, sleep_for=0.0):
-        super(SleepyTask, self).__init__(name=name)
+        super().__init__(name=name)
         self._sleep_for = float(sleep_for)
 
     def execute(self):
@@ -62,7 +60,7 @@ class SleepyTask(task.Task):
             time.sleep(self._sleep_for)
 
 
-class EngineMakerMixin(object):
+class EngineMakerMixin:
     def _make_engine(self, flow, flow_detail=None, backend=None):
         e = taskflow.engines.load(flow,
                                   flow_detail=flow_detail,
@@ -80,7 +78,7 @@ class TestClaimListener(test.TestCase, E
         return f
 
     def setUp(self):
-        super(TestClaimListener, self).setUp()
+        super().setUp()
         self.client = fake_client.FakeClient()
         self.addCleanup(self.client.stop)
         self.board = jobs.fetch('test', 'zookeeper', client=self.client)
@@ -315,7 +313,7 @@ class TestLoggingListeners(test.TestCase
         with logging_listeners.LoggingListener(e, log=log):
             e.run()
         self.assertGreater(0, handler.counts[logging.DEBUG])
-        for levelno in _LOG_LEVELS - set([logging.DEBUG]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG}:
             self.assertEqual(0, handler.counts[levelno])
         self.assertEqual([], handler.exc_infos)
 
@@ -329,7 +327,7 @@ class TestLoggingListeners(test.TestCase
         with listener:
             e.run()
         self.assertGreater(0, handler.counts[logging.INFO])
-        for levelno in _LOG_LEVELS - set([logging.INFO]):
+        for levelno in _LOG_LEVELS - {logging.INFO}:
             self.assertEqual(0, handler.counts[levelno])
         self.assertEqual([], handler.exc_infos)
 
@@ -341,7 +339,7 @@ class TestLoggingListeners(test.TestCase
         with logging_listeners.LoggingListener(e, log=log):
             self.assertRaises(RuntimeError, e.run)
         self.assertGreater(0, handler.counts[logging.DEBUG])
-        for levelno in _LOG_LEVELS - set([logging.DEBUG]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG}:
             self.assertEqual(0, handler.counts[levelno])
         self.assertEqual(1, len(handler.exc_infos))
 
@@ -353,7 +351,7 @@ class TestLoggingListeners(test.TestCase
         with logging_listeners.DynamicLoggingListener(e, log=log):
             e.run()
         self.assertGreater(0, handler.counts[logging.DEBUG])
-        for levelno in _LOG_LEVELS - set([logging.DEBUG]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG}:
             self.assertEqual(0, handler.counts[levelno])
         self.assertEqual([], handler.exc_infos)
 
@@ -367,7 +365,7 @@ class TestLoggingListeners(test.TestCase
         self.assertGreater(0, handler.counts[logging.WARNING])
         self.assertGreater(0, handler.counts[logging.DEBUG])
         self.assertEqual(1, len(handler.exc_infos))
-        for levelno in _LOG_LEVELS - set([logging.DEBUG, logging.WARNING]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG, logging.WARNING}:
             self.assertEqual(0, handler.counts[levelno])
 
     def test_dynamic_failure_customized_level(self):
@@ -382,5 +380,5 @@ class TestLoggingListeners(test.TestCase
         self.assertGreater(0, handler.counts[logging.ERROR])
         self.assertGreater(0, handler.counts[logging.DEBUG])
         self.assertEqual(1, len(handler.exc_infos))
-        for levelno in _LOG_LEVELS - set([logging.DEBUG, logging.ERROR]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG, logging.ERROR}:
             self.assertEqual(0, handler.counts[levelno])
diff -pruN 5.12.0-2/taskflow/tests/unit/test_mapfunctor_task.py 6.0.2-2/taskflow/tests/unit/test_mapfunctor_task.py
--- 5.12.0-2/taskflow/tests/unit/test_mapfunctor_task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_mapfunctor_task.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright 2015 Hewlett-Packard Development Company, L.P.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ square = lambda x: x * x
 class MapFunctorTaskTest(test.TestCase):
 
     def setUp(self):
-        super(MapFunctorTaskTest, self).setUp()
+        super().setUp()
 
         self.flow_store = {
             'a': 1,
diff -pruN 5.12.0-2/taskflow/tests/unit/test_notifier.py 6.0.2-2/taskflow/tests/unit/test_notifier.py
--- 5.12.0-2/taskflow/tests/unit/test_notifier.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_notifier.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -57,7 +55,7 @@ class NotifierTest(test.TestCase):
         def call_me(state, details):
             pass
 
-        class A(object):
+        class A:
             def call_me_too(self, state, details):
                 pass
 
diff -pruN 5.12.0-2/taskflow/tests/unit/test_progress.py 6.0.2-2/taskflow/tests/unit/test_progress.py
--- 5.12.0-2/taskflow/tests/unit/test_progress.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_progress.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,7 +24,7 @@ from taskflow.utils import persistence_u
 
 class ProgressTask(task.Task):
     def __init__(self, name, segments):
-        super(ProgressTask, self).__init__(name=name)
+        super().__init__(name=name)
         self._segments = segments
 
     def execute(self):
@@ -57,7 +55,7 @@ class TestProgress(test.TestCase):
         return e
 
     def tearDown(self):
-        super(TestProgress, self).tearDown()
+        super().tearDown()
         with contextlib.closing(impl_memory.MemoryBackend({})) as be:
             with contextlib.closing(be.get_connection()) as conn:
                 conn.clear_all()
diff -pruN 5.12.0-2/taskflow/tests/unit/test_reducefunctor_task.py 6.0.2-2/taskflow/tests/unit/test_reducefunctor_task.py
--- 5.12.0-2/taskflow/tests/unit/test_reducefunctor_task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_reducefunctor_task.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright 2015 Hewlett-Packard Development Company, L.P.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ multiply = lambda x, y: x * y
 class ReduceFunctorTaskTest(test.TestCase):
 
     def setUp(self):
-        super(ReduceFunctorTaskTest, self).setUp()
+        super().setUp()
 
         self.flow_store = {
             'a': 1,
diff -pruN 5.12.0-2/taskflow/tests/unit/test_retries.py 6.0.2-2/taskflow/tests/unit/test_retries.py
--- 5.12.0-2/taskflow/tests/unit/test_retries.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_retries.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,11 +28,6 @@ from taskflow.tests import utils
 from taskflow.types import failure
 from taskflow.utils import eventlet_utils as eu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 class FailingRetry(retry.Retry):
 
@@ -740,7 +733,7 @@ class RetryTest(utils.EngineTestBase):
         self.assertEqual(expected, capturer.values)
 
     def test_for_each_with_set(self):
-        collection = set([3, 2, 5])
+        collection = {3, 2, 5}
         retry1 = retry.ForEach(collection, 'r1', provides='x')
         flow = lf.Flow('flow-1', retry1).add(utils.FailingTaskWithOneArg('t1'))
         engine = self._make_engine(flow)
@@ -1368,20 +1361,3 @@ class ParallelEngineWithEventletTest(Ret
                                      engine='parallel',
                                      executor=executor,
                                      defer_reverts=defer_reverts)
-
-
-@testtools.skipIf(pe is None, 'process_executor is not available')
-class ParallelEngineWithProcessTest(RetryTest, test.TestCase):
-    _EXECUTOR_WORKERS = 2
-
-    def _make_engine(self, flow, defer_reverts=None, flow_detail=None,
-                     executor=None):
-        if executor is None:
-            executor = 'processes'
-        return taskflow.engines.load(flow,
-                                     flow_detail=flow_detail,
-                                     engine='parallel',
-                                     backend=self.backend,
-                                     executor=executor,
-                                     max_workers=self._EXECUTOR_WORKERS,
-                                     defer_reverts=defer_reverts)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_states.py 6.0.2-2/taskflow/tests/unit/test_states.py
--- 5.12.0-2/taskflow/tests/unit/test_states.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_states.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_storage.py 6.0.2-2/taskflow/tests/unit/test_storage.py
--- 5.12.0-2/taskflow/tests/unit/test_storage.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_storage.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,9 +28,9 @@ from taskflow.types import failure
 from taskflow.utils import persistence_utils as p_utils
 
 
-class StorageTestMixin(object):
+class StorageTestMixin:
     def setUp(self):
-        super(StorageTestMixin, self).setUp()
+        super().setUp()
         self.backend = None
         self.thread_count = 50
 
@@ -40,7 +38,7 @@ class StorageTestMixin(object):
         with contextlib.closing(self.backend) as be:
             with contextlib.closing(be.get_connection()) as conn:
                 conn.clear_all()
-        super(StorageTestMixin, self).tearDown()
+        super().tearDown()
 
     @staticmethod
     def _run_many_threads(threads):
@@ -357,14 +355,14 @@ class StorageTestMixin(object):
         s.inject({'foo': 'bar', 'spam': 'eggs'})
         self.assertEqual({'viking': 'eggs'},
                          s.fetch_mapped_args({'viking': 'spam'},
-                                             optional_args=set(['viking'])))
+                                             optional_args={'viking'}))
 
     def test_fetch_optional_args_not_found(self):
         s = self._get_storage()
         s.inject({'foo': 'bar', 'spam': 'eggs'})
         self.assertEqual({},
                          s.fetch_mapped_args({'viking': 'helmet'},
-                                             optional_args=set(['viking'])))
+                                             optional_args={'viking'}))
 
     def test_set_and_get_task_state(self):
         s = self._get_storage()
@@ -437,7 +435,7 @@ class StorageTestMixin(object):
 
     def test_result_is_checked(self):
         s = self._get_storage()
-        s.ensure_atom(test_utils.NoopTask('my task', provides=set(['result'])))
+        s.ensure_atom(test_utils.NoopTask('my task', provides={'result'}))
         s.save('my task', {})
         self.assertRaisesRegex(exceptions.NotFound,
                                '^Unable to find result', s.fetch, 'result')
@@ -539,7 +537,7 @@ class StorageTestMixin(object):
         s = self._get_storage()
         s.ensure_atom(t)
         missing = s.fetch_unsatisfied_args(t.name, t.rebind)
-        self.assertEqual(set(['x']), missing)
+        self.assertEqual({'x'}, missing)
         s.inject_atom_args(t.name, {'x': 2}, transient=False)
         missing = s.fetch_unsatisfied_args(t.name, t.rebind)
         self.assertEqual(set(), missing)
@@ -551,7 +549,7 @@ class StorageTestMixin(object):
         s = self._get_storage()
         s.ensure_atom(t)
         missing = s.fetch_unsatisfied_args(t.name, t.rebind)
-        self.assertEqual(set(['x']), missing)
+        self.assertEqual({'x'}, missing)
         s.inject_atom_args(t.name, {'x': 2}, transient=False)
         s.inject_atom_args(t.name, {'x': 3}, transient=True)
         missing = s.fetch_unsatisfied_args(t.name, t.rebind)
@@ -589,13 +587,13 @@ class StorageTestMixin(object):
 
 class StorageMemoryTest(StorageTestMixin, test.TestCase):
     def setUp(self):
-        super(StorageMemoryTest, self).setUp()
+        super().setUp()
         self.backend = backends.fetch({'connection': 'memory://'})
 
 
 class StorageSQLTest(StorageTestMixin, test.TestCase):
     def setUp(self):
-        super(StorageSQLTest, self).setUp()
+        super().setUp()
         self.backend = backends.fetch({'connection': 'sqlite://'})
         with contextlib.closing(self.backend.get_connection()) as conn:
             conn.upgrade()
diff -pruN 5.12.0-2/taskflow/tests/unit/test_suspend.py 6.0.2-2/taskflow/tests/unit/test_suspend.py
--- 5.12.0-2/taskflow/tests/unit/test_suspend.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_suspend.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,23 +23,18 @@ from taskflow import test
 from taskflow.tests import utils
 from taskflow.utils import eventlet_utils as eu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 class SuspendingListener(utils.CaptureListener):
 
     def __init__(self, engine,
                  task_name, task_state, capture_flow=False):
-        super(SuspendingListener, self).__init__(
+        super().__init__(
             engine,
             capture_flow=capture_flow)
         self._revert_match = (task_name, task_state)
 
     def _task_receiver(self, state, details):
-        super(SuspendingListener, self)._task_receiver(state, details)
+        super()._task_receiver(state, details)
         if (details['task_name'], state) == self._revert_match:
             self._engine.suspend()
 
@@ -227,17 +220,3 @@ class ParallelEngineWithEventletTest(Sus
         return taskflow.engines.load(flow, flow_detail=flow_detail,
                                      backend=self.backend, engine='parallel',
                                      executor=executor)
-
-
-@testtools.skipIf(pe is None, 'process_executor is not available')
-class ParallelEngineWithProcessTest(SuspendTest, test.TestCase):
-    _EXECUTOR_WORKERS = 2
-
-    def _make_engine(self, flow, flow_detail=None, executor=None):
-        if executor is None:
-            executor = 'processes'
-        return taskflow.engines.load(flow, flow_detail=flow_detail,
-                                     engine='parallel',
-                                     backend=self.backend,
-                                     executor=executor,
-                                     max_workers=self._EXECUTOR_WORKERS)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_task.py 6.0.2-2/taskflow/tests/unit/test_task.py
--- 5.12.0-2/taskflow/tests/unit/test_task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_task.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright 2015 Hewlett-Packard Development Company, L.P.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
@@ -81,7 +79,7 @@ class TaskTest(test.TestCase):
 
     def test_generated_name(self):
         my_task = MyTask()
-        self.assertEqual('%s.%s' % (__name__, 'MyTask'),
+        self.assertEqual('{}.{}'.format(__name__, 'MyTask'),
                          my_task.name)
 
     def test_task_str(self):
@@ -121,7 +119,7 @@ class TaskTest(test.TestCase):
         }
         self.assertEqual(expected,
                          my_task.rebind)
-        self.assertEqual(set(['spam', 'eggs', 'context']),
+        self.assertEqual({'spam', 'eggs', 'context'},
                          my_task.requires)
 
     def test_requires_amended(self):
@@ -150,12 +148,12 @@ class TaskTest(test.TestCase):
 
     def test_requires_ignores_optional(self):
         my_task = DefaultArgTask()
-        self.assertEqual(set(['spam']), my_task.requires)
-        self.assertEqual(set(['eggs']), my_task.optional)
+        self.assertEqual({'spam'}, my_task.requires)
+        self.assertEqual({'eggs'}, my_task.optional)
 
     def test_requires_allows_optional(self):
         my_task = DefaultArgTask(requires=('spam', 'eggs'))
-        self.assertEqual(set(['spam', 'eggs']), my_task.requires)
+        self.assertEqual({'spam', 'eggs'}, my_task.requires)
         self.assertEqual(set(), my_task.optional)
 
     def test_rebind_includes_optional(self):
@@ -174,7 +172,7 @@ class TaskTest(test.TestCase):
             'context': 'c'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'c']),
+        self.assertEqual({'a', 'b', 'c'},
                          my_task.requires)
 
     def test_rebind_partial(self):
@@ -185,7 +183,7 @@ class TaskTest(test.TestCase):
             'context': 'context'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'context']),
+        self.assertEqual({'a', 'b', 'context'},
                          my_task.requires)
 
     def test_rebind_unknown(self):
@@ -208,7 +206,7 @@ class TaskTest(test.TestCase):
             'eggs': 'c'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'c']),
+        self.assertEqual({'a', 'b', 'c'},
                          my_task.requires)
 
     def test_rebind_list_partial(self):
@@ -219,7 +217,7 @@ class TaskTest(test.TestCase):
             'eggs': 'eggs'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'eggs']),
+        self.assertEqual({'a', 'b', 'eggs'},
                          my_task.requires)
 
     def test_rebind_list_more(self):
@@ -234,7 +232,7 @@ class TaskTest(test.TestCase):
             'c': 'c'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'c']),
+        self.assertEqual({'a', 'b', 'c'},
                          my_task.requires)
 
     def test_rebind_list_bad_value(self):
@@ -243,12 +241,12 @@ class TaskTest(test.TestCase):
 
     def test_default_provides(self):
         my_task = DefaultProvidesTask()
-        self.assertEqual(set(['def']), my_task.provides)
+        self.assertEqual({'def'}, my_task.provides)
         self.assertEqual({'def': None}, my_task.save_as)
 
     def test_default_provides_can_be_overridden(self):
         my_task = DefaultProvidesTask(provides=('spam', 'eggs'))
-        self.assertEqual(set(['spam', 'eggs']), my_task.provides)
+        self.assertEqual({'spam', 'eggs'}, my_task.provides)
         self.assertEqual({'spam': 0, 'eggs': 1}, my_task.save_as)
 
     def test_update_progress_within_bounds(self):
@@ -366,7 +364,7 @@ class TaskTest(test.TestCase):
         my_task = SeparateRevertTask(rebind=('a',), revert_rebind=('b',))
         self.assertEqual({'execute_arg': 'a'}, my_task.rebind)
         self.assertEqual({'revert_arg': 'b'}, my_task.revert_rebind)
-        self.assertEqual(set(['a', 'b']),
+        self.assertEqual({'a', 'b'},
                          my_task.requires)
 
         my_task = SeparateRevertTask(requires='execute_arg',
@@ -374,13 +372,13 @@ class TaskTest(test.TestCase):
 
         self.assertEqual({'execute_arg': 'execute_arg'}, my_task.rebind)
         self.assertEqual({'revert_arg': 'revert_arg'}, my_task.revert_rebind)
-        self.assertEqual(set(['execute_arg', 'revert_arg']),
+        self.assertEqual({'execute_arg', 'revert_arg'},
                          my_task.requires)
 
     def test_separate_revert_optional_args(self):
         my_task = SeparateRevertOptionalTask()
-        self.assertEqual(set(['execute_arg']), my_task.optional)
-        self.assertEqual(set(['revert_arg']), my_task.revert_optional)
+        self.assertEqual({'execute_arg'}, my_task.optional)
+        self.assertEqual({'revert_arg'}, my_task.revert_optional)
 
     def test_revert_kwargs(self):
         my_task = RevertKwargsTask()
@@ -389,7 +387,7 @@ class TaskTest(test.TestCase):
         self.assertEqual(expected_rebind, my_task.rebind)
         expected_rebind = {'execute_arg1': 'execute_arg1'}
         self.assertEqual(expected_rebind, my_task.revert_rebind)
-        self.assertEqual(set(['execute_arg1', 'execute_arg2']),
+        self.assertEqual({'execute_arg1', 'execute_arg2'},
                          my_task.requires)
 
 
diff -pruN 5.12.0-2/taskflow/tests/unit/test_types.py 6.0.2-2/taskflow/tests/unit/test_types.py
--- 5.12.0-2/taskflow/tests/unit/test_types.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_types.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -66,9 +64,9 @@ class GraphTest(test.TestCase):
         g.add_node("b")
         g.add_node("c")
         g.add_edge("b", "c")
-        self.assertEqual(set(['a', 'b']),
+        self.assertEqual({'a', 'b'},
                          set(g.no_predecessors_iter()))
-        self.assertEqual(set(['a', 'c']),
+        self.assertEqual({'a', 'c'},
                          set(g.no_successors_iter()))
 
     def test_directed(self):
@@ -534,8 +532,8 @@ CEO
     def test_dfs_itr(self):
         root = self._make_species()
         things = list([n.item for n in root.dfs_iter(include_self=True)])
-        self.assertEqual(set(['animal', 'reptile', 'mammal', 'horse',
-                              'primate', 'monkey', 'human']), set(things))
+        self.assertEqual({'animal', 'reptile', 'mammal', 'horse',
+                          'primate', 'monkey', 'human'}, set(things))
 
     def test_dfs_itr_left_to_right(self):
         root = self._make_species()
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils.py 6.0.2-2/taskflow/tests/unit/test_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,7 +28,7 @@ from taskflow.utils import threading_uti
 class CachedPropertyTest(test.TestCase):
     def test_attribute_caching(self):
 
-        class A(object):
+        class A:
             def __init__(self):
                 self.call_counter = 0
 
@@ -46,7 +44,7 @@ class CachedPropertyTest(test.TestCase):
 
     def test_custom_property(self):
 
-        class A(object):
+        class A:
             @misc.cachedproperty('_c')
             def b(self):
                 return 'b'
@@ -60,7 +58,7 @@ class CachedPropertyTest(test.TestCase):
         def try_del(a):
             del a.b
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 return 'b'
@@ -75,7 +73,7 @@ class CachedPropertyTest(test.TestCase):
         def try_set(a):
             a.b = 'c'
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 return 'b'
@@ -87,7 +85,7 @@ class CachedPropertyTest(test.TestCase):
 
     def test_documented_property(self):
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 """I like bees."""
@@ -97,7 +95,7 @@ class CachedPropertyTest(test.TestCase):
 
     def test_undocumented_property(self):
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 return 'b'
@@ -107,7 +105,7 @@ class CachedPropertyTest(test.TestCase):
     def test_threaded_access_property(self):
         called = collections.deque()
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 called.append(1)
@@ -311,7 +309,7 @@ class TestClamping(test.TestCase):
 class TestIterable(test.TestCase):
     def test_string_types(self):
         self.assertFalse(misc.is_iterable('string'))
-        self.assertFalse(misc.is_iterable(u'string'))
+        self.assertFalse(misc.is_iterable('string'))
 
     def test_list(self):
         self.assertTrue(misc.is_iterable(list()))
@@ -347,7 +345,7 @@ class TestSafeCopyDictRaises(testscenari
     scenarios = [
         ('list', {'original': [1, 2], 'exception': TypeError}),
         ('tuple', {'original': (1, 2), 'exception': TypeError}),
-        ('set', {'original': set([1, 2]), 'exception': TypeError}),
+        ('set', {'original': {1, 2}, 'exception': TypeError}),
     ]
 
     def test_exceptions(self):
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_async_utils.py 6.0.2-2/taskflow/tests/unit/test_utils_async_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_async_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_utils_async_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_binary.py 6.0.2-2/taskflow/tests/unit/test_utils_binary.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_binary.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_utils_binary.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -38,15 +36,15 @@ class BinaryEncodeTest(test.TestCase):
         self._check(data, data)
 
     def test_simple_text(self):
-        self._check(u'hello', _bytes('hello'))
+        self._check('hello', _bytes('hello'))
 
     def test_unicode_text(self):
-        self._check(u'привет', _bytes('привет'))
+        self._check('привет', _bytes('привет'))
 
     def test_unicode_other_encoding(self):
-        result = misc.binary_encode(u'mañana', 'latin-1')
+        result = misc.binary_encode('mañana', 'latin-1')
         self.assertIsInstance(result, bytes)
-        self.assertEqual(u'mañana'.encode('latin-1'), result)
+        self.assertEqual('mañana'.encode('latin-1'), result)
 
 
 class BinaryDecodeTest(test.TestCase):
@@ -57,24 +55,24 @@ class BinaryDecodeTest(test.TestCase):
         self.assertEqual(expected_result, result)
 
     def test_simple_text(self):
-        data = u'hello'
+        data = 'hello'
         self._check(data, data)
 
     def test_unicode_text(self):
-        data = u'привет'
+        data = 'привет'
         self._check(data, data)
 
     def test_simple_binary(self):
-        self._check(_bytes('hello'), u'hello')
+        self._check(_bytes('hello'), 'hello')
 
     def test_unicode_binary(self):
-        self._check(_bytes('привет'), u'привет')
+        self._check(_bytes('привет'), 'привет')
 
     def test_unicode_other_encoding(self):
-        data = u'mañana'.encode('latin-1')
+        data = 'mañana'.encode('latin-1')
         result = misc.binary_decode(data, 'latin-1')
         self.assertIsInstance(result, str)
-        self.assertEqual(u'mañana', result)
+        self.assertEqual('mañana', result)
 
 
 class DecodeJsonTest(test.TestCase):
@@ -85,11 +83,11 @@ class DecodeJsonTest(test.TestCase):
 
     def test_it_works_with_unicode(self):
         data = _bytes('{"foo": "фуу"}')
-        self.assertEqual({"foo": u'фуу'}, misc.decode_json(data))
+        self.assertEqual({"foo": 'фуу'}, misc.decode_json(data))
 
     def test_handles_invalid_unicode(self):
         self.assertRaises(ValueError, misc.decode_json,
-                          '{"\xf1": 1}'.encode('latin-1'))
+                          b'{"\xf1": 1}')
 
     def test_handles_bad_json(self):
         self.assertRaises(ValueError, misc.decode_json,
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_iter_utils.py 6.0.2-2/taskflow/tests/unit/test_utils_iter_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_iter_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_utils_iter_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -140,7 +138,7 @@ class IterUtilsTest(test.TestCase):
         self.assertRaises(ValueError, iter_utils.while_is_not, 2, 'a')
 
     def test_while_is_not(self):
-        class Dummy(object):
+        class Dummy:
             def __init__(self, char):
                 self.char = char
         dummy_list = [Dummy(a)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_kazoo_utils.py 6.0.2-2/taskflow/tests/unit/test_utils_kazoo_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_kazoo_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_utils_kazoo_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) Red Hat
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_threading_utils.py 6.0.2-2/taskflow/tests/unit/test_utils_threading_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_threading_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/test_utils_threading_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -53,7 +51,7 @@ class TestThreadBundle(test.TestCase):
     thread_count = 5
 
     def setUp(self):
-        super(TestThreadBundle, self).setUp()
+        super().setUp()
         self.bundle = tu.ThreadBundle()
         self.death = threading.Event()
         self.addCleanup(self.bundle.stop)
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_creation.py 6.0.2-2/taskflow/tests/unit/worker_based/test_creation.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_creation.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_creation.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_dispatcher.py 6.0.2-2/taskflow/tests/unit/worker_based/test_dispatcher.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_dispatcher.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_dispatcher.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_endpoint.py 6.0.2-2/taskflow/tests/unit/worker_based/test_endpoint.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_endpoint.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_endpoint.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,7 +23,7 @@ from taskflow.tests import utils
 class Task(task.Task):
 
     def __init__(self, a, *args, **kwargs):
-        super(Task, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     def execute(self, *args, **kwargs):
         pass
@@ -34,7 +32,7 @@ class Task(task.Task):
 class TestEndpoint(test.TestCase):
 
     def setUp(self):
-        super(TestEndpoint, self).setUp()
+        super().setUp()
         self.task_cls = utils.TaskOneReturn
         self.task_uuid = 'task-uuid'
         self.task_args = {'context': 'context'}
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_executor.py 6.0.2-2/taskflow/tests/unit/worker_based/test_executor.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_executor.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ from taskflow.types import failure
 class TestWorkerTaskExecutor(test.MockTestCase):
 
     def setUp(self):
-        super(TestWorkerTaskExecutor, self).setUp()
+        super().setUp()
         self.task = test_utils.DummyTask()
         self.task_uuid = 'task-uuid'
         self.task_args = {'a': 'a'}
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_message_pump.py 6.0.2-2/taskflow/tests/unit/worker_based/test_message_pump.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_message_pump.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_message_pump.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_pipeline.py 6.0.2-2/taskflow/tests/unit/worker_based/test_pipeline.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_pipeline.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_pipeline.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_protocol.py 6.0.2-2/taskflow/tests/unit/worker_based/test_protocol.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_protocol.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_protocol.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,7 +23,7 @@ from taskflow.tests import utils
 from taskflow.types import failure
 
 
-class Unserializable(object):
+class Unserializable:
     pass
 
 
@@ -98,7 +96,7 @@ class TestProtocolValidation(test.TestCa
 class TestProtocol(test.TestCase):
 
     def setUp(self):
-        super(TestProtocol, self).setUp()
+        super().setUp()
         self.task = utils.DummyTask()
         self.task_uuid = 'task-uuid'
         self.task_action = 'execute'
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_proxy.py 6.0.2-2/taskflow/tests/unit/worker_based/test_proxy.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_proxy.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_proxy.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,7 +23,7 @@ from taskflow.utils import threading_uti
 class TestProxy(test.MockTestCase):
 
     def setUp(self):
-        super(TestProxy, self).setUp()
+        super().setUp()
         self.topic = 'test-topic'
         self.broker_url = 'test-url'
         self.exchange = 'test-exchange'
@@ -72,7 +70,7 @@ class TestProxy(test.MockTestCase):
         self.resetMasterMock()
 
     def _queue_name(self, topic):
-        return "%s_%s" % (self.exchange, topic)
+        return "{}_{}".format(self.exchange, topic)
 
     def proxy_start_calls(self, calls, exc_type=mock.ANY):
         return [
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_server.py 6.0.2-2/taskflow/tests/unit/worker_based/test_server.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_server.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_server.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -27,7 +25,7 @@ from taskflow.types import failure
 class TestServer(test.MockTestCase):
 
     def setUp(self):
-        super(TestServer, self).setUp()
+        super().setUp()
         self.server_topic = 'server-topic'
         self.server_exchange = 'server-exchange'
         self.broker_url = 'test-url'
@@ -140,8 +138,8 @@ class TestServer(test.MockTestCase):
         self.assertEqual(
             (self.task.name, self.task.name, 'revert',
              dict(arguments=self.task_args,
-                  failures=dict((i, utils.FailureMatcher(f))
-                                for i, f in failures.items()))),
+                  failures={i: utils.FailureMatcher(f)
+                            for i, f in failures.items()})),
             (task_cls, task_name, action, task_args))
 
     @mock.patch("taskflow.engines.worker_based.server.LOG.critical")
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_types.py 6.0.2-2/taskflow/tests/unit/worker_based/test_types.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_types.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_types.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_worker.py 6.0.2-2/taskflow/tests/unit/worker_based/test_worker.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_worker.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/unit/worker_based/test_worker.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -27,7 +25,7 @@ from taskflow.tests import utils
 class TestWorker(test.MockTestCase):
 
     def setUp(self):
-        super(TestWorker, self).setUp()
+        super().setUp()
         self.task_cls = utils.DummyTask
         self.task_name = reflection.get_class_name(self.task_cls)
         self.broker_url = 'test-url'
diff -pruN 5.12.0-2/taskflow/tests/utils.py 6.0.2-2/taskflow/tests/utils.py
--- 5.12.0-2/taskflow/tests/utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/tests/utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -151,7 +149,7 @@ class GiveBackRevert(task.Task):
             return result + 1
 
 
-class FakeTask(object):
+class FakeTask:
 
     def execute(self, **kwargs):
         pass
@@ -169,16 +167,14 @@ RUNTIME_ERROR_CLASSES = ['RuntimeError',
 
 class ProvidesRequiresTask(task.Task):
     def __init__(self, name, provides, requires, return_tuple=True):
-        super(ProvidesRequiresTask, self).__init__(name=name,
-                                                   provides=provides,
-                                                   requires=requires)
+        super().__init__(name=name, provides=provides, requires=requires)
         self.return_tuple = isinstance(provides, (tuple, list))
 
     def execute(self, *args, **kwargs):
         if self.return_tuple:
             return tuple(range(len(self.provides)))
         else:
-            return dict((k, k) for k in self.provides)
+            return {k: k for k in self.provides}
 
 
 # Used to format the captured values into strings (which are easier to
@@ -197,7 +193,7 @@ class CaptureListener(capturing.CaptureL
         name_postfix, name_key = LOOKUP_NAME_POSTFIX[kind]
         name = details[name_key] + name_postfix
         if 'result' in details:
-            name += ' %s(%s)' % (state, details['result'])
+            name += ' {}({})'.format(state, details['result'])
         else:
             name += " %s" % state
         return name
@@ -387,9 +383,9 @@ class SleepTask(task.Task):
         time.sleep(duration)
 
 
-class EngineTestBase(object):
+class EngineTestBase:
     def setUp(self):
-        super(EngineTestBase, self).setUp()
+        super().setUp()
         self.backend = impl_memory.MemoryBackend(conf={})
 
     def tearDown(self):
@@ -397,7 +393,7 @@ class EngineTestBase(object):
         with contextlib.closing(self.backend) as be:
             with contextlib.closing(be.get_connection()) as conn:
                 conn.clear_all()
-        super(EngineTestBase, self).tearDown()
+        super().tearDown()
 
     def _make_engine(self, flow, **kwargs):
         raise exceptions.NotImplementedError("_make_engine() must be"
@@ -405,7 +401,7 @@ class EngineTestBase(object):
                                              " desired")
 
 
-class FailureMatcher(object):
+class FailureMatcher:
     """Needed for failure objects comparison."""
 
     def __init__(self, failure):
@@ -433,7 +429,7 @@ class OneReturnRetry(retry.AlwaysRevert)
 class ConditionalTask(ProgressingTask):
 
     def execute(self, x, y):
-        super(ConditionalTask, self).execute()
+        super().execute()
         if x != y:
             raise RuntimeError('Woot!')
 
@@ -441,7 +437,7 @@ class ConditionalTask(ProgressingTask):
 class WaitForOneFromTask(ProgressingTask):
 
     def __init__(self, name, wait_for, wait_states, **kwargs):
-        super(WaitForOneFromTask, self).__init__(name, **kwargs)
+        super().__init__(name, **kwargs)
         if isinstance(wait_for, str):
             self.wait_for = [wait_for]
         else:
@@ -458,7 +454,7 @@ class WaitForOneFromTask(ProgressingTask
                                'for %s to change state to %s'
                                % (WAIT_TIMEOUT, self.wait_for,
                                   self.wait_states))
-        return super(WaitForOneFromTask, self).execute()
+        return super().execute()
 
     def callback(self, state, details):
         name = details.get('task_name', None)
diff -pruN 5.12.0-2/taskflow/types/entity.py 6.0.2-2/taskflow/types/entity.py
--- 5.12.0-2/taskflow/types/entity.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/types/entity.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Rackspace Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -15,7 +13,7 @@
 #    under the License.
 
 
-class Entity(object):
+class Entity:
     """Entity object that identifies some resource/item/other.
 
     :ivar kind: **immutable** type/kind that identifies this
diff -pruN 5.12.0-2/taskflow/types/failure.py 6.0.2-2/taskflow/types/failure.py
--- 5.12.0-2/taskflow/types/failure.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/types/failure.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -433,8 +431,8 @@ class Failure():
         if not self._exc_type_names:
             buf.write('Failure: %s' % (self._exception_str))
         else:
-            buf.write('Failure: %s: %s' % (self._exc_type_names[0],
-                                           self._exception_str))
+            buf.write('Failure: {}: {}'.format(self._exc_type_names[0],
+                                               self._exception_str))
         if traceback:
             if self._traceback_str is not None:
                 traceback_str = self._traceback_str.rstrip()
@@ -452,8 +450,7 @@ class Failure():
 
     def __iter__(self):
         """Iterate over exception type names."""
-        for et in self._exc_type_names:
-            yield et
+        yield from self._exc_type_names
 
     def __getstate__(self):
         dct = self.to_dict()
diff -pruN 5.12.0-2/taskflow/types/graph.py 6.0.2-2/taskflow/types/graph.py
--- 5.12.0-2/taskflow/types/graph.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/types/graph.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -31,15 +29,15 @@ def _common_format(g, edge_notation):
     lines.append("Nodes: %s" % g.number_of_nodes())
     for n, n_data in g.nodes(data=True):
         if n_data:
-            lines.append("  - %s (%s)" % (n, n_data))
+            lines.append("  - {} ({})".format(n, n_data))
         else:
             lines.append("  - %s" % n)
     lines.append("Edges: %s" % g.number_of_edges())
     for (u, v, e_data) in g.edges(data=True):
         if e_data:
-            lines.append("  %s %s %s (%s)" % (u, edge_notation, v, e_data))
+            lines.append("  {} {} {} ({})".format(u, edge_notation, v, e_data))
         else:
-            lines.append("  %s %s %s" % (u, edge_notation, v))
+            lines.append("  {} {} {}".format(u, edge_notation, v))
     return lines
 
 
@@ -47,8 +45,7 @@ class Graph(nx.Graph):
     """A graph subclass with useful utility functions."""
 
     def __init__(self, incoming_graph_data=None, name=''):
-        super(Graph, self).__init__(incoming_graph_data=incoming_graph_data,
-                                    name=name)
+        super().__init__(incoming_graph_data=incoming_graph_data, name=name)
         self.frozen = False
 
     def freeze(self):
@@ -68,14 +65,14 @@ class Graph(nx.Graph):
     def add_edge(self, u, v, attr_dict=None, **attr):
         """Add an edge between u and v."""
         if attr_dict is not None:
-            return super(Graph, self).add_edge(u, v, **attr_dict)
-        return super(Graph, self).add_edge(u, v, **attr)
+            return super().add_edge(u, v, **attr_dict)
+        return super().add_edge(u, v, **attr)
 
     def add_node(self, n, attr_dict=None, **attr):
         """Add a single node n and update node attributes."""
         if attr_dict is not None:
-            return super(Graph, self).add_node(n, **attr_dict)
-        return super(Graph, self).add_node(n, **attr)
+            return super().add_node(n, **attr_dict)
+        return super().add_node(n, **attr)
 
     def fresh_copy(self):
         """Return a fresh copy graph with the same data structure.
@@ -91,8 +88,7 @@ class DiGraph(nx.DiGraph):
     """A directed graph subclass with useful utility functions."""
 
     def __init__(self, incoming_graph_data=None, name=''):
-        super(DiGraph, self).__init__(incoming_graph_data=incoming_graph_data,
-                                      name=name)
+        super().__init__(incoming_graph_data=incoming_graph_data, name=name)
         self.frozen = False
 
     def freeze(self):
@@ -165,7 +161,7 @@ class DiGraph(nx.DiGraph):
         NOTE(harlowja): predecessor cycles (if they exist) will not be iterated
         over more than once (this prevents infinite iteration).
         """
-        visited = set([n])
+        visited = {n}
         queue = collections.deque(self.predecessors(n))
         while queue:
             pred = queue.popleft()
@@ -179,14 +175,14 @@ class DiGraph(nx.DiGraph):
     def add_edge(self, u, v, attr_dict=None, **attr):
         """Add an edge between u and v."""
         if attr_dict is not None:
-            return super(DiGraph, self).add_edge(u, v, **attr_dict)
-        return super(DiGraph, self).add_edge(u, v, **attr)
+            return super().add_edge(u, v, **attr_dict)
+        return super().add_edge(u, v, **attr)
 
     def add_node(self, n, attr_dict=None, **attr):
         """Add a single node n and update node attributes."""
         if attr_dict is not None:
-            return super(DiGraph, self).add_node(n, **attr_dict)
-        return super(DiGraph, self).add_node(n, **attr)
+            return super().add_node(n, **attr_dict)
+        return super().add_node(n, **attr)
 
     def fresh_copy(self):
         """Return a fresh copy graph with the same data structure.
diff -pruN 5.12.0-2/taskflow/types/latch.py 6.0.2-2/taskflow/types/latch.py
--- 5.12.0-2/taskflow/types/latch.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/types/latch.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +17,7 @@ import threading
 from oslo_utils import timeutils
 
 
-class Latch(object):
+class Latch:
     """A class that ensures N-arrivals occur before unblocking.
 
     TODO(harlowja): replace with http://bugs.python.org/issue8777 when we no
diff -pruN 5.12.0-2/taskflow/types/notifier.py 6.0.2-2/taskflow/types/notifier.py
--- 5.12.0-2/taskflow/types/notifier.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/types/notifier.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -24,7 +22,7 @@ from oslo_utils import reflection
 LOG = logging.getLogger(__name__)
 
 
-class Listener(object):
+class Listener:
     """Immutable helper that represents a notification listener/target."""
 
     def __init__(self, callback, args=None, kwargs=None, details_filter=None):
@@ -89,7 +87,7 @@ class Listener(object):
         self._callback(event_type, *self._args, **kwargs)
 
     def __repr__(self):
-        repr_msg = "%s object at 0x%x calling into '%r'" % (
+        repr_msg = "{} object at 0x{:x} calling into '{!r}'".format(
             reflection.get_class_name(self, fully_qualified=False),
             id(self), self._callback)
         if self._details_filter is not None:
@@ -126,7 +124,7 @@ class Listener(object):
         return not self.__eq__(other)
 
 
-class Notifier(object):
+class Notifier:
     """A notification (`pub/sub`_ *like*) helper class.
 
     It is intended to be used to subscribe to notifications of events
@@ -151,7 +149,7 @@ class Notifier(object):
     ANY = '*'
 
     #: Events which can *not* be used to trigger notifications
-    _DISALLOWED_NOTIFICATION_EVENTS = set([ANY])
+    _DISALLOWED_NOTIFICATION_EVENTS = {ANY}
 
     def __init__(self):
         self._topics = collections.defaultdict(list)
@@ -321,7 +319,7 @@ class RestrictedNotifier(Notifier):
     """
 
     def __init__(self, watchable_events, allow_any=True):
-        super(RestrictedNotifier, self).__init__()
+        super().__init__()
         self._watchable_events = frozenset(watchable_events)
         self._allow_any = allow_any
 
@@ -332,8 +330,7 @@ class RestrictedNotifier(Notifier):
         meta-type is not a specific event but is a capture-all that does not
         imply the same meaning as specific event types.
         """
-        for event_type in self._watchable_events:
-            yield event_type
+        yield from self._watchable_events
 
     def can_be_registered(self, event_type):
         """Checks if the event can be registered/subscribed to.
diff -pruN 5.12.0-2/taskflow/types/sets.py 6.0.2-2/taskflow/types/sets.py
--- 5.12.0-2/taskflow/types/sets.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/types/sets.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -57,8 +55,7 @@ class OrderedSet(abc.Set, abc.Hashable):
         return len(self._data)
 
     def __iter__(self):
-        for value in self._data.keys():
-            yield value
+        yield from self._data.keys()
 
     def __setstate__(self, items):
         self.__init__(iterable=iter(items))
@@ -67,7 +64,7 @@ class OrderedSet(abc.Set, abc.Hashable):
         return tuple(self)
 
     def __repr__(self):
-        return "%s(%s)" % (type(self).__name__, list(self))
+        return "{}({})".format(type(self).__name__, list(self))
 
     def copy(self):
         """Return a shallow copy of a set."""
diff -pruN 5.12.0-2/taskflow/types/timing.py 6.0.2-2/taskflow/types/timing.py
--- 5.12.0-2/taskflow/types/timing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/types/timing.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -17,7 +15,7 @@
 import threading
 
 
-class Timeout(object):
+class Timeout:
     """An object which represents a timeout.
 
     This object has the ability to be interrupted before the actual timeout
diff -pruN 5.12.0-2/taskflow/types/tree.py 6.0.2-2/taskflow/types/tree.py
--- 5.12.0-2/taskflow/types/tree.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/types/tree.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -28,10 +26,10 @@ class FrozenNode(Exception):
     """Exception raised when a frozen node is modified."""
 
     def __init__(self):
-        super(FrozenNode, self).__init__("Frozen node(s) can't be modified")
+        super().__init__("Frozen node(s) can't be modified")
 
 
-class _DFSIter(object):
+class _DFSIter:
     """Depth first iterator (non-recursive) over the child nodes."""
 
     def __init__(self, root, include_self=False, right_to_left=True):
@@ -60,7 +58,7 @@ class _DFSIter(object):
                 stack.extend(iter(node))
 
 
-class _BFSIter(object):
+class _BFSIter:
     """Breadth first iterator (non-recursive) over the child nodes."""
 
     def __init__(self, root, include_self=False, right_to_left=False):
@@ -89,7 +87,7 @@ class _BFSIter(object):
                 q.extend(node.reverse_iter())
 
 
-class Node(object):
+class Node:
     """A n-ary node class that can be used to create tree structures."""
 
     #: Default string prefix used in :py:meth:`.pformat`.
@@ -358,13 +356,11 @@ class Node(object):
 
     def __iter__(self):
         """Iterates over the direct children of this node (right->left)."""
-        for c in self._children:
-            yield c
+        yield from self._children
 
     def reverse_iter(self):
         """Iterates over the direct children of this node (left->right)."""
-        for c in reversed(self._children):
-            yield c
+        yield from reversed(self._children)
 
     def index(self, item):
         """Finds the child index of a given item, searches in added order."""
diff -pruN 5.12.0-2/taskflow/utils/async_utils.py 6.0.2-2/taskflow/utils/async_utils.py
--- 5.12.0-2/taskflow/utils/async_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/async_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/banner.py 6.0.2-2/taskflow/utils/banner.py
--- 5.12.0-2/taskflow/utils/banner.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/banner.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2016 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -74,12 +72,13 @@ def make_banner(what, chapters):
             section_names = sorted(chapter_contents.keys())
             for j, section_name in enumerate(section_names):
                 if j + 1 < len(section_names):
-                    buf.write_nl("  %s => %s"
-                                 % (section_name,
-                                    chapter_contents[section_name]))
+                    buf.write_nl("  {} => {}".format(
+                        section_name,
+                        chapter_contents[section_name]))
                 else:
-                    buf.write("  %s => %s" % (section_name,
-                                              chapter_contents[section_name]))
+                    buf.write("  {} => {}".format(
+                        section_name,
+                        chapter_contents[section_name]))
         elif isinstance(chapter_contents, (list, tuple, set)):
             if isinstance(chapter_contents, set):
                 sections = sorted(chapter_contents)
@@ -87,9 +86,9 @@ def make_banner(what, chapters):
                 sections = chapter_contents
             for j, section in enumerate(sections):
                 if j + 1 < len(sections):
-                    buf.write_nl("  %s. %s" % (j + 1, section))
+                    buf.write_nl("  {}. {}".format(j + 1, section))
                 else:
-                    buf.write("  %s. %s" % (j + 1, section))
+                    buf.write("  {}. {}".format(j + 1, section))
         else:
             raise TypeError("Unsupported chapter contents"
                             " type: one of dict, list, tuple, set expected"
diff -pruN 5.12.0-2/taskflow/utils/eventlet_utils.py 6.0.2-2/taskflow/utils/eventlet_utils.py
--- 5.12.0-2/taskflow/utils/eventlet_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/eventlet_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/iter_utils.py 6.0.2-2/taskflow/utils/iter_utils.py
--- 5.12.0-2/taskflow/utils/iter_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/iter_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/kazoo_utils.py 6.0.2-2/taskflow/utils/kazoo_utils.py
--- 5.12.0-2/taskflow/utils/kazoo_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/kazoo_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -41,7 +39,7 @@ def _parse_hosts(hosts):
     if isinstance(hosts, (dict)):
         host_ports = []
         for (k, v) in hosts.items():
-            host_ports.append("%s:%s" % (k, v))
+            host_ports.append("{}:{}".format(k, v))
         hosts = host_ports
     if isinstance(hosts, (list, set, tuple)):
         return ",".join([str(h) for h in hosts])
@@ -65,7 +63,7 @@ def prettify_failures(failures, limit=-1
             pass
         pretty_op += "(%s)" % (", ".join(selected_attrs))
         pretty_cause = reflection.get_class_name(r, fully_qualified=False)
-        prettier.append("%s@%s" % (pretty_cause, pretty_op))
+        prettier.append("{}@{}".format(pretty_cause, pretty_op))
     if limit <= 0 or len(prettier) <= limit:
         return ", ".join(prettier)
     else:
@@ -78,7 +76,7 @@ class KazooTransactionException(k_exc.Ka
     """Exception raised when a checked commit fails."""
 
     def __init__(self, message, failures):
-        super(KazooTransactionException, self).__init__(message)
+        super().__init__(message)
         self._failures = tuple(failures)
 
     @property
@@ -134,8 +132,8 @@ def check_compatible(client, min_version
     """
     server_version = None
     if min_version:
-        server_version = tuple((int(a) for a in client.server_version()))
-        min_version = tuple((int(a) for a in min_version))
+        server_version = tuple(int(a) for a in client.server_version())
+        min_version = tuple(int(a) for a in min_version)
         if server_version < min_version:
             pretty_server_version = ".".join([str(a) for a in server_version])
             min_version = ".".join([str(a) for a in min_version])
@@ -145,8 +143,8 @@ def check_compatible(client, min_version
                                                          min_version))
     if max_version:
         if server_version is None:
-            server_version = tuple((int(a) for a in client.server_version()))
-        max_version = tuple((int(a) for a in max_version))
+            server_version = tuple(int(a) for a in client.server_version())
+        max_version = tuple(int(a) for a in max_version)
         if server_version > max_version:
             pretty_server_version = ".".join([str(a) for a in server_version])
             max_version = ".".join([str(a) for a in max_version])
diff -pruN 5.12.0-2/taskflow/utils/kombu_utils.py 6.0.2-2/taskflow/utils/kombu_utils.py
--- 5.12.0-2/taskflow/utils/kombu_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/kombu_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -22,7 +20,7 @@ _MSG_PROPERTIES = tuple([
 ])
 
 
-class DelayedPretty(object):
+class DelayedPretty:
     """Wraps a message and delays prettifying it until requested.
 
     TODO(harlowja): remove this when https://github.com/celery/kombu/pull/454/
@@ -70,7 +68,7 @@ def _prettify_message(message):
                 properties[segments[-1]] = value
     if message.body is not None:
         properties['body_length'] = len(message.body)
-    return "%(delivery_tag)s: %(properties)s" % {
-        'delivery_tag': message.delivery_tag,
-        'properties': properties,
-    }
+    return "{delivery_tag}: {properties}".format(
+        delivery_tag=message.delivery_tag,
+        properties=properties,
+    )
diff -pruN 5.12.0-2/taskflow/utils/misc.py 6.0.2-2/taskflow/utils/misc.py
--- 5.12.0-2/taskflow/utils/misc.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/misc.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
@@ -55,7 +53,7 @@ class StrEnum(str, enum.Enum):
             if not isinstance(a, str):
                 raise TypeError("Enumeration '%s' (%s) is not"
                                 " a string" % (a, type(a).__name__))
-        return super(StrEnum, cls).__new__(cls, *args, **kwargs)
+        return super().__new__(cls, *args, **kwargs)
 
 
 class StringIO(io.StringIO):
@@ -82,7 +80,7 @@ def get_hostname(unknown_hostname=UNKNOW
             return unknown_hostname
         else:
             return hostname
-    except socket.error:
+    except OSError:
         return unknown_hostname
 
 
@@ -189,7 +187,7 @@ def find_subclasses(locations, base_cls,
             except ValueError:
                 module = importutils.import_module(item)
             else:
-                obj = importutils.import_class('%s.%s' % (pkg, cls))
+                obj = importutils.import_class('{}.{}'.format(pkg, cls))
                 if not reflection.is_subclass(obj, base_cls):
                     raise TypeError("Object '%s' (%s) is not a '%s' subclass"
                                     % (item, type(item), base_cls))
@@ -343,7 +341,7 @@ def decode_json(raw_data, root_types=(di
         return _check_decoded_type(data, root_types=root_types)
 
 
-class cachedproperty(object):
+class cachedproperty:
     """A *thread-safe* descriptor property that is only evaluated once.
 
     This caching descriptor can be placed on instance methods to translate
diff -pruN 5.12.0-2/taskflow/utils/persistence_utils.py 6.0.2-2/taskflow/utils/persistence_utils.py
--- 5.12.0-2/taskflow/utils/persistence_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/persistence_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/redis_utils.py 6.0.2-2/taskflow/utils/redis_utils.py
--- 5.12.0-2/taskflow/utils/redis_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/redis_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -41,7 +39,7 @@ class RedisClient(redis.Redis):
     """
 
     def __init__(self, *args, **kwargs):
-        super(RedisClient, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.closed = False
 
     def close(self):
@@ -71,7 +69,7 @@ class UnknownExpire(enum.IntEnum):
 DOES_NOT_EXPIRE = UnknownExpire.DOES_NOT_EXPIRE
 KEY_NOT_FOUND = UnknownExpire.KEY_NOT_FOUND
 
-_UNKNOWN_EXPIRE_MAPPING = dict((e.value, e) for e in list(UnknownExpire))
+_UNKNOWN_EXPIRE_MAPPING = {e.value: e for e in list(UnknownExpire)}
 
 
 def get_expiry(client, key, prior_version=None):
diff -pruN 5.12.0-2/taskflow/utils/schema_utils.py 6.0.2-2/taskflow/utils/schema_utils.py
--- 5.12.0-2/taskflow/utils/schema_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/schema_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/threading_utils.py 6.0.2-2/taskflow/utils/threading_utils.py
--- 5.12.0-2/taskflow/utils/threading_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/utils/threading_utils.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -73,7 +71,7 @@ def no_op(*args, **kwargs):
     """Function that does nothing."""
 
 
-class ThreadBundle(object):
+class ThreadBundle:
     """A group/bundle of threads that start/stop together."""
 
     def __init__(self):
diff -pruN 5.12.0-2/taskflow/version.py 6.0.2-2/taskflow/version.py
--- 5.12.0-2/taskflow/version.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/taskflow/version.py	2025-08-14 03:01:40.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -28,4 +26,4 @@ def version_string_with_package():
     if TASK_PACKAGE is None:
         return version_string()
     else:
-        return "%s-%s" % (version_string(), TASK_PACKAGE)
+        return "{}-{}".format(version_string(), TASK_PACKAGE)
diff -pruN 5.12.0-2/tools/speed_test.py 6.0.2-2/tools/speed_test.py
--- 5.12.0-2/tools/speed_test.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/tools/speed_test.py	2025-08-14 03:01:40.000000000 +0000
@@ -36,7 +36,7 @@ def print_header(name):
         print(header_footer)
 
 
-class ProfileIt(object):
+class ProfileIt:
     stats_ordering = ('cumulative', 'calls',)
 
     def __init__(self, name, args):
@@ -65,7 +65,7 @@ class ProfileIt(object):
             print("")
 
 
-class TimeIt(object):
+class TimeIt:
     def __init__(self, name, args):
         self.watch = timeutils.StopWatch()
         self.name = name
diff -pruN 5.12.0-2/tools/state_graph.py 6.0.2-2/tools/state_graph.py
--- 5.12.0-2/tools/state_graph.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/tools/state_graph.py	2025-08-14 03:01:40.000000000 +0000
@@ -34,7 +34,7 @@ from taskflow import states
 
 # This is just needed to get at the machine object (we will not
 # actually be running it...).
-class DummyRuntime(object):
+class DummyRuntime:
     def __init__(self):
         self.analyzer = mock.MagicMock()
         self.completer = mock.MagicMock()
@@ -185,7 +185,7 @@ def main():
     print(g.to_string().strip())
 
     g.write(options.filename, format=options.format)
-    print("Created %s at '%s'" % (options.format, options.filename))
+    print("Created {} at '{}'".format(options.format, options.filename))
 
     # To make the svg more pretty use the following:
     # $ xsltproc ../diagram-tools/notugly.xsl ./states.svg > pretty-states.svg
diff -pruN 5.12.0-2/tools/subunit_trace.py 6.0.2-2/tools/subunit_trace.py
--- 5.12.0-2/tools/subunit_trace.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/tools/subunit_trace.py	2025-08-14 03:01:40.000000000 +0000
@@ -36,7 +36,7 @@ RESULTS = {}
 class Starts(testtools.StreamResult):
 
     def __init__(self, output):
-        super(Starts, self).__init__()
+        super().__init__()
         self._output = output
 
     def startTestRun(self):
@@ -46,7 +46,7 @@ class Starts(testtools.StreamResult):
     def status(self, test_id=None, test_status=None, test_tags=None,
                runnable=True, file_name=None, file_bytes=None, eof=False,
                mime_type=None, route_code=None, timestamp=None):
-        super(Starts, self).status(
+        super().status(
             test_id, test_status,
             test_tags=test_tags, runnable=runnable, file_name=file_name,
             file_bytes=file_bytes, eof=eof, mime_type=mime_type,
@@ -146,7 +146,7 @@ def print_attachments(stream, test, all_
             detail.content_type.type = 'text'
         if (all_channels or name in channels) and detail.as_text():
             title = "Captured %s:" % name
-            stream.write("\n%s\n%s\n" % (title, ('~' * len(title))))
+            stream.write("\n{}\n{}\n".format(title, ('~' * len(title))))
             # indent attachment lines 4 spaces to make them visually
             # offset
             for line in detail.as_text().split('\n'):
@@ -174,20 +174,20 @@ def show_outcome(stream, test, print_fai
 
     if status == 'fail':
         FAILS.append(test)
-        stream.write('{%s} %s [%s] ... FAILED\n' % (
+        stream.write('{{{}}} {} [{}] ... FAILED\n'.format(
             worker, name, duration))
         if not print_failures:
             print_attachments(stream, test, all_channels=True)
     elif not failonly:
         if status == 'success':
-            stream.write('{%s} %s [%s] ... ok\n' % (
+            stream.write('{{{}}} {} [{}] ... ok\n'.format(
                 worker, name, duration))
             print_attachments(stream, test)
         elif status == 'skip':
-            stream.write('{%s} %s ... SKIPPED: %s\n' % (
+            stream.write('{{{}}} {} ... SKIPPED: {}\n'.format(
                 worker, name, test['details']['reason'].as_text()))
         else:
-            stream.write('{%s} %s [%s] ... %s\n' % (
+            stream.write('{{{}}} {} [{}] ... {}\n'.format(
                 worker, name, duration, test['status']))
             if not print_failures:
                 print_attachments(stream, test, all_channels=True)
@@ -240,8 +240,8 @@ def worker_stats(worker):
 
 def print_summary(stream):
     stream.write("\n======\nTotals\n======\n")
-    stream.write("Run: %s in %s sec.\n" % (count_tests('status', '.*'),
-                                           run_time()))
+    stream.write("Run: {} in {} sec.\n".format(count_tests('status', '.*'),
+                                               run_time()))
     stream.write(" - Passed: %s\n" % count_tests('status', 'success'))
     stream.write(" - Skipped: %s\n" % count_tests('status', 'skip'))
     stream.write(" - Failed: %s\n" % count_tests('status', 'fail'))
diff -pruN 5.12.0-2/tox.ini 6.0.2-2/tox.ini
--- 5.12.0-2/tox.ini	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-2/tox.ini	2025-08-14 03:01:40.000000000 +0000
@@ -27,6 +27,8 @@ commands =
 allowlist_externals =
   find
   ./setup-etcd-env.sh
+passenv =
+  ETCD_VERSION
 
 [testenv:update-states]
 deps =
