diff -pruN 5.12.0-2/.gitignore 6.0.2-0ubuntu1/.gitignore
--- 5.12.0-2/.gitignore	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/.gitignore	1970-01-01 00:00:00.000000000 +0000
@@ -1,72 +0,0 @@
-*.py[cod]
-
-# C extensions
-*.so
-
-# Packages
-*.egg
-*.eggs
-*.egg-info
-dist
-build
-eggs
-parts
-bin
-var
-sdist
-develop-eggs
-.installed.cfg
-lib
-lib64
-
-# Installer logs
-pip-log.txt
-
-# Unit test / coverage reports
-.coverage*
-.diagram-tools/*
-.tox
-nosetests.xml
-.venv
-cover
-.stestr/
-htmlcov
-
-# Translations
-*.mo
-
-# Mr Developer
-.mr.developer.cfg
-.project
-.pydevproject
-.settings
-
-# DS_STORE
-.DS_Store
-
-# Sqlite databases
-*.sqlite
-
-# Modified Files
-*.swp
-
-# PBR
-build
-AUTHORS
-ChangeLog
-
-# doc
-doc/build/
-.diagram-tools/
-
-.idea
-env
-
-# files created by releasenotes build
-RELEASENOTES.rst
-releasenotes/notes/reno.cache
-releasenotes/build
-
-# Generated by etcd
-etcd-v*
-default.etcd
diff -pruN 5.12.0-2/.gitreview 6.0.2-0ubuntu1/.gitreview
--- 5.12.0-2/.gitreview	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/.gitreview	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-[gerrit]
-host=review.opendev.org
-port=29418
-project=openstack/taskflow.git
diff -pruN 5.12.0-2/.pre-commit-config.yaml 6.0.2-0ubuntu1/.pre-commit-config.yaml
--- 5.12.0-2/.pre-commit-config.yaml	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/.pre-commit-config.yaml	2025-08-25 12:49:32.000000000 +0000
@@ -24,3 +24,8 @@ repos:
       - id: hacking
         additional_dependencies: []
         exclude: '^(doc|releasenotes|tools)/.*$'
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.18.0
+    hooks:
+      - id: pyupgrade
+        args: [--py3-only]
diff -pruN 5.12.0-2/.zuul.yaml 6.0.2-0ubuntu1/.zuul.yaml
--- 5.12.0-2/.zuul.yaml	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/.zuul.yaml	2025-08-25 12:49:32.000000000 +0000
@@ -6,6 +6,7 @@
     vars:
       tox_envlist: functional
     irrelevant-files:
+      - ^\.gitreview$
       - ^.*\.rst$
       - ^doc/.*$
       - ^LICENSE$
diff -pruN 5.12.0-2/AUTHORS 6.0.2-0ubuntu1/AUTHORS
--- 5.12.0-2/AUTHORS	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/AUTHORS	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1,122 @@
+Adam Harwell <flux.adam@gmail.com>
+Alexander Gorodnev <agorodnev@griddynamics.com>
+Anastasia Karpinska <akarpinska at griddynamics.com>
+Anastasia Karpinska <akarpinska@griddynamics.com>
+Andreas Jaeger <aj@suse.com>
+Angus Salkeld <asalkeld@redhat.com>
+Ann Kamyshnikova <akamyshnikova@mirantis.com>
+Ann Taraday <akamyshnikova@mirantis.com>
+Atsushi SAKAI <sakaia@jp.fujitsu.com>
+Balaji Narayanan <lists@balajin.net>
+Ben Nemec <bnemec@redhat.com>
+Brett Delle Grazie <brett.dellegrazie@gmail.com>
+Brian Jarrett <celttechie@gmail.com>
+ChangBo Guo(gcb) <eric.guo@easystack.cn>
+Changbin Liu <changbl@research.att.com>
+Christian Berendt <berendt@b1-systems.de>
+Chuck Short <chuck.short@canonical.com>
+Corey Bryant <corey.bryant@canonical.com>
+Cyril Roelandt <cyril.roelandt@enovance.com>
+Cyril Roelandt <cyril@redhat.com>
+Dan Krause <dan.krause@rackspace.com>
+Daniel Bengtsson <dbengt@redhat.com>
+Davanum Srinivas <davanum@gmail.com>
+Dirk Mueller <dirk@dmllr.de>
+Doug Hellmann <doug.hellmann@dreamhost.com>
+Doug Hellmann <doug@doughellmann.com>
+Elod Illes <elod.illes@est.tech>
+Eric Harney <eharney@redhat.com>
+Flavio Percoco <flaper87@gmail.com>
+Fredrik Bergroth <fbergroth@gmail.com>
+Gevorg Davoian <gdavoian@mirantis.com>
+Ghanshyam <gmann@ghanshyammann.com>
+Ghanshyam Mann <gmann@ghanshyammann.com>
+Greg Hill <greg.hill@rackspace.com>
+Gregory Thiemonge <gthiemon@redhat.com>
+Ha Manh Dong <donghm@vn.fujitsu.com>
+Hervé Beraud <hberaud@redhat.com>
+Ihar Hrachyshka <ihrachys@redhat.com>
+Ivan A. Melnikov <imelnikov@griddynamics.com>
+Ivan A. Melnikov <iv@altlinux.org>
+Ivan Kolodyazhny <e0ne@e0ne.info>
+Ivan Melnikov <iv@altlinux.org>
+James Page <james.page@ubuntu.com>
+Jay S. Bryant <jsbryant@us.ibm.com>
+Jeremy Stanley <fungi@yuggoth.org>
+Jessica Lucci <jessica.lucci@rackspace.com>
+Ji-Wei <ji.wei3@zte.com.cn>
+Joe Gordon <joe.gordon0@gmail.com>
+Jose Castro Leon <jose.castro.leon@cern.ch>
+Joshua Harlow <harlowja@gmail.com>
+Joshua Harlow <harlowja@yahoo-inc.com>
+Joshua Harlow <jxharlow@godaddy.com>
+Kevin Chen <kevin.chen@rackspace.com>
+Luong Anh Tuan <tuanla@vn.fujitsu.com>
+Manish Godara <manishg@yahoo-inc.com>
+Matthew Thode <mthode@mthode.org>
+Michael Johnson <johnsomor@gmail.com>
+Michal Arbet <michal.arbet@ultimum.io>
+Min Pae <sputnik13@gmail.com>
+Monty Taylor <mordred@inaugust.com>
+Olga Kopylova <olkonami@gmail.com>
+Ondřej Nový <ondrej.novy@firma.seznam.cz>
+OpenStack Release Bot <infra-root@openstack.org>
+Pablo Iranzo Gómez <Pablo.Iranzo@gmail.com>
+Pavlo Shchelokovskyy <shchelokovskyy@gmail.com>
+Rafael Rivero <rafael@cloudscaling.com>
+Rick van de Loo <rickvandeloo@gmail.com>
+Sahid Orentino Ferdjaoui <sahid.ferdjaoui@canonical.com>
+Sascha Peilicke <speilicke@suse.com>
+Sean McGinnis <sean.mcginnis@gmail.com>
+Sriram Madapusi Vasudevan <sriram@klusterkloud.com>
+Stanislav Kudriashev <skudriashev@griddynamics.com>
+Stanislav Kudriashev <stas.kudriashev@gmail.com>
+Stephen Finucane <stephenfin@redhat.com>
+Suneel Bomminayuni <suneelb@yahoo-inc.com>
+Takashi Kajinami <kajinamit@oss.nttdata.com>
+Takashi Kajinami <tkajinam@redhat.com>
+Theodoros Tsioutsias <theodoros.tsioutsias@cern.ch>
+Thomas Bechtold <tbechtold@suse.com>
+Thomas Bechtold <thomasbechtold@jpberlin.de>
+Timofey Durakov <tdurakov@mirantis.com>
+Tobias Urdin <tobias.urdin@binero.com>
+Tony Breeds <tony@bakeyournoodle.com>
+Victor Rodionov <vito.ordaz@gmail.com>
+Vilobh Meshram <vilobhmm@yahoo-inc.com>
+Vu Cong Tuan <tuanvc@vn.fujitsu.com>
+XiaojueGuan <guanalbertjone@gmail.com>
+YAMAMOTO Takashi <yamamoto@valinux.co.jp>
+Zhao Lei <zhaolei@cn.fujitsu.com>
+Zhihai Song <zhihai.song@easystack.cn>
+baiwenteng <baiwenteng@inspur.com>
+chenghuiyu <yuchenghui@unionpay.com>
+gecong1973 <ge.cong@zte.com.cn>
+gengchc2 <geng.changcai2@zte.com.cn>
+haobing1 <hao.bing1@zte.com.cn>
+howardlee <lihongweibj@inspur.com>
+ji-xuepeng <ji.xuepeng@zte.com.cn>
+jiansong <jiansong19941206@gmail.com>
+leizhang <lei.a.zhang@intel.com>
+lin-hua-cheng <os.lcheng@gmail.com>
+liuqing <jing.liuqing@99cloud.net>
+liuwei <liuw147@chinaunicom.cn>
+ljhuang <huang.liujie@99cloud.net>
+luke.li <lilu7189@fiberhome.com>
+maaoyu <maaoyu@inspur.com>
+melissaml <ma.lei@99cloud.net>
+niuke <niuke19970315@163.com>
+qinchunhua <qin.chunhua@zte.com.cn>
+rahulram <rahurama@cisco.com>
+skudriashev <skudriashev@griddynamics.com>
+sunjia <sunjia@inspur.com>
+ting.wang <ting.wang@easystack.cn>
+tonytan4ever <tonytan198211@gmail.com>
+venkatamahesh <venkatamaheshkotha@gmail.com>
+wangqi <wang.qi@99cloud.net>
+weiweigu <gu.weiwei@zte.com.cn>
+wu.shiming <wushiming@yovole.com>
+xhzhf <guoyongxhzhf@163.com>
+xuanyandong <xuanyandong@inspur.com>
+yangxurong <yangxurong@huawei.com>
+zhang.lei <zhang.lei@99cloud.net>
+zhangzs <zhangzhaoshan@inspur.com>
diff -pruN 5.12.0-2/ChangeLog 6.0.2-0ubuntu1/ChangeLog
--- 5.12.0-2/ChangeLog	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/ChangeLog	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1,2179 @@
+CHANGES
+=======
+
+6.0.2
+-----
+
+* Bump etcd to 3.5.x
+* Allow overriding etcd version in tests
+* fix(conductor): add stop and wait methods to base
+
+6.0.1
+-----
+
+* Fix skipped database persistence tests
+* Fix duplicate table\_name argument
+
+6.0.0
+-----
+
+* add pyproject.toml to support pip 23.1
+* Add release note for mask keys
+* Add mask keys parameters to failure logging
+* Run pyupgrade to clean up Python 2 syntaxes
+* redis: Omit ssl options when ssl is disabled
+* Skip functional tests for .gitreview update
+* Remove process executor
+* Keep taskflow doc job on Ubuntu Jammy
+* Update master for stable/2025.1
+* Use oslo.utils implementation to parse sentinel address
+* Sets indexes for all tables in taskflow as unique
+
+5.12.0
+------
+
+
+5.11.0
+------
+
+* Skip installation to speed up pep8
+* Deprecate utility for eventlet
+* fix typo
+* Skip functional jobs for license file update
+* pre-commit: Bump versions
+* Remove "test" extra
+* Sync test requirements
+* Remove zake from runtime dependency
+* Read pifpaf environments to detect service ports
+* Drop unused constants
+* Bump etcd in tests
+* Replace oslo\_utils.encodeutils.exception\_to\_unicode
+* Bump pylint
+* Skip functional tests for doc update
+* reno: Update master for unmaintained/2023.1
+
+5.10.0
+------
+
+* Add note about requirements lower bounds
+* Remove Python 3.8 support
+* Declare Python 3.12 support
+* Remove unused usage of pkg\_resources
+* Drop SQLALCHEMY\_WARN\_20
+* Update master for stable/2024.2
+
+5.9.1
+-----
+
+
+5.9.0
+-----
+
+* Follow-up of "Add Etcd backend for jobboard"
+* Add functional test with etcd
+* Add Etcd backend for jobboard
+
+5.8.0
+-----
+
+* Python3.12: disable process\_executor, which depends on asyncore
+* Fix REVERT\_ALL with Retries in unordered Flows
+* Remove SQLAlchemy tips jobs
+* Remove old excludes
+* Remove assertRaisesRegex
+* Replace deprecated Engine.execute
+
+5.7.0
+-----
+
+* Add functional job with redis enabled
+* db: Don't rely on branched connections
+* Fix broken unit tests for redis jobboard driver
+* Fix update-states target
+* Revert "Use consistent credential for Redis and Redis Sentinel"
+* Update master for stable/2024.1
+* reno: Update master for unmaintained/victoria
+
+5.5.0
+-----
+
+* Prevent potential ReDoS attack
+* redis: Support fallback servers
+* Use consistent credential for Redis and Redis Sentinel
+* redis: Enable SSL for sentinel
+* redis: Add username
+* Bump hacking
+* Replace deprecated perl-style regex
+* Cleanup setup.py and requirements
+* Update python classifier in setup.cfg
+* coveragerc: Remove non-existent path
+* Fix python shebang
+* Avoid endless backtraces on StorageFailure
+* Fix incorrect handling of storage exceptions
+* bindep: Use new mysql-\* package names
+* Update master for stable/2023.2
+
+5.4.0
+-----
+
+* Update redis dep to >=3.0.0
+
+5.3.0
+-----
+
+* Add job to test with SQLAlchemy master (2.x)
+* db: Don't rely on autocommit behavior
+* tests: Enable SQLAlchemy 2.0 deprecation warnings
+* tests: Use valid IPv6 address
+
+5.2.0
+-----
+
+* Revert "Moves supported python runtimes from version 3.8 to 3.10"
+* Moves supported python runtimes from version 3.8 to 3.10
+* Upgrade development status
+* Fix doc building with Sphinx 6.0
+* Prepare taskflow for sqlalchemy2
+* Update master for stable/2023.1
+* Fix parsing of zookeeper jobboard backend options
+
+5.1.0
+-----
+
+* Fix test\_while\_is\_not with python 3.11
+* Change StrictRedis usage to Redis
+* remove unicode prefix from code
+* Adapt to new jsonschema versions
+* Replace abc.abstractproperty with property and abc.abstractmethod
+
+5.0.0
+-----
+
+* Quote string representations
+* Fix formattiing of release list
+* Remove six
+* Drop python3.6/3.7 support in testing runtime
+* Delete the job from backend if it cannot be consumed
+
+4.7.0
+-----
+
+* Update CI to use unversioned jobs template
+* Fix atomdetails failure column size
+* Fix unit tests
+
+4.6.4
+-----
+
+* Handle invalid redis entries in RedisJobBoard
+* Fix minor typo in ActionEngine exception message
+* Use LOG.warning instead of deprecated LOG.warn
+
+4.6.3
+-----
+
+
+4.6.2
+-----
+
+* Replace deprecated import of ABCs from collections
+* Use custom JSONType columns
+
+4.6.1
+-----
+
+* Updating for OFTC IRC network
+* Fix flowdetails meta size
+* Use unittest.mock instead of mock
+* setup.cfg: Replace dashes with underscores
+* Move flake8 as a pre-commit local target
+* Remove lower-constraints remnants
+
+4.6.0
+-----
+
+* Fix deprecated Alembic function args
+* Dropping lower constraints testing
+* Use TOX\_CONSTRAINTS\_FILE
+* Use py3 as the default runtime for tox
+* Add Python3 wallaby unit tests
+* Update master for stable/victoria
+* ignore reno generated artifacts
+* Adding pre-commit
+
+4.5.0
+-----
+
+* [goal] Migrate testing to ubuntu focal
+
+4.4.0
+-----
+
+* Avoid endless loop on StorageFailure
+* Add sentinel redis support
+* Switch from unittest2 compat methods to Python 3.x methods
+
+4.3.1
+-----
+
+* Make test-setup.sh compatible with mysql8
+
+4.3.0
+-----
+
+* Stop to use the \_\_future\_\_ module
+
+4.2.0
+-----
+
+* Switch to newer openstackdocstheme and reno versions
+* Cap jsonschema 3.2.0 as the minimal version
+* Import modules, not classes
+* Bump default tox env from py37 to py38
+* Add py38 package metadata
+* Add release notes links to doc index
+* Drop use of deprecated collections classes
+* Add Python3 victoria unit tests
+* Update master for stable/ussuri
+
+4.1.0
+-----
+
+* Zookeeper backend SSL support
+
+4.0.0
+-----
+
+* [ussuri][goal] Drop python 2.7 support and testing
+
+3.8.0
+-----
+
+* Switch to Ussuri jobs
+* Update TaskFlow for networkx 2.x
+* Update master for stable/train
+* Fix python3.8 hmac compatibility
+
+3.7.1
+-----
+
+* Use mysql LONGTEXT for atomdetails results
+* Add Python 3 Train unit tests
+* Add local bindep.txt
+* Remove unused tools/tox\_install.sh
+
+3.7.0
+-----
+
+* update git.openstack.org to opendev
+* Dropping the py35 testing
+* Remove debtcollector requirement
+* Update Sphinx requirement
+
+3.6.0
+-----
+
+* Remove unsused tools/tox\_install.sh
+* Handle collections.abc deprecations
+* Uncap jsonschema
+* OpenDev Migration Patch
+* Update master for stable/stein
+* add python 3.7 unit test job
+
+3.4.0
+-----
+
+* Move test requirements out of runtime requirements
+* Change openstack-dev to openstack-discuss
+
+3.3.1
+-----
+
+* Update doc/conf.py to avoid warnings with sphinx 1.8
+* Use templates for cover and lower-constraints
+* Remove the duplicated word
+* Fix a symbol error
+* Create KazooClient with taskflow logger
+* add lib-forward-testing-python3 test job
+* add python 3.6 unit test job
+* add proper pydot3 dependency
+* import zuul job settings from project-config
+* Switch to use stestr for unit test
+* Add pydot test dependency
+* Remove PyPI downloads
+* Update reno for stable/rocky
+* Update various links in docs
+
+3.2.0
+-----
+
+* Remove unused link target
+* Fix code to support networkx > 1.0
+* add release notes to README.rst
+* replace http with https
+* Update links in README
+* fix tox python3 overrides
+* Drop py34 target in tox.ini
+* Uncap networkx
+* give pep8 and docs environments all of the dependencies they need
+* Trivial: update pypi url to new url
+* Fix doc build
+* Trivial: Update pypi url to new url
+* stop using tox\_install.sh
+* only run doc8 as part of pep8 test job
+* standardize indentation in tox.ini
+* set default python to python3
+* don't let tox\_install.sh error if there is nothing to do
+* Updated from global requirements
+* add lower-constraints job
+* Updated from global requirements
+* Fix invalid json unit test
+* Update reno for stable/queens
+* Updated from global requirements
+* Updated from global requirements
+* Updated from global requirements
+* Updated from global requirements
+
+3.1.0
+-----
+
+* Updated from global requirements
+* Add doc8 to pep8 environment
+* Use doc/requirements.txt
+
+3.0.1
+-----
+
+
+3.0.0
+-----
+
+* Remove setting of version/release from releasenotes
+* Updated from global requirements
+* Updated from global requirements
+* Updated from global requirements
+* Remove class StopWatch from timing
+
+2.17.0
+------
+
+
+2.16.0
+------
+
+* Updated from global requirements
+* Updated from global requirements
+* Update "indentify" to "identify" in comments
+
+2.15.0
+------
+
+* Updated from global requirements
+* Remove method blather in log adapter
+* Remove kwarg timeout in executor conductor
+* Updated from global requirements
+* Avoid log warning when closing is underway (on purpose)
+* Update reno for stable/pike
+* Updated from global requirements
+
+2.14.0
+------
+
+* Updated from global requirements
+* Update URLs in documents according to document migration
+* Updated from global requirements
+* Fix process based executor task proxying-back events
+* turn on warning-is-error in doc build
+* switch from oslosphinx to openstackdocstheme
+* rearrange existing documentation into the new standard layout
+* Updated from global requirements
+
+2.13.0
+------
+
+* Updated from global requirements
+* Fix html\_last\_updated\_fmt for Python3
+* Replace assertRaisesRegexp with assertRaisesRegex
+
+2.12.0
+------
+
+* Updated from global requirements
+* Updated from global requirements
+* do not allow redis job reclaim by same owner
+
+2.11.0
+------
+
+* Fix py35 test failure
+* Stop using oslotest.mockpatch
+* Updated from global requirements
+* python3.0 has deprecated LOG.warn
+
+2.10.0
+------
+
+* Updated from global requirements
+* Updated from global requirements
+* Updated from global requirements
+* Prepare for using standard python tests
+* Use https instead of http for git.openstack.org
+* Updated from global requirements
+* Update reno for stable/ocata
+* Protect storage better against external concurrent access
+
+2.9.0
+-----
+
+* Remove dep on monotonic
+* Rename engine analyzer to be named selector
+* Update author and author-email
+* Updated from global requirements
+* Updated from global requirements
+* Add Constraints support
+* Show team and repo badges on README
+
+2.8.0
+-----
+
+* Replaces uuid.uuid4 with uuidutils.generate\_uuid()
+* Updated from global requirements
+* Remove vim header from source files
+* Fix release notes gate job failure
+* Updated from global requirements
+* Use assertIs(Not)None to check for None
+* Fix typo in tox.ini
+* Fix broken link
+* Replace retrying with tenacity
+* Updated from global requirements
+* Add reno for release notes management
+* Updated from global requirements
+
+2.7.0
+-----
+
+* Changed the home-page link
+* Using assertIsNone() instead of assertIs(None, ..)
+* Updated from global requirements
+* Fix a typo in documentation
+* Fix typo: remove redundant 'that'
+* Updated from global requirements
+* Fix a typo in logging.py
+* Use method ensure\_tree from oslo.utils
+* Make failure formatter configurable for DynamicLoggingListener
+* Updated from global requirements
+* Some classes not define \_\_ne\_\_() built-in function
+
+2.6.0
+-----
+
+
+2.5.0
+-----
+
+* Updated from global requirements
+* Add logging around metadata, ignore tallying + history
+
+2.4.0
+-----
+
+* Updated from global requirements
+* Start to add a location for contributed useful tasks/flows/more
+* Change dependency to use flavors
+* Updated from global requirements
+* Remove white space between print and ()
+* Updated from global requirements
+* Add Python 3.5 classifier and venv
+* Replace assertEqual(None, \*) with assertIsNone in tests
+
+2.3.0
+-----
+
+* Updated from global requirements
+* remove unused LOG
+* Fixes: typo error in comments
+* Updated from global requirements
+* Fix some misspellings in the function name and descriptions
+* Updated from global requirements
+
+2.2.0
+-----
+
+* Don't use deprecated method timeutils.isotime
+* Add tests to verify kwargs behavior on revert validation
+* Make tests less dependent on transient state
+
+2.1.0
+-----
+
+* Updated from global requirements
+* Ensure the fetching jobs does not fetch anything when in bad state
+* Updated from global requirements
+* Use the full 'get\_execute\_failures' vs the shortname
+* Split revert/execute missing args messages
+* Updated from global requirements
+* Instead of a multiprocessing queue use sockets via asyncore
+* Add a simple sanity test for pydot outputting
+
+2.0.0
+-----
+
+* Updated from global requirements
+* Fix documentation related to missing BaseTask class
+* Remove deprecated things for 2.0 release
+* Always used the library packaged mock
+
+1.32.0
+------
+
+* Attempt to cancel active futures when suspending is underway
+* Allow for specifying green threaded to parallel engine
+* Make conductor.stop stop the running engine gracefully
+
+1.31.0
+------
+
+* Updated from global requirements
+* Don't set html\_last\_updated\_fmt without git
+* Updated from global requirements
+* Add the ability to skip resolving from activating
+* Fix export\_to\_dot for networkx package changes
+* Ensure upgrade for sqlalchemy is protected by a lock
+* Add periodic jobboard refreshing (incase of sync issues)
+* Fallback if git is absent
+* Allow for revert to have a different argument list from execute
+
+1.30.0
+------
+
+* Updated from global requirements
+* Use a automaton machine for WBE request state machine
+* Sqlalchemy-utils double entry (already in test-requirements.txt)
+
+1.29.0
+------
+
+* Updated from global requirements
+* Refactor Atom/BaseTask/Task/Retry class hierarchy
+* Add missing direct dependency for sqlalchemy-utils
+
+1.28.0
+------
+
+* Add WBE worker expiry
+* Some WBE protocol/executor cleanups
+* Remove need for separate notify thread
+* Updated from global requirements
+* Don't bother scanning for workers if no new messages arrived
+* Updated from global requirements
+* Updated from global requirements
+* Updated from global requirements
+* Allow cachedproperty to avoid locking
+* Spice up WBE banner and add simple worker \_\_main\_\_ entrypoint
+
+1.27.0
+------
+
+* Updated from global requirements
+* Fix for WBE sporadic timeout of tasks
+* Add some basic/initial engine statistics
+* Handle cases where exc\_args can't be serialized as JSON in the WBE
+* Enable OS\_LOG\_CAPTURE so that logs can be seen (on error)
+* Retrieve the store from flowdetails as well, if it exists
+* Disable oslotest LOG capturing
+* Updated from global requirements
+* Updated from global requirements
+* Use helper function for post-atom-completion work
+* Ensure that the engine finishes up even under sent-in failures
+* 99 bottles example trace logging was not being output
+* Add useful/helpful comment to retry scheduler
+* Updated from global requirements
+* Updated from global requirements
+* Replace clear zookeeper python with clear zookeeper bash
+* Remove stray LOG.blather
+
+1.26.0
+------
+
+* Some additional engine logging
+* Replace deprecated library function os.popen() with subprocess
+* Add comment as to why we continue when tallying edge decider nay voters
+* Add rundimentary and/or non-optimized job priorities
+* Allow for alterations in decider 'area of influence'
+* Fix wrong usage of iter\_utils.unique\_seen
+* Updated from global requirements
+* Updated from global requirements
+* Updated from global requirements
+* Use the retrying lib. to do basic sqlalchemy engine validation
+* For taskflow patterns don't show taskflow.patterns prefix
+* Rename '\_emit' -> '\_try\_emit' since it is best-effort (not ensured)
+* Cache atom name -> actions and provide accessor function
+* Quote/standardize atom name output
+* Use shared util helper for driver name + config extraction
+* Fix currently broken and inactive mysql tests
+* Trap and expose exception any 'args'
+* Revert "Remove failure version number"
+* Move all internal blather usage/calls to trace usage/calls
+* Start rename of BLATHER -> TRACE
+* Add ability of job poster/job iterator to wait for jobs to complete
+* Updated from global requirements
+* Use 'match\_type' utility function instead of staticmethod
+* Remove failure version number
+* Translate kazoo exceptions into job equivalents if register\_entity fails
+* Change name of misc.ensure\_dict to misc.safe\_copy\_dict
+* Avoid recreating notify details for each dispatch iteration
+* fix doc change caused by the change of tooz
+* Deprecated tox -downloadcache option removed
+* Updated from global requirements
+* Add some useful commentary on rebinding processes
+* Use small helper routine to fetch atom metadata entries
+* Remove 'MANIFEST.in'
+* Pass through run timeout in engine run()
+* Change engine 'self.\_check' into a decorator
+
+1.25.0
+------
+
+* Move validation of compiled unit out of compiler
+* Allow provided flow to be empty
+* Move engine options extraction to \_\_init\_\_ methods
+* Updated from global requirements
+* Updated from global requirements
+* Convert executor proxied engine options into their correct type
+* Enable conversion of the tree nodes into a digraph
+* Use the misc.ensure\_dict helper in conductor engine options saving
+* Add optional 'defer\_reverts' behavior
+* Add public property from storage to flowdetail.meta
+* Adding notification points for job completion
+* Remove python 2.6 and cleanup tox.ini
+* Correctly apply deciders across flow boundaries
+* Move 'convert\_to\_timeout' to timing type as a helper function
+* Use conductor entity class constant instead of raw string
+* Add a executor backed conductor and have existing impl. use it
+* Add flow durations to DurationListener
+* Update docstrings on entity type
+* Move 'fill\_iter' to 'iter\_utils.fill'
+
+1.24.0
+------
+
+* Updated from global requirements
+* Updated from global requirements
+* Register conductor information on jobboard
+* Add atom priority ability
+* Add validation of base exception type(s) in failure type
+* Fix order of assertEqual for unit.test\_\*
+* Fix order of assertEqual for unit.worker\_based
+* Fix order of assertEqual for unit.persistence
+* Fix order of assertEqual for unit.patterns
+* Fix order of assertEqual for unit.jobs
+* Fix order of assertEqual for unit.action\_engine
+
+1.23.0
+------
+
+* Updated from global requirements
+* feat: add max\_dispatches arg to conductor's run
+* Ensure node 'remove' and 'disassociate' can not be called when frozen
+* Add in-memory backend delete() in recursive/non-recursive modes
+* Use batch 'get\_atoms\_states' where we can
+* Use automaton's converters/pydot
+* Make more of the WBE logging and '\_\_repr\_\_' message more useful
+* Fix bad sphinx module reference
+* Relabel internal engine 'event' -> 'outcome'
+* No need for Oslo Incubator Sync
+* Use the node built-in 'dfs\_iter' instead of recursion
+
+1.22.0
+------
+
+* Simplify flow action engine compilation
+* Fix 'dependened upon' spelling error
+* docs - Set pbr warnerrors option for doc build
+* Rename 'history' -> 'Release notes'
+* Remove dummy/placeholder 'ChangeLog' as its not needed
+* Remove ./taskflow/openstack/common as it no longer exists
+* Remove quotes from subshell call in bash script
+* Refactor common parts of 'get\_maybe\_ready\_for' methods
+* Fix the sphinx build path in .gitignore file
+* Change ignore-errors to ignore\_errors
+* Use graphs as the underlying structure of patterns
+* Updated from global requirements
+* Fix '\_cache\_get' multiple keyword argument name overlap
+* Use the sqlalchemy-utils json type instead of our own
+
+1.21.0
+------
+
+* Updated from global requirements
+* Fix how the dir persistence backend was not listing logbooks
+* Explain that jobs arch. diagram is only for zookeeper
+
+1.20.0
+------
+
+* Updated from global requirements
+* iter\_nodes method added to flows
+* Updated from global requirements
+* Use 'iter\_utils.count' to determine how many unfinished nodes left
+* Fix flow states link
+* Avoid running this example if zookeeper is not found
+* Updated from global requirements
+* Have the storage class provide a 'change\_flow\_state' method
+
+1.19.0
+------
+
+* Updated from global requirements
+* Updated from global requirements
+* Add nicely made task structural diagram
+* Updated from global requirements
+* Remove some temporary variables not needed
+* Only remove all 'next\_nodes' that were done
+* Fix busted stevedore doc(s) link
+* Extend and improve failure logging
+* Improve docstrings in graph flow to denote exceptions raised
+* Enable testr OS\_DEBUG to be TRACE(blather) by default
+* Updated from global requirements
+* Show intermediary compilation(s) when BLATHER is enabled
+
+1.18.0
+------
+
+* Give the GC more of a break with regard to cycles
+* Base class for deciders
+* Remove extra runner layer and just use use machine in engine
+* Updated from global requirements
+* .gitignore update
+* Avoid adding 1 to a failure (if it gets triggered)
+* Replace the tree 'pformat()' recursion with non-recursive variant
+* Fix seven typos and one readability on taskflow documentation
+
+1.17.0
+------
+
+* Bump futurist and remove waiting code in taskflow
+* Use the action engine '\_check' helper method
+* Modify listeners to handle the results now possible from revert()
+* Remove no longer used '\_was\_failure' static method
+* Remove legacy py2.6 backwards logging compat. code
+* Updated from global requirements
+* Fix lack of space between functions
+* Create and use a serial retry executor
+* Just link to the worker engine docs instead of including a TOC inline
+* Link to run() method in engines doc
+* Add ability to reset an engine via a \`reset\` method
+
+1.16.0
+------
+
+* Updated from global requirements
+* Use 'addCleanup' instead of 'tearDown' in engine(s) test
+* Update 'make\_client' kazoo docs and link to them
+* Remove \*\*most\*\* usage of taskflow.utils in examples
+* Move doc8 to being a normal test requirement in test-requirements.txt
+* Updated from global requirements
+* Found another removal\_version=? that should be removal\_version=2.0
+* Add deprecated module(s) for prior FSM/table code-base
+* Replace internal fsm + table with automaton library
+* Remove direct usage of timeutils overrides and use fixture
+
+1.15.0
+------
+
+* Provide a deprecated alias for the now removed stop watch class
+* Update all removal\_version from being ? to being 2.0
+* Add deprecated and only alias modules for the moved types
+* Unify the zookeeper/redis jobboard iterators
+* Updated from global requirements
+* Run the '99\_bottles.py' demo at a fast rate when activated
+* Use io.open vs raw open
+* Retain atom 'revert' result (or failure)
+* Update the version on the old/deprecated logbook module
+* Add docs for u, v, decider on graph flow link method
+* Fix mock calls
+* Remove setup.cfg 'requires-python' incorrect entry
+* Compile lists of retry/task atoms at runtime compile time
+* Integrate futurist (and \*\*remove\*\* taskflow originating code)
+* Allow the 99\_bottles.py demo to run in BLATHER mode
+* Make currently implemented jobs use @functools.total\_ordering
+* Add more useful \`\_\_str\_\_\` to redis job
+* Show job posted and goodbye in 99\_bottles.py example
+* Rename logbook module -> models module
+* Notify on the individual engine steps
+
+1.14.0
+------
+
+* Expose strategies so doc generation can easily pick them up
+* Denote mail subject should be '[Oslo][TaskFlow]'
+* Add support for conditional execution
+* Use encodeutils for exception -> string function
+* Updated from global requirements
+* Build-out + test a redis backed jobboard
+
+0.13.0
+------
+
+* Just make the compiler object at \_\_init\_\_ time
+* Remove kazoo hack/fix for issue no longer needed
+* Add history.rst that uses generated 'ChangeLog' file
+* Add docstrings on runtime objects methods and link to them in docs
+
+0.12.0
+------
+
+* Updated from global requirements
+* Update states comment to refer to task section
+* Updated from global requirements
+* Remove 2.6 classifier + 2.6 compatibility code
+* Remove reference to 'requirements-pyN.txt' files
+* Add smarter/better/faster impl. of \`ensure\_atoms\`
+* Add bulk \`ensure\_atoms\` method to storage
+* Make it possible to see the queries executed (in BLATHER mode)
+* Add doc warning to engine components
+* Perform a few optimizations to decrease persistence interactions
+* Handle conductor ctrl-c more appropriately
+* Cache the individual atom schedulers at compile time
+* Split-off the additional retry states from the task states
+* Use the \`excutils.raise\_with\_cause\` after doing our type check
+* Updated from global requirements
+* Use monotonic lib. to avoid finding monotonic time function
+* Document more of the retry subclasses special keyword arguments
+
+0.11.0
+------
+
+* Address concurrent mutation of sqlalchemy backend
+* Add indestructible 99 bottles of beer example
+* Use alembic upgrade function/command directly
+* Updated from global requirements
+* Remove usage of deprecated 'task\_notifier' property in build\_car example
+* Add \`simple\_linear\_listening\` example to generated docs
+* Handy access to INFO level
+* Switch badges from 'pypip.in' to 'shields.io'
+* Adding a revert\_all option to retry controllers
+* A few jobboard documentation tweaks
+* Use sphinx deprecated docstring markup
+* Use a class constant for the default path based backend path
+* Updated from global requirements
+* Remove example not tested
+* Make the default file encoding a class constant with a docstring
+* Use a lru cache to limit the size of the internal file cache
+* Updated from global requirements
+* Use hash path lookup vs path finding
+* Remove all 'lock\_utils' now that fasteners provides equivalents
+* Add a new \`ls\_r\` method
+* Updated from global requirements
+* Refactor machine builder + runner into single unit
+* Replace lock\_utils lock(s) with fasteners package
+* Updated from global requirements
+* Use shared '\_check' function to check engine stages
+* Remove a couple more useless 'pass' keywords found
+* Add a test that checks for task result visibility
+* Remove testing using persistence sqlalchemy backend with 'mysqldb'
+* Remove customized pyX.Y tox requirements
+* Updated from global requirements
+* Allow same deps for requires and provides in task
+* Remove 'pass' usage not needed
+* Only show state transitions to logging when in BLATHER mode
+* Fix updated\_at column of sqlalchemy tables
+* Remove script already nuked from oslo-incubator
+* Ensure path\_based abstract base class is included in docs
+* Beef up docs on the logbook/flow detail/atom details models
+* Remove custom py26/py27 tox venvs no longer used
+* Executors come in via options config, not keyword arguments
+* Use newer versions of futures that adds exception tracebacks
+* Ensure empty paths raise a value error
+* Remove listener stack and replace with exit stack
+* Expose action engine no reraising states constants
+* Chain a few more exception raises that were previously missed
+* Expose in memory backend split staticmethod
+* Updated from global requirements
+* Remove tox py33 environment no longer used
+* Avoid creating temporary removal lists
+
+0.10.1
+------
+
+* Avoid trying to copy tasks results when cloning/copying
+* Avoid re-normalizing paths when following links
+* Add a profiling context manager that can be easily enabled
+* Updated from global requirements
+
+0.10.0
+------
+
+* Remove validation of state on state read property access
+* Make the default path a constant and tweak class docstring
+* Avoid duplicating exception message
+* Add speed-test tools script
+* Speed up memory backend via a path -> node reverse mapping
+* Updated from global requirements
+* Fix a typo in taskflow docs
+* Small refactoring of 'merge\_uri' utility function
+* Fix post coverage job option not recognized
+* Refactor/reduce shared 'ensure(task/retry)' code
+* Move implementations into their own sub-sections
+* Remove run\_cross\_tests.sh
+* Move zookeeper jobboard constants to class level
+* Retain chain of missing dependencies
+* Expose fake filesystem 'join' and 'normpath'
+* Add + use diagram explaining retry controller area of influence
+* Add openclipart.org conductor image to conductor docs
+* Use oslo\_utils eventletutils to warn about eventlet patching
+* Test more engine types in argument passing unit test
+* Add a conductor running example
+* Replace more instance(s) of exception chaining with helper
+* Avoid attribute error by checking executor for being non-none
+
+0.9.0
+-----
+
+* Validate correct exception subclass in 'raise\_with\_cause'
+* Remove link to kazoo eventlet handler
+* Add states generating venv and use pydot2
+* Add strict job state transition checking
+* Uncap library requirements for liberty
+* Have reset state handlers go through a shared list
+* Add job states in docs + states in python
+* Expose r/o listener callback + details filter callback
+* Expose listener notification type + docs
+* Ensure listener args are always a tuple/immutable
+* Include the 'dump\_memory\_backend' example in the docs
+* Make resolution/retry strategies more clear and better
+* Rename notifier 'listeners' to 'topics'
+* Mention link to states doc in notify state transitions
+* Ensure we don't get stuck in formatting loops
+* Add note about thread safety of fake filesystem
+* Have the notification/listener docs match other sections
+* Put semantics preservation section into note block
+* Note that the traditional mode also avoids this truncation issue
+* Avoid going into causes of non-taskflow exceptions
+* Use the ability to chain exceptions correctly
+* Add a example showing how to share an executor
+* Shrink the bookshelf description
+* Remove link about implementing job garbage binning
+* Make the storage layer more resilient to failures
+* Put the examples/misc/considerations under a new section
+* Add a suspension engine section
+
+0.8.1
+-----
+
+* Switch back to maxdepth 2
+* Allow ls() to list recursively (using breadth-first)
+* Make an attempt at having taskflow exceptions print causes better
+* fix renamed class to call super correctly
+* Turn 'check\_who' into a decorator
+* Use 'node' terminology instead of 'item' terminology
+* Remove 11635 bug reference
+* Allow providing a node stringify function to tree pformat
+* Add in memory filesystem clearing
+* Just unify having a single requirements.txt file
+* Fix a couple of spelling and grammar errors
+* Add memory backend get() support
+* Make the graph '\_unsatisfied\_requires' be a staticmethod
+* Add more comments to fake in-memory filesystem
+* Add a set of tests to the in memory fake filesystem
+
+0.8.0
+-----
+
+* Adding test to improve CaptureListener coverage
+* Prefer posixpath to os.path
+* By default use a in memory backend (when none is provided)
+* Allow using shallow copy instead of deep copy
+* Move to the newer debtcollector provided functions
+* Move to using the oslo.utils stop watch
+* Updated from global requirements
+* Ensure thread-safety of persistence dir backend
+* Ensure we are really setup before being connected
+* Ensure docstring on storage properties
+* Expose the storage backend being used
+* Use iteration instead of list(s) when extracting scopes
+* Use binary/encode decode helper routines in dir backend
+* Rename memory backend filesystem -> fake filesystem
+* Just let the future executors handle the max workers
+* Always return scope walker instances from \`fetch\_scopes\_for\`
+* Give the GC a break
+* Use the class name instead of the TYPE property in \_\_str\_\_
+* Just use the class name instead of TYPE constant
+* Ensure we have a 'coverage-package-name'
+* Attempt to extract traceback from exception
+* Use compatible map and update map/reduce task docs
+* Update engine docs with new validation stage
+* Ensure we register & deregister conductor listeners
+* Rename attribute '\_graph' to '\_execution\_graph'
+* Add a log statement pre-validation that dumps graph info
+* Have this example exit non-zero if incorrect results
+* Use a collections.namedtuple for the request work unit
+* Some small wbe engine doc tweaks
+* Add newline to avoid sphinx warning
+* Allow passing 'many\_handler' to fetch\_all function
+* Ensure event time listener is in listeners docs
+* Add a in-memory backend dumping example
+* Added a map and a reduce task
+* Restructure the in-memory node usage
+* Switch to non-namespaced module imports
+* Allow the storage unit to use the right scoping strategy
+* Just use the local conf variable
+* Put underscore in-front of alchemist helper
+* lazy loading for logbooks and flowdetails
+* Allow backend connection config (via fetch) to be a string
+* Add + use failure json schema validation
+* Use ordered[set/dict] to retain ordering
+* Allow injected atom args to be persisted
+* add \_listeners\_from\_job method to Conductor base
+* update uses of TimingListener to DurationListener
+* Added EventTimeListener to record when events occur
+* added update\_flow\_metadata method to Storage class
+* Retain nested causes where/when we can
+* Denote issue 17911 has been merged/accepted
+* Persistence backend refactor
+* Remove support for 3.3
+* Writers can now claim a read lock in ReaderWriterLock
+* Add another probabilistic rw-lock test
+* Add + use read/write lock decorators
+* Add no double writers thread test
+* Use condition context manager instead of acquire/release
+* Remove condition acquiring for read-only ops
+* Set a no-op functor when none is provided
+* Ensure needed locks is used when reading/setting intention
+* Specialize checking for overlaps
+* Use links instead of raw block quotes
+* Rename the timing listeners to duration listeners
+* Add a bookshelf developer section
+* Ensure the thread bundle stops in last to first order
+* Add warning about transient arguments and worker-based-engines
+* Ensure ordered set is pickleable
+* Add node removal/disassociate functions
+* Add a fully functional orderedset
+* Make the worker banner template part of the worker class
+* Use compilation helper objects
+* Allow node finding to not do a deep search
+* Add a frozen checking decorator
+* Tweak functor used to find flatteners/storage routines
+* Add specific scoping documentation
+* add jobboard trash method
+* Provide more contextual information about invalid periodics
+* Fix lookup scoping multi-match ordering
+* Stick to one space after a period
+* Refactor parts of the periodic worker
+* Use oslo.utils encodeutils for encode/decode functions
+* Bring over pretty\_tox.sh from nova/heat/others
+* Tweak some of the types thread safety docstrings
+* Add pypi link badges
+* Switch the note about process pool executor to warning
+* Chain exceptions correctly on py3.x
+* Updated from global requirements
+* Remove WBE experimental documentation note
+* Use the enum library for the retry strategy enumerations
+* Use debtcollector library to replace internal utility
+* add get\_flow\_details and get\_atom\_details to all backends
+* Tweaks to atom documentation
+* Update Flow::\_\_str\_\_
+* Add todo note for kombu pull request
+* Move 'provides' and 'name' to instance attributes
+* Allow loading conductors via entrypoints
+
+0.7.1
+-----
+
+* Revert "Add retries to fetching the zookeeper server version"
+* Allow turning off the version check
+* adding check for str/unicode type in requires
+* Make the dispatcher handler be an actual type
+* Add retries to fetching the zookeeper server version
+* Remove duplicate 'the' and link to worker engine section
+* Remove delayed decorator and replace with nicer method
+* Fix log statement
+* Make the atom class an abstract class
+* Improve multilock class and its associated unit test
+* Mark conductor 'stop' method deprecation kwarg with versions
+* Move to hacking 0.10
+* catch NotFound errors when consuming or abandoning
+* Use the new table length constants
+* Improve upon/adjust/move around new optional example
+* Clarify documentation related to inputs
+* Docstrings should document parameters return values
+* Let the multi-lock convert the provided value to a tuple
+* Map optional arguments as well as required arguments
+* Add a BFS tree iterator
+* DFS in right order when not starting at the provided node
+* Rework the sqlalchemy backend
+* Modify stop and add wait on conductor to prevent lockups
+* Default to using a thread-safe storage unit
+* Add warning to sqlalchemy backend size limit docs
+* Updated from global requirements
+* Use a thread-identifier that can't easily be recycled
+* Use a notifier instead of a direct property assignment
+* Tweak the WBE diagram (and present it as an svg)
+* Remove duplicate code
+* Improved diagram for Taskflow
+* Bump up the env\_builder.sh to 2.7.9
+* Add a capturing listener (for test or other usage)
+* Add + use a staticmethod to fetch the immediate callables
+* Just directly access the callback attributes
+* Use class constants during pformatting a tree node
+
+0.7.0
+-----
+
+* Abstract out the worker finding from the WBE engine
+* Add and use a nicer kombu message formatter
+* Remove duplicated 'do' in types documentation
+* Use the class defined constant instead of raw strings
+* Use kombu socket.timeout alias instead of socket.timeout
+* Stopwatch usage cleanup/tweak
+* Add note about publicly consumable types
+* Add docstring to wbe proxy to denote not for public use
+* Use monotonic time when/if available
+* Updated from global requirements
+* Link WBE docs together better (especially around arguments)
+* Emit a warning when no routing keys provided on publish()
+* Center SVG state diagrams
+* Use importutils.try\_import for optional eventlet imports
+* Shrink the WBE request transition SVG image size
+* Add a thread bundle helper utility + tests
+* Make all/most usage of type errors follow a similar pattern
+* Leave use-cases out of WBE developer documentation
+* Allow just specifying 'workers' for WBE entrypoint
+* Add comments to runner state machine reaction functions
+* Fix coverage environment
+* Use explicit WBE worker object arguments (instead of kwargs)
+* WBE documentation tweaks/adjustments
+* Add a WBE request state diagram + explanation
+* Tidy up the WBE cache (now WBE types) module
+* Fix leftover/remaining 'oslo.utils' usage
+* Show the failure discarded (and the future intention)
+* Use a class provided logger before falling back to module
+* Use explicit WBE object arguments (instead of kwargs)
+* Fix persistence doc inheritance hierarchy
+* The gathered runtime is for failures/not failures
+* add clarification re parallel engine
+* Increase robustness of WBE producer/consumers
+* Move implementation(s) to their own sections
+* Move the jobboard/job bases to a jobboard/base module
+* Have the serial task executor shutdown/restart its executor
+* Mirror the task executor methods in the retry action
+* Add back a 'eventlet\_utils' helper utility module
+* Use constants for runner state machine event names
+* Remove 'SaveOrderTask' and test state in class variables
+* Provide the stopwatch elapsed method a maximum
+* Fix unused and conflicting variables
+* Switch to using 'oslo\_serialization' vs 'oslo.serialization'
+* Switch to using 'oslo\_utils' vs 'oslo.utils'
+* Add executor statistics
+* Use oslo.utils reflection for class name
+* Add split time capturing to the stop watch
+* Use platform neutral line separator(s)
+* Create and use a multiprocessing sync manager subclass
+* Use a single sender
+* Updated from global requirements
+* Include the 'old\_state' in all currently provided listeners
+* Update the README.rst with accurate requirements
+* Include docstrings for parallel engine types/strings supported
+* The taskflow logger module does not provide a logging adapter
+* Send in the prior atom state on notification of a state change
+* Pass a string as executor in the example instead of an executor
+* Updated from global requirements
+* Fix for job consumption example using wrong object
+
+0.6.1
+-----
+
+* Remove need to inherit/adjust netutils split
+* Allow specifying the engine 'executor' as a string
+* Disallowing starting the executor when worker running
+* Use a single shared queue for an executors lifecycle
+* Avoid creating a temporary list(s) for tree type
+* Update statement around stopwatch thread safety
+* Register with 'ANY' in the cloned process
+* Add edge labels for engine states
+* Remove less than useful action\_engine \_\_str\_\_
+* Ensure manager started/shutdown/joined and reset
+* Return the same namedtuple that the future module returns
+* Add a simplistic hello world example
+* Get event/notification sending working correctly
+* Move the engine scoping test to its engines test folder
+* Get the basics of a process executor working
+* Move the persistence base to the parent directory
+* Correctly trigger 'on\_exit' of starting/initial state
+
+0.6.0
+-----
+
+* Add an example which shows how to send events out from tasks
+* Move over to using oslo.utils [reflection, uuidutils]
+* Rework the in-memory backend
+* Updated from global requirements
+* Add a basic map/reduce example to show how this can be done
+* Add a parallel table mutation example
+* Add a 'can\_be\_registered' method that checks before notifying
+* Base task executor should provide 'wait\_for\_any'
+* Replace autobind with a notifier module helper function
+* Cleanup some doc warnings/bad/broken links
+* Use the notifier type in the task class/module directly
+* Use a tiny clamp helper to clamp the 'on\_progress' value
+* Retain the existence of a 'EngineBase' until 0.7 or later
+* Remove the base postfix from the internal task executor
+* Remove usage of listener base postfix
+* Add a moved\_inheritable\_class deprecation helper
+* Avoid holding the lock while scanning for existing jobs
+* Remove the base postfix for engine abstract base class
+* Avoid popping while another entity is iterating
+* Updated from global requirements
+* Use explicit 'attr\_dict' when adding provider->consumer edge
+* Properly handle and skip empty intermediary flows
+* Ensure message gets processed correctly
+* Just assign a empty collection instead of copy/clear
+* Remove rtype from task clone() doc
+* Add and use a new simple helper logging module
+* Have the sphinx copyright date be dynamic
+* Add appropriate links into README.rst
+* Use condition variables using 'with'
+* Use an appropriate \`\`extract\_traceback\`\` limit
+* Allow all deprecation helpers to take a stacklevel
+* Correctly identify stack level in \`\`\_extract\_engine\`\`
+* Stop returning atoms from execute/revert methods
+* Have tasks be able to provide copy() methods
+* Allow stopwatches to be restarted
+* Ensure that failures can be pickled
+* Rework pieces of the task callback capability
+* Just use 4 spaces for classifier indents
+* Move atom action handlers to their own subfolder/submodule
+* Workflow documentation is now in infra-manual
+* Ensure frozen attribute is set in fsm clones/copies
+* Fix split on "+" for connection strings that specify dialects
+* Update listeners to ensure they correctly handle all atoms
+* Allow for the notifier to provide a 'details\_filter'
+* Be explicit about publish keyword arguments
+* Some package additions and adjustments to the env\_builder.sh
+* Cache immutable visible scopes in the runtime component
+* Raise value errors instead of asserts
+* Add a claims listener that connects job claims to engines
+* Split the scheduler into sub-schedulers
+* Use a module level constant to provide the DEFAULT\_LISTEN\_FOR
+* Move the \_pformat() method to be a classmethod
+* Add link to issue 17911
+* Avoid deepcopying exception values
+* Include documentation of the utility modules
+* Use a metaclass to dynamically add testcases to example runner
+* Remove default setting of 'mysql\_traditional\_mode'
+* Move scheduler and completer classes to their own modules
+* Ensure that the zookeeper backend creates missing atoms
+* Use the deprecation utility module instead of warnings
+* Tweaks to setup.cfg
+* Add a jobboard high level architecture diagram
+* Mark 'task\_notifier' as renamed to 'atom\_notifier'
+* Revert wrapt usage until further notice
+* Updated from global requirements
+* Add a history retry object, makes retry histories easier to use
+* Format failures via a static method
+* When creating daemon threads use the bundled threading\_utils
+* Ensure failure types contain only immutable items
+* Mark 'task\_notifier' as renamed to 'atom\_notifier'
+* Use wrapt to provide the deprecated class proxy
+* Updated from global requirements
+* Updated from global requirements
+* Updated from global requirements
+* Reduce the worker-engine joint testing time
+* Link bug in requirements so people understand why pbr is listed
+* Updated from global requirements
+* Use standard threading locks in the cache types
+* Handle the case where '\_exc\_type\_names' is empty
+* Add pbr to installation requirements
+* Updated from global requirements
+* Remove direct usage of the deprecated failure location
+* Fix the example 'default\_provides'
+* Use constants for retry automatically provided kwargs
+* Remove direct usage of the deprecated notifier location
+* Remove attrdict and just use existing types
+* Use the mock that finds a working implementation
+* Add a futures type that can unify our future functionality
+* Bump the deprecation version number
+* Use and verify event and latch wait() return using timeouts
+* Deprecate \`engine\_conf\` and prefer \`engine\` instead
+* Use constants for link metadata keys
+* Bump up the sqlalchemy version for py26
+* Hoist the notifier to its own module
+* Move failure to its own type specific module
+* Use constants for revert automatically provided kwargs
+* Improve some of the task docstrings
+* We can now use PyMySQL in py3.x tests
+* Updated from global requirements
+* Add the database schema to the sqlalchemy docs
+* Change messaging from handler connection timeouts -> operation timeouts
+* Switch to a custom NotImplementedError derivative
+* Allow the worker banner to be written to an arbitrary location
+* Update engine class names to better reflect their usage
+
+0.5.0
+-----
+
+* Avoid usage of six.moves in local functions
+* Refactor parts of the job lock/job condition zookeeper usage
+* Make it so that the import works for older versions of kombu
+* Rework the state documentation
+* Updated from global requirements
+* Add a more dynamic/useful logging listener
+* Use timeutils functions instead of misc.wallclock
+* Expose only \`ensure\_atom\` from storage
+* Adjust docs+venv tox environments requirements/dependencies
+* Increase robustness of WBE message and request processing
+* Adjust the WBE log levels
+* Use the features that the oslotest mock base class provides
+* Use oslotest to provide our base test case class
+* Jobboard example that show jobs + workers + producers
+* Adjust on\_job\_posting to not hold the lock while investigating
+* Bring in a newer optional eventlet
+* Move some of the custom requirements out of tox.ini
+* Document more function/class/method params
+* Stop using intersphinx
+* Expand toctree to three levels
+* Documentation cleanups and tweaks
+* Fix multilock concurrency when shared by > 1 threads
+* Increase/adjust the logging of the WBE response/send activities
+* Color some of the states depending on their meaning
+* Switch to using oslo.utils and oslo.serialization
+* Typos "searchs"
+* Update the requirements-py2.txt file
+* Remove no longer needed r/w lock interface base class
+* Updated from global requirements
+* Better handle the tree freeze method
+* Ensure state machine can be frozen
+* Link a few of the classes to implemented features/bugs in python
+* Add a timing listener that also prints the results
+* Remove useless \_\_exit\_\_ return
+* Example which shows how to move values from one task to another
+* Mention issue with more than one thread and reduce workers
+* Add a mandelbrot parallel calculation WBE example
+* Add existing types to generated documentation
+* Remove the dependency on prettytable
+* Work toward Python 3.4 support and testing
+* Add a state machine copy() method
+* Update the state graph builder to use state machine type
+* Add a docs virtualenv
+* Reduce unused tox environments
+
+0.4.0
+-----
+
+* Add a couple of scope shadowing test cases
+* Relax the graph flow symbol constraints
+* Relax the unordered flow symbol constraints
+* Relax the linear flow symbol constraints
+* Revamp the symbol lookup mechanism
+* Be smarter about required flow symbols
+* Update oslo-incubator to 32e7f0b56f52742754
+* Translate the engine runner into a well defined state-machine
+* Raise a runtime error when mixed green/non-green futures
+* Ensure the cachedproperty creation/setting is thread-safe
+* warn against sorting requirements
+* Updated from global requirements
+* Update transitioning function name to be more understandable
+* Move parts of action engine tests to a subdirectory
+* Tweak engine iteration 'close-up shop' runtime path
+* Use explicit WBE request state transitions
+* Reject WBE messages if they can't be put in an ack state
+* Make version.py handle pbr not being installed
+* Cleanup WBE example to be simpler to understand
+* Use \_\_qualname\_\_ where appropriate
+* Updated from global requirements
+* Updated from global requirements
+* Make the WBE worker banner information more meaningful
+* Have the dispatch\_job function return a future
+* Expand documentation on failures and wrapped failures types
+* Allow worker count to be specified when no executor provided
+* Remove sphinx examples emphasize-lines
+* Split requirements into py2 and py3 files
+* Update oslo-incubator to 037dee004c3e2239
+* Remove db locks and use random db names for tests
+* Allow WBE request transition timeout to be dynamic
+* Avoid naming time type module the same as a builtin
+* LOG which requeue filter callback failed
+* Add a pformat() failure method and use it in the conductor
+* add pre/post execute/retry callbacks to tasks
+* Use checked\_commit() around consume() and abandon()
+* Use a check + create transaction when claiming a job
+* Improve WBE testing coverage
+* Add basic WBE validation sanity tests
+* WBE request message validation
+* WBE response message validation
+* WBE notification message validation
+* Allow handlers to provide validation callables
+* Use a common message dispatcher
+* Use checked commit when committing kazoo transactions
+* Enable hacking checks H305 and H307 in tox.ini template
+* Fixes unsorted dicts and sets in doctests
+* README.rst: Avoid using non-ascii character
+* Updated from global requirements
+* Add a sample script that can be used to build a test environment
+* Enabled hacking checks H305 and H307
+* Bump hacking to version 0.9.2
+* Allow a jobs posted book to be none by default
+* Cleanup some of the example code & docs
+* Make greenexecutor not keep greenthreads active
+* Add the arch/big picture omnigraffle diagram
+* Remove pbr as a runtime dependency
+* Use the \`state\_graph.py\` for all states diagrams
+* Make the examples documentation more relevant
+* Raise NotImplementedError instead of NotImplemented
+* Move the stopwatch tests to test\_types
+* Remove need to do special exception catching in parse\_uri
+* Update oslo incubator code to commit 0b02fc0f36814968
+* Fix the section name in CONTRIBUTING.rst
+* Add a conductor considerations section
+* Make the expiring cache a top level cache type
+* Use \`flow\_uuid\` and \`flow\_name\` from storage
+* Fix traces left in zookeeper
+* Clarify locked decorator is for instance methods
+* Extract the state changes from the ensure storage method
+* Create a top level time type
+* Simplify identity transition handling for tasks and retries
+* Remove check\_doc.py and use doc8
+* Remove functions created for pre-six 1.7.0
+* Add a tree type
+* Make intentions a tuple (to denote immutability)
+* Updated from global requirements
+* Add example for pseudo-scoping
+* Fix E265 hacking warnings
+* Fix doc which should state fetch() usage
+* Adjust sphinx requirement
+* Upgrade hacking version and fix some of the issues
+* Denote that other projects can use this library
+* Remove misc.as\_bool as oslo provides an equivalent
+* Update zake to requirements version
+
+0.3.21
+------
+
+* Rename additional to general/higher-level
+* Sync our version of the interprocess lock
+* Increase usefulness of the retry component compile errors
+* Switch to a restructuredtext README file
+* Create a considerations section
+* Include the function name on internal errors
+* Add in default transaction isolation levels
+* Allow the mysql mode to be more than just TRADITIONAL
+* Make the runner a runtime provided property
+* Rename inject\_task\_args to inject\_atom\_args
+* Rename the graph analyzer to analyzer
+* Provide the compilation object instead of just a part of it
+* Ensure cachedproperty descriptor picks up docstrings
+
+0.3
+---
+
+* Warn about internal helper/utility usage
+* Rename to atom from task
+* Invert the conductor stop() returned result
+* Move flattening to the action engine compiler
+* Increase the level of usefulness of the dispatching logging
+* Avoid forcing engine\_conf to a dict
+* Allow for two ways to find a flow detail in a job for a conductor
+* Add docs related to the new conductor feature
+* Add docstring describing the inject instance variable
+* Finish factoring apart the graph\_action module
+* Update sphinx pin from global requirements
+* Fix docstring list format
+* Allow indent text to be passed in
+* Factor out the on\_failure to a mixin type
+* Use a name property setter instead of a set\_name method
+* Adds a single threaded flow conductor
+* add the ability to inject arguments into tasks at task creation
+* Synced jsonutils from oslo-incubator
+* Remove wording issue (track does not make sense here)
+* Fix case of taskflow in docs
+* Put the job external wiki link in a note section
+* Rework atom documentation
+* Add doc link to examples
+* Rework the overview of the notification mechanism
+* Standardize on the same capitalization pattern
+* Regenerate engine-state sequence diagram
+* Add source of engine-state sequence diagram
+* Add kwarg check\_pending argument to fake lock
+* Add a example which uses the run\_iter function in a for loop
+* Fix error string interpolation
+* Rename t\_storage to atom\_storage
+* Create and use a new compilation module
+* Add engine state diagram
+* Add tests for the misc.cachedproperty descriptor
+* Complete the cachedproperty descriptor protocol
+* Don't create fake LogBook when we can not fetch one
+* Use futures wait() when possible
+* Use /taskflow/flush-test in the flush function
+* Add a reset nodes function
+* Default the impl\_memory conf to none
+* Fix spelling mistake
+* Add a helper tool which clears zookeeper test dirs
+* Add a zookeeper jobboard integration test
+* Cleanup zookeeper integration testing
+* Use a more stable flush method
+* Remove the \_clear method and do not reset the job\_watcher
+* Allow command and connection retry configuration
+* Check documentation for simple style requirements
+* Add an example which uses the run iteration functionality
+* Implement run iterations
+* Put provides and requires code to basic Flow
+* Allow the watcher to re-register if the session is lost
+* Add a new wait() method that waits for jobs to arrive
+* Add a cachedproperty descriptor
+* Add an example for the job board feature
+* Engine \_cls postfix is not correct
+* Pass executor via kwargs instead of config
+* Allow the WBE to use a preexisting executor
+* Tweaks to object hierarchy diagrams
+* Adjust doc linking
+* Medium-level docs on engines
+* Add docs for the worker based engine (WBE)
+* Updated from global requirements
+* Move from generator to iterator for iterjobs
+* Add a jobboard fetching context manager
+* Wrap the failure to load in the not found exception
+* Update jobboard docs
+* Synced jsonutils from oslo-incubator
+* Remove persistence wiki page link
+* Load engines with defined args and provided kwargs
+* Integrate urlparse for configuration augmentation
+* Fix "occured" -> "occurred"
+* Documentation tune-ups
+* Fix spelling error
+* Add a resumption strategy doc
+* Docs and cleanups for test\_examples runner
+* Skip loading (and failing to load) lock files
+* Add a persistence backend fetching context manager
+* Add a example that activates a future when a result is ready
+* Fix documentation spelling errors
+* Add a job consideration doc
+* Add last\_modified & created\_on attributes to jobs
+* Allow jobboard event notification
+* Use sequencing when posting jobs
+* Add a directed graph type (new types module)
+* Add persistence docs + adjustments
+* Updated from global requirements
+* Stings -> Strings
+* Be better at failure tolerance
+* Ensure example abandons job when it fails
+* Add docs for jobs and jobboards
+* Get persistence backend via kwargs instead of conf
+* Allow fetching jobboard implementations
+* Reuse already defined variable
+* More keywords & classifier topics
+* Allow transient values to be stored in storage
+* Doc adjustments
+* Move the daemon thread helper function
+* Create a periodic worker helper class
+* Fix not found being raised when iterating
+* Allow for only iterating over the most 'fresh' jobs
+* Updated from global requirements
+* Update oslo-incubator to 46f2b697b6aacc67
+* import run\_cross\_tests.sh from incubator
+* Exception in worker queue thread
+* Avoid holding the state lock while notifying
+
+0.2
+---
+
+* Allow atoms to save their own state/result
+* Use correct exception in the timing listener
+* Add a engine preparation stage
+* Decrease extraneous logging
+* Handle retry last\_results/last\_failure better
+* Improve documentation for engines
+* Worker executor adjustments
+* Revert "Move taskflow.utils.misc.Failure to its own module"
+* Move taskflow.utils.misc.Failure to its own module
+* Leave the execution\_graph as none until compiled
+* Move state link to developer docs
+* Raise error if atom asked to schedule with unknown intention
+* Removed unused TIMED\_OUT state
+* Rework documentation of notifications
+* Test retry fails on revert
+* Exception when scheduling task with invalid state
+* Fix race in worker-based executor result processing
+* Set logbook/flowdetail/atomdetail meta to empty dict
+* Move 'inputs and outputs' to developers docs
+* tests: Discover absence of zookeeper faster
+* Fix spelling mistake
+* Should be greater or equal to zero and not greater than
+* Persistence cleanup part one
+* Run worker-based engine tests faster
+* SQLAlchemy requirements put in order
+* Add timeout to WaitForOneFromTask
+* Use same code to reset flow and parts of it
+* Optimize dependency links in flattening
+* Adjust the exception hierarchy
+* docs: Links to methods on arguments and results page
+* Add \_\_repr\_\_ method to Atom
+* Flattening improvements
+* tests: Fix WaitForOneFromTask constructor parameter introspection
+* Rework graph flow unit tests
+* Rewrite assertion for same elements in sequences
+* Unit tests for unordered flow
+* Linear flow: mark links and rework unit tests
+* Drop indexing operator from linear flow
+* Drop obsolete test\_unordered\_flow
+* Iteration over links in flow interface
+* Add a timeout object that can be interrupted
+* Avoid shutting down of a passed executor
+* Add more tests for resumption with retry
+* Improve logging for proxy publish
+* Small documentation fix
+* Improve proxy publish method
+* Add Retry to developers documentation
+* Move flow states to developers documentation
+* Remove extraneous vim configuration comments
+* Make schedule a proper method of GraphAction
+* Simplify graph analyzer interface
+* Test storage with memory and sqlite backends
+* Fix few minor spelling errors
+* Fix executor requests publishing bug
+* Flow smart revert with retry controller
+* Add atom intentions for tasks and retries
+* [WBE] Collect information from workers
+* Add tox environment for pypy
+* docs: Add inheritance diagram to exceptions documentation
+* Adjust logging levels and usage to follow standards
+* Introduce message types for WBE protocol
+* Add retry action to execute retries
+* Extend logbook and storage to work with retry
+* Add retry to execution graph
+* Add retry to Flow patterns
+* Add base class for Retry
+* Update request \`expired\` property docstring
+* docs: Add page describing atom arguments and results
+* docs: Improve BaseTask method docstrings
+* Remove extra quote symbol
+* docs: Relative links improvements
+* docs: Ignore 'taskflow.' prefix when sorting module index
+* Update comment + six.text\_type instead of str for name
+* Avoid calling callbacks while holding locks
+* Rename remote task to request
+* Rework proxy publish functionality
+* Updated from global requirements
+* Use message.requeue instead of message.reject
+* Lock test tweaks
+* Move endpoint subclass finding to reflection util
+* Correct LOG.warning in persistence utils
+* Introduce remote tasks cache for worker-executor
+* Worker-based engine clean-ups
+* A few worker-engine cleanups
+* Add a delay before releasing the lock
+* Allow connection string to be just backend name
+* Get rid of openstack.common.py3kcompat
+* Clean-up several comments in reflection.py
+* Fix try\_clean not getting the job\_path
+* Updated from global requirements
+* Rename uuid to topic
+* Fixups for threads\_count usage and logging
+* Use the stop watch utility instead of custom timing
+* Unify usage of storage error exception type
+* Add zookeeper job/jobboard impl
+* Updated from global requirements
+* Removed copyright from empty files
+* Remove extraneous vim configuration comments
+* Use six.text\_type() instead of str() in sqlalchemy backend
+* Fix dummy lock missing pending\_writers method
+* Move some common/to be shared kazoo utils to kazoo\_utils
+* Switch to using the type checking decode\_json
+* Fix few spelling and grammar errors
+* Fixed spelling error
+* Run action-engine tests with worker-based engine
+* Message-oriented worker-based flow with kombu
+* Check atom doesn't provide and return same values
+* Fix command for pylint tox env
+* Remove locale overrides form tox template
+* Reduce test and optional requirements to global requirements
+* Rework sphinx documentation
+* Remove extraneous vim configuration comments
+* Sync with global requirements
+* Instead of doing set diffing just partition when state checking
+* Add ZooKeeper backend to examples
+* Storage protects lower level backend against thread safety
+* Remove tox locale overrides
+* Update .gitreview after repo rename
+* Small storage tests clean-up
+* Support building wheels (PEP-427)
+
+0.1.3
+-----
+
+* Add validate() base method
+* Fix deadlock on waiting for pending\_writers to be empty
+* Rename self.\_zk to self.\_client
+* Use listener instead of AutoSuspendTask in test\_suspend\_flow
+* Use test utils in test\_suspend\_flow
+* Use reader/writer locks in storage
+* Allow the usage of a passed in sqlalchemy engine
+* Be really careful with non-ascii data in exceptions/failures
+* Run zookeeper tests if localhost has a compat. zookeeper server
+* Add optional-requirements.txt
+* Move kazoo to testenv requirements
+* Unpin testtools version and bump subunit to >=0.0.18
+* Remove use of str() in utils.misc.Failure
+* Be more resilient around import/detection/setup errors
+* Some zookeeper persistence improvements/adjustments
+* Add a validate method to dir and memory backends
+* Update oslo copy to oslo commit 39e1c5c5f39204
+* Update oslo.lock from incubator commit 3c125e66d183
+* Refactor task/flow flattening
+* Engine tests refactoring
+* Tests: don't pass 'values' to task constructor
+* Test fetching backends via entry points
+* Pin testtools to 0.9.34 in test requirements
+* Ensure we register the new zookeeper backend as an entrypoint
+* Implement ZooKeeper as persistence storage backend
+* Use addCleanup instead of tearDown in test\_sql\_persistence
+* Retain the same api for all helpers
+* Update execute/revert comments
+* Added more unit tests for Task and FunctorTask
+* Doc strings and comments clean-up
+* List examples function doesn't accept arguments
+* Tests: Persistence test mixin fix
+* Test using mysql + postgres if available
+* Clean-up and improve async-utils tests
+* Use already defined PENDING variable
+* Add utilities for working with binary data
+* Cleanup engine base class
+* Engine cleanups
+* Update atom comments
+* Put full set of requirements to py26, py27 and py33 envs
+* Add base class Atom for all flow units
+* Add more requirements to cover tox environment
+* Put SQLAlchemy requirements on single line
+* Proper exception raised from check\_task\_transition
+* Fix function name typo in persistence utils
+* Use the same way of assert isinstance in all tests
+* Minor cleanup in test\_examples
+* Add possibility to create Failure from exception
+* Exceptions cleanup
+* Alter is\_locked() helper comment
+* Add a setup.cfg keywords to describe taskflow
+* Use the released toxgen tool instead of our copy
+
+0.1.2
+-----
+
+* Move autobinding to task base class
+* Assert functor task revert/execute are callable
+* Use the six callback checker
+* Add envs for different sqlalchemy versions
+* Refactor task handler binding
+* Move six to the right location
+* Use constants for the execution event strings
+* Added htmlcov folder to .gitignore
+* Reduce visibility of task\_action
+* Change internal data store of LogBook from list to dict
+* Misc minor fixes to taskflow/examples
+* Add connection\_proxy param
+* Ignore doc build files
+* Fix spelling errors
+* Switch to just using tox
+* Enable H202 warning for flake8
+* Check tasks should not provide same values
+* Allow max\_backoff and use count instead of attempts
+* Skip invariant checking and adding when nothing provided
+* Avoid not\_done naming conflict
+* Add stronger checking of backend configuration
+* Raise type error instead of silencing it
+* Move the container fetcher function to utils
+* Explicitly list the valid transitions to RESUMING state
+* Name the graph property the same as in engine
+* Bind outside of the try block
+* Graph action refactoring
+* Add make\_completed\_future to async\_utils
+* Update oslo-incubator copy to oslo-incubator commit 8b2b0b743
+* Ensure that mysql traditional mode is enabled
+* Move async utils to own file
+* Update requirements from openstack/requirements
+* Code cleanup for eventlet\_utils.wait\_for\_any
+* Refactor engine internals
+* Add wait\_for\_any method to eventlet utils
+* Introduce TaskExecutor
+* Run some engine tests with eventlet if it's available
+* Do not create TaskAction for each task
+* Storage: use names instead of uuids in interface
+* Add tests for metadata updates
+* Fix sqlalchemy 0.8 issues
+* Fix minor python3 incompatibility
+* Speed up FlowDetail.find
+* Fix misspellings
+* Raise exception when trying to run empty flow
+* Use update\_task\_metadata in set\_task\_progress
+* Capture task duration
+* Fix another instance of callback comparison
+* Don't forget to return self
+* Fixes how instances methods are not deregistered
+* Targeted graph flow pattern
+* All classes should explicitly inherit object class
+* Initial commit of sphinx related files
+* Improve is\_valid\_attribute\_name utility function
+* Coverage calculation improvements
+* Fix up python 3.3 incompatibilities
+
+0.1.1
+-----
+
+* Pass flow failures to task's revert method
+* Storage: add methods to get all flow failures
+* Pbr requirement went missing
+* Update code to comply with hacking 0.8.0
+* Don't reset tasks to PENDING state while reverting
+* Let pbr determine version automatically
+* Be more careful when passing result to revert()
+
+0.1
+---
+
+* Support for optional task arguments
+* Do not erase task progress details
+* Storage: restore injected data on resumption
+* Inherit the greenpool default size
+* Add debug logging showing what is flattened
+* Remove incorrect comment
+* Unit tests refactoring
+* Use py3kcompat.urlutils from oslo instead of six.urllib\_parse
+* Update oslo and bring py3kcompat in
+* Support several output formats in state\_graph tool
+* Remove task\_action state checks
+* Wrapped exception doc/intro comment updates
+* Doc/intro updates for simple\_linear\_listening
+* Add docs/intro to simple\_linear example
+* Update intro/comments for reverting\_linear example
+* Add docs explaining what/how resume\_volume\_create works
+* A few resuming from backend comment adjustments
+* Add an introduction to explain resume\_many example
+* Increase persistence example comments
+* Boost graph flow example comments
+* Also allow "\_" to be valid identifier
+* Remove uuid from taskflow.flow.Flow
+* A few additional example boot\_vm comments + tweaks
+* Add a resuming booting vm example
+* Add task state verification
+* Beef up storage comments
+* Removed unused utilities
+* Helpers to save flow factory in metadata
+* Storage: add flow name and uuid properties
+* Create logbook if not provided for create\_flow\_details
+* Prepare for 0.1 release
+* Comment additions for exponential backoff
+* Beef up the action engine comments
+* Pattern comment additions/adjustments
+* Add more comments to flow/task
+* Save with the same connection
+* Add a persistence util logbook formatting function
+* Rename get\_graph() -> execution\_graph
+* Continue adding docs to examples
+* Add more comments that explain example & usage
+* Add more comments that explain example & usage
+* Add more comments that explain example & usage
+* Add more comments that explain example & usage
+* Fix several python3 incompatibilities
+* Python3 compatibility for utils.reflection
+* No module name for builtin type and exception names
+* Fix python3 compatibility issues in examples
+* Fix print statements for python 2/3
+* Add a mini-cinder volume create with resumption
+* Update oslo copy and bring over versionutils
+* Move toward python 3/2 compatible metaclass
+* Add a secondary booting vm example
+* Resumption from backend for action engine
+* A few wording/spelling adjustments
+* Create a green executor & green future
+* Add a simple mini-billing stack example
+* Add a example which uses a sqlite persistence layer
+* Add state to dot->svg tool
+* Add a set of useful listeners
+* Remove decorators and move to utils
+* Add reasons as to why the edges were created
+* Fix entrypoints being updated/created by update.py
+* Validate each flow state change
+* Update state sequence for failed flows
+* Flow utils and adding comments
+* Bump requirements to the latest
+* Add an inspect sanity check and note about bound methods
+* Some small exception cleanups
+* Check for duplicate task names on flattening
+* Correctly save task versions
+* Allow access by index
+* Fix importing of module files
+* Wrapping and serializing failures
+* Simpler API to load flows into engines
+* Avoid setting object variables
+* A few adjustments to the progress code
+* Cleanup unused states
+* Remove d2to dependency
+* Warn if multiple providers found
+* Memory persistence backend improvements
+* Create database from models for SQLite
+* Don't allow mutating operations on the underlying graph
+* Add graph density
+* Suspend single and multi threaded engines
+* Remove old tests for nonexistent flow types
+* Boot fake vm example fixed
+* Export graph to dot util
+* Remove unused utility classes
+* Remove black list of graph flow
+* Task decorator was removed and examples updated
+* Remove weakref usage
+* Add basic sanity tests for unordered flow
+* Clean up job/jobboard code
+* Add a directory/filesystem based persistence layer
+* Remove the older (not used) resumption mechanism
+* Reintegrate parallel action
+* Add a flow flattening util
+* Allow to specify default provides at task definition
+* Graph flow, sequential graph action
+* Task progress
+* Verify provides and requires
+* Remap the emails of the committers
+* Use executors instead of pools
+* Fix linked exception forming
+* Remove threaded and distributed flows
+* Add check that task provides all results it should
+* Use six string types instead of basestring
+* Remove usage of oslo.db and oslo.config
+* Move toward using a backend+connection model
+* Add provides and requires properties to Flow
+* Fixed crash when running the engine
+* Remove the common config since its not needed
+* Allow the lock decorator to take a list
+* Allow provides to be a set and results to be a dictionary
+* Allow engines to be copied + blacklist broken flows
+* Add link to why we have to make this factory due to late binding
+* Use the lock decorator and close/join the thread pool
+* Engine, task, linear\_flow unification
+* Combine multiple exceptions into a linked one
+* Converted some examples to use patterns/engines
+* MultiThreaded engine and parallel action
+* State management for engines
+* Action engine: save task results
+* Initial implementation of action-based engine
+* Further updates to update.py
+* Split utils module
+* Rename Task.\_\_call\_\_ to Task.execute
+* Reader/writer no longer used
+* Rename "revert\_with" => "revert" and "execute\_with" to "execute"
+* Notify on task reversion
+* Have runner keep the exception
+* Use distutil version classes
+* Add features to task.Task
+* Add get\_required\_callable\_args utility function
+* Add get\_callable\_name utility function
+* Require uuid + move functor\_task to task.py
+* Check examples when running tests
+* Use the same root test class
+* LazyPluggable is no longer used
+* Add a locally running threaded flow
+* Change namings in functor\_task and add docs to its \_\_init\_\_
+* Rework the persistence layer
+* Do not have the runner modify the uuid
+* Refactor decorators
+* Nicer way to make task out of any callable
+* Use oslo's sqlalchemy layer
+* File movements
+* Added Backend API Database Implementation
+* Added Memory Persistence API and Generic Datatypes
+* Resync the latest oslo code
+* Remove openstack.common.exception usage
+* Forgot to move this one to the right folder
+* Add a new simple calculator example
+* Quiet the provider linking
+* Deep-copy not always possible
+* Add a example which simulates booting a vm
+* Add a more complicated graph example
+* Move examples under the source tree
+* Adjust a bunch of hacking violations
+* Fix typos in test\_linear\_flow.py and simple\_linear\_listening.py
+* Fix minor code style
+* Fix two minor bugs in docs/examples
+* Show file modifications and fix dirpath based on config file
+* Add a way to use taskflow until library stabilized
+* Provide the length of the flows
+* Parents should be frozen after creation
+* Allow graph dependencies to be manually provided
+* Add helper reset internals function
+* Move to using pbr
+* Unify creation/usage of uuids
+* Use the runner interface as the best task lookup
+* Ensure we document and complete correct removal
+* Pass runners instead of task objects/uuids
+* Move how resuming is done to be disconnected from jobs/flows
+* Clear out before connecting
+* Make connection/validation of tasks be after they are added
+* Add helper to do notification
+* Store results by add() uuid instead of in array format
+* Integrate better locking and a runner helper class
+* Cleaning up various components
+* Move some of the ordered flow helper classes to utils
+* Allow instance methods to be wrapped and unwrapped correctly
+* Add a start of a few simple examples
+* Update readme to point to links
+* Fix most of the hacking rules
+* Fix all flake8 E\* and F\* errors
+* Fix the current flake8 errors
+* Don't keep the state/version in the task name
+* Dinky change to trigger jenkins so I can cleanup
+* Add the task to the accumulator before running
+* Add .settings and .venv into .gitignore
+* Fix tests for python 2.6
+* Add the ability to soft\_reset a workflow
+* Add a .gitreview file so that git-review works
+* Ensure we have an exception and capture the exc\_info
+* Update how graph results are fetched when they are optional
+* Allow for optional task requirements
+* We were not notifying when errors occurred so fix that
+* Bring over the nova get\_wrapped\_function helper and use it
+* Allow for passing in the metadata when creating a task detail entry
+* Update how the version task functor attribute is found
+* Remove more tabs incidents
+* Removed test noise and formatted for pep8
+* Continue work on decorator usage
+* Ensure we pickup the packages
+* Fixed pep8 formatting... Finally
+* Add flow disassociation and adjust the assocate path
+* Add a setup.cfg and populate it with a default set of nosetests options
+* Fix spacing
+* Add a better task name algorithm
+* Add a major/minor version
+* Add a get many attr/s and join helper functions
+* Reduce test noise
+* Fix a few unit tests due to changes
+* Ensure we handle functor names and resetting correctly
+* Remove safe\_attr
+* Modifying db tests
+* Removing .pyc
+* Fixing .py in .gitignore
+* Update db api test
+* DB api test cases and revisions
+* Allow for turning off auto-extract and add a test
+* Use a function to filter args and add comments
+* Use update instead of overwrite
+* Move decorators to new file and update to use better wraps()
+* Continue work with decorator usage
+* Update with adding a provides and requires decorator for standalone function usage
+* Instead of apply use \_\_call\_\_
+* Add comment to why we accumulate before notifying task listeners
+* Use a default sqlite backing using a taskflow file
+* Add a basic rollback accumulator test
+* Use rollback accumulator and remove requires()/provides() from being functions
+* Allow (or disallow) multiple providers of items
+* Clean the lines in a separate function
+* Resync with oslo-incubator
+* Remove uuid since we are now using uuidutils
+* Remove error code not found in strict version of pylint
+* Include more dev testing packages + matching versions
+* Update dependencies for new db/distributed backends
+* Move some of the functions to use there openstack/common counterparts
+* More import fixups
+* Patch up the imports
+* Fix syntax error
+* Rename cause -> exception and make exception optional
+* Allow any of the previous tasks to satisfy requirements
+* Ensure we change the self and parents states correctly
+* Always have a name provided
+* Cleaning up files/extraneous files/fixing relations
+* More pylint cleanups
+* Make more tests for linear and shuffle test utils to common file
+* Only do differences on set objects
+* Ensure we fetch the appropriate inputs for the running task
+* Have the linear workflow verify the tasks inputs
+* Specify that task provides/requires must be an immutable set
+* Clean Up for DB changes
+* db api defined
+* Fleshing out sqlalchemy api
+* Almost done with sqlalchemy api
+* Fix state check
+* Fix flow exception wording
+* Ensure job is pending before we associate and run
+* More pylint cleanups
+* Ensure we associate with parent flows as well
+* Add a nice run() method to the job class that will run a flow
+* Massive pylint cleanup
+* deleting .swp files
+* deleting .swp files
+* cleaning for initial pull request
+* Add a few more graph ordering test cases
+* Update automatic naming and arg checks
+* Update order calls and connect call
+* Move flow failure to flow file and correctly catch ordering failure
+* Just kidding - really fixing relations this time
+* Fixing table relations
+* Allow job id to be passed in
+* Check who is being connected to and ensure > 0 connectors
+* Move the await function to utils
+* Graph tests and adjustments related to
+* Add graph flow tests
+* Fix name changes missed
+* Enable extraction of what a functor requires from its args
+* Called flow now, not workflow
+* Second pass at models
+* More tests
+* Simplify existence checks
+* More pythonic functions and workflow -> flow renaming
+* Added more utils, added model for workflow
+* Spelling errors and stuff
+* adding parentheses to read method
+* Implemented basic sqlalchemy session class
+* Setting up Configs and SQLAlchemy/DB backend
+* Fix the import
+* Use a different logger method if tolerant vs not tolerant
+* More function comments
+* Add a bunch of linear workflow tests
+* Allow resuming stage to be interrupted
+* Fix the missing context variable
+* Moving over celery/distributed workflows
+* Update description wording
+* Pep fix
+* Instead of using notify member functions, just use functors
+* More wording fixes
+* Add the ability to alter the task failure reconciliation
+* Correctly run the tasks after partial resumption
+* Another wording fix
+* Spelling fix
+* Allow the functor task to take a name and provide it a default
+* Updated functor task comments
+* Move some of the useful helpers and functions to other files
+* Add the ability to associate a workflow with a job
+* Move the useful functor wrapping task from test to wrappers file
+* Add a thread posting/claiming example and rework tests to use it
+* After adding reposting/unclaiming reflect those changes here
+* Add a nicer string name that shows what the class name is
+* Adjust some of the states jobs and workflows could be in
+* Add a more useful name that shows this is a task
+* Remove impl of erasing which doesn't do much and allow for job reposting
+* Various reworkings
+* Rename logbook contents
+* Get a memory test example working
+* Add a pylintrc file to be used with pylint
+* Rework the logbook to be chapter/page based
+* Move ordered workflow to its own file
+* Increase the number of comments
+* Start adding in a more generic DAG based workflow
+* Remove dict\_provider dependency
+* Rework due to code comments
+* Begin adding testing functionality
+* Fill in the majority of the memory job
+* Rework how we should be using lists instead of ordereddicts for optimal usage
+* Add a context manager to the useful read/writer lock
+* Ensure that the task has a name
+* Add a running state which can be used to know when a workflow is running
+* Rename the date created field
+* Add some search functionality and adjust the await() function params
+* Remove and add a few new exceptions
+* Shrink down the exposed methods
+* Remove the promise object for now
+* Add RESUMING
+* Fix spelling
+* Continue on getting ready for the memory impl. to be useful
+* On python <= 2.6 we need to import ordereddict
+* Remove a few other references to nova
+* Add in openstack common and remove patch references
+* Move simplification over
+* Continue moving here
+* Update README.md
+* Update readme
+* Move the code over for now
+* Initial commit
diff -pruN 5.12.0-2/PKG-INFO 6.0.2-0ubuntu1/PKG-INFO
--- 5.12.0-2/PKG-INFO	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/PKG-INFO	2025-08-25 12:49:58.764545400 +0000
@@ -0,0 +1,148 @@
+Metadata-Version: 2.1
+Name: taskflow
+Version: 6.0.2
+Summary: Taskflow structured state management library.
+Home-page: https://docs.openstack.org/taskflow/latest/
+Author: OpenStack
+Author-email: openstack-discuss@lists.openstack.org
+Keywords: reliable,tasks,execution,parallel,dataflow,workflows,distributed
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: OpenStack
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: System :: Distributed Computing
+Requires-Python: >=3.9
+License-File: LICENSE
+Requires-Dist: pbr>=2.0.0
+Requires-Dist: debtcollector>=1.2.0
+Requires-Dist: futurist>=1.2.0
+Requires-Dist: fasteners>=0.17.3
+Requires-Dist: networkx>=2.1.0
+Requires-Dist: stevedore>=1.20.0
+Requires-Dist: jsonschema>=3.2.0
+Requires-Dist: automaton>=1.9.0
+Requires-Dist: oslo.utils>=3.33.0
+Requires-Dist: oslo.serialization>=2.18.0
+Requires-Dist: tenacity>=6.0.0
+Requires-Dist: cachetools>=2.0.0
+Requires-Dist: pydot>=1.2.4
+Provides-Extra: zookeeper
+Requires-Dist: kazoo>=2.6.0; extra == "zookeeper"
+Provides-Extra: redis
+Requires-Dist: redis>=4.0.0; extra == "redis"
+Provides-Extra: etcd
+Requires-Dist: etcd3gw>=2.0.0; extra == "etcd"
+Provides-Extra: workers
+Requires-Dist: kombu>=4.3.0; extra == "workers"
+Provides-Extra: eventlet
+Requires-Dist: eventlet>=0.18.2; extra == "eventlet"
+Provides-Extra: database
+Requires-Dist: SQLAlchemy>=1.0.10; extra == "database"
+Requires-Dist: alembic>=0.8.10; extra == "database"
+Requires-Dist: SQLAlchemy-Utils>=0.30.11; extra == "database"
+Requires-Dist: PyMySQL>=0.7.6; extra == "database"
+Requires-Dist: psycopg2>=2.8.0; extra == "database"
+Provides-Extra: test
+Requires-Dist: kazoo>=2.6.0; extra == "test"
+Requires-Dist: redis>=4.0.0; extra == "test"
+Requires-Dist: etcd3gw>=2.0.0; extra == "test"
+Requires-Dist: kombu>=4.3.0; extra == "test"
+Requires-Dist: eventlet>=0.18.2; extra == "test"
+Requires-Dist: SQLAlchemy>=1.0.10; extra == "test"
+Requires-Dist: alembic>=0.8.10; extra == "test"
+Requires-Dist: SQLAlchemy-Utils>=0.30.11; extra == "test"
+Requires-Dist: PyMySQL>=0.7.6; extra == "test"
+Requires-Dist: psycopg2>=2.8.0; extra == "test"
+Requires-Dist: zake>=0.1.6; extra == "test"
+Requires-Dist: pydotplus>=2.0.2; extra == "test"
+Requires-Dist: oslotest>=3.2.0; extra == "test"
+Requires-Dist: testtools>=2.2.0; extra == "test"
+Requires-Dist: testscenarios>=0.4; extra == "test"
+Requires-Dist: stestr>=2.0.0; extra == "test"
+Requires-Dist: pifpaf>=0.10.0; extra == "test"
+
+========================
+Team and repository tags
+========================
+
+.. image:: https://governance.openstack.org/tc/badges/taskflow.svg
+    :target: https://governance.openstack.org/tc/reference/tags/index.html
+
+.. Change things from this point on
+
+TaskFlow
+========
+
+.. image:: https://img.shields.io/pypi/v/taskflow.svg
+    :target: https://pypi.org/project/taskflow/
+    :alt: Latest Version
+
+A library to do [jobs, tasks, flows] in a highly available, easy to understand
+and declarative manner (and more!) to be used with OpenStack and other
+projects.
+
+* Free software: Apache license
+* Documentation: https://docs.openstack.org/taskflow/latest/
+* Source: https://opendev.org/openstack/taskflow
+* Bugs: https://bugs.launchpad.net/taskflow/
+* Release notes: https://docs.openstack.org/releasenotes/taskflow/
+
+Join us
+-------
+
+- https://launchpad.net/taskflow
+
+Testing and requirements
+------------------------
+
+Requirements
+~~~~~~~~~~~~
+
+Because this project has many optional (pluggable) parts like persistence
+backends and engines, we decided to split our requirements into two
+parts: - things that are absolutely required (you can't use the project
+without them) are put into ``requirements.txt``. The requirements
+that are required by some optional part of this project (you can use the
+project without them) are put into our ``test-requirements.txt`` file (so
+that we can still test the optional functionality works as expected). If
+you want to use the feature in question (`eventlet`_ or the worker based engine
+that uses `kombu`_ or the `sqlalchemy`_ persistence backend or jobboards which
+have an implementation built using `kazoo`_ ...), you should add
+that requirement(s) to your project or environment.
+
+Tox.ini
+~~~~~~~
+
+Our ``tox.ini`` file describes several test environments that allow to test
+TaskFlow with different python versions and sets of requirements installed.
+Please refer to the `tox`_ documentation to understand how to make these test
+environments work for you.
+
+Developer documentation
+-----------------------
+
+We also have sphinx documentation in ``docs/source``.
+
+*To build it, run:*
+
+::
+
+    $ python setup.py build_sphinx
+
+.. _kazoo: https://kazoo.readthedocs.io/en/latest/
+.. _sqlalchemy: https://www.sqlalchemy.org/
+.. _kombu: https://kombu.readthedocs.io/en/latest/
+.. _eventlet: http://eventlet.net/
+.. _tox: https://tox.testrun.org/
+
diff -pruN 5.12.0-2/debian/changelog 6.0.2-0ubuntu1/debian/changelog
--- 5.12.0-2/debian/changelog	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/changelog	2025-09-22 20:23:52.000000000 +0000
@@ -1,220 +1,241 @@
-python-taskflow (5.12.0-2) unstable; urgency=medium
+python-taskflow (6.0.2-0ubuntu1) questing; urgency=medium
 
-  * Uploading to unstable.
+  * New upstream release for OpenStack Flamingo (LP: #2125012).
+  * d/watch: Add Flamingo key verification.
+  * d/u/signing-key.asc: Add Flamingo public key.
 
- -- Thomas Goirand <zigo@debian.org>  Fri, 28 Mar 2025 09:53:00 +0100
+ -- Myles Penner <myles.penner@canonical.com>  Mon, 22 Sep 2025 13:23:52 -0700
 
-python-taskflow (5.12.0-1) experimental; urgency=medium
+python-taskflow (6.0.1+git2025070814.d01920ef-0ubuntu2) questing; urgency=medium
 
-  * New upstream release.
-  * Add python3-debtcollector as (build-)depends.
+  * d/control: Drop pydot dependency.
 
- -- Thomas Goirand <zigo@debian.org>  Mon, 10 Mar 2025 16:13:58 +0100
+ -- Myles Penner <myles.penner@canonical.com>  Wed, 23 Jul 2025 15:40:11 -0700
 
-python-taskflow (5.11.0-1) experimental; urgency=medium
+python-taskflow (6.0.1+git2025070814.d01920ef-0ubuntu1) questing; urgency=medium
 
-  * New upstream release.
-  * d/watch: switch to version=4 and mode=git.
+  [ Guillaume Boutry ]
+  * d/gbp.conf, .launchpad.yaml: Sync from cloud-archive-tools for
+    flamingo.
 
- -- Thomas Goirand <zigo@debian.org>  Wed, 26 Feb 2025 13:26:26 +0100
+  [ Myles Penner ]
+  * New upstream release for OpenStack Flamingo. (LP: #2116155)
+  * d/p/reproducible_build.patch: Refresh.
+  * d/p/move-test-requirements-out-of-runtime-requirements.patch: Refresh.
+  * d/p/remove-bad-enum.py-calls.patch: Refresh.
+  * d/control: Align (Build-)Depends with upstream.
+  * d/rules: Include PBR version.
 
-python-taskflow (5.9.1-4) unstable; urgency=medium
+ -- Myles Penner <myles.penner@canonical.com>  Wed, 16 Jul 2025 15:59:16 -0700
 
-  * Upgrade -doc to bootstrap 5 (Closes: #1088509).
+python-taskflow (5.11.0-0ubuntu1) plucky; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Thu, 02 Jan 2025 12:02:49 +0100
+  * New upstream release for OpenStack Epoxy.
+  * d/p/*: Refresh.
 
-python-taskflow (5.9.1-3) unstable; urgency=medium
+ -- James Page <james.page@ubuntu.com>  Thu, 27 Feb 2025 12:56:39 +0000
 
-  * Switch to pybuild (Closes: #1090625).
+python-taskflow (5.10.0-0ubuntu1) plucky; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Wed, 18 Dec 2024 22:18:17 +0100
+  * d/gbp.conf, .launchpad.yaml: Sync from cloud-archive-tools for
+    epoxy.
+  * New upstream release for OpenStack Epoxy.
+  * d/control: Drop dependencies that are no longer required.
 
-python-taskflow (5.9.1-2) unstable; urgency=medium
+ -- James Page <james.page@ubuntu.com>  Fri, 17 Jan 2025 09:21:15 +0000
 
-  * Uploading to unstable.
+python-taskflow (5.9.1-0ubuntu1) oracular; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Thu, 19 Sep 2024 17:36:38 +0200
+  * d/gbp.conf: upstream-branch -> upstream-dalmatian.
+  * New upstream release for OpenStack Dalmatian.
 
-python-taskflow (5.9.1-1) experimental; urgency=medium
+ -- James Page <james.page@ubuntu.com>  Wed, 25 Sep 2024 09:34:54 +0100
 
-  * New upstream release.
-  * Added python3-etcd3gw as build-depends.
-
- -- Thomas Goirand <zigo@debian.org>  Mon, 26 Aug 2024 14:10:25 +0200
-
-python-taskflow (5.6.0-2) unstable; urgency=medium
-
-  * Uploading to unstable.
-
- -- Thomas Goirand <zigo@debian.org>  Wed, 03 Apr 2024 16:25:50 +0200
-
-python-taskflow (5.6.0-1) experimental; urgency=medium
-
-  * New upstream release.
-
- -- Thomas Goirand <zigo@debian.org>  Sat, 16 Mar 2024 13:10:56 +0100
-
-python-taskflow (5.5.0-1) experimental; urgency=medium
+python-taskflow (5.9.0-0ubuntu1) oracular; urgency=medium
 
   * New upstream release.
+  * d/control: Align (Build-)Depends with upstream.
 
- -- Thomas Goirand <zigo@debian.org>  Sat, 24 Feb 2024 18:47:40 +0100
+ -- James Page <james.page@ubuntu.com>  Wed, 07 Aug 2024 14:43:15 +0100
 
-python-taskflow (5.4.0-2) unstable; urgency=medium
+python-taskflow (5.5.0-0ubuntu1) noble; urgency=medium
 
-  * Added python3-pyasyncore as (build-)depends, so this package can work with
-    Python 3.12 (Closes: #1058184).
+  * New upstream release for OpenStack Caracal.
+  * d/p/*: Refresh.
 
- -- Thomas Goirand <zigo@debian.org>  Mon, 30 Oct 2023 16:15:28 +0100
+ -- James Page <james.page@ubuntu.com>  Mon, 11 Mar 2024 12:11:34 +0000
 
-python-taskflow (5.4.0-1) unstable; urgency=medium
-
-  * New upstream release.
-  * Cleans better.
-  * Blacklist test_dir_backend_cache_overfill().
+python-taskflow (5.4.0-0ubuntu3) noble; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Wed, 04 Oct 2023 22:45:47 +0200
+  * d/p/fix-missing-underscore-js.patch: Dropped. Not needed.
 
-python-taskflow (5.1.0-2) unstable; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Wed, 13 Dec 2023 09:46:31 -0500
 
-  * Uploading to unstable.
-  * Blacklist unit.test_utils.UriParseTest.test_ipv6_host.
+python-taskflow (5.4.0-0ubuntu2) noble; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Mon, 19 Jun 2023 11:51:50 +0200
+  * d/gbp.conf, .launchpad.yaml: Sync from cloud-archive-tools for
+    caracal.
+  * d/control: Depend on python3-pyasyncore until upstream no longer
+    has a dependency on asyncore (LP: #2024588).
+  * d/control: Depend on python3-eventlet >= 0.33.1-4ubuntu2 which is
+    patched with Python 3.12 support.
+  * d/control, d/rules, d/p/fix-missing-underscore-js.patch: Add work-around
+    for missing underscore.js.
 
-python-taskflow (5.1.0-1) experimental; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Tue, 24 Oct 2023 16:21:25 -0400
 
-  * New upstream release.
-  * Removed patch applied upstream:
-    - Adapt-to-new-jsonschema-versions.patch
-    - py3.11_Fix-test_while_is_not.patch
+python-taskflow (5.4.0-0ubuntu1) mantic; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Mon, 27 Feb 2023 09:31:42 +0100
+  * d/gbp.conf, .launchpad.yaml: Sync from cloud-archive-tools for
+    bobcat.
+  * New upstream release for OpenStack Bobcat.
 
-python-taskflow (5.0.0-3) unstable; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Mon, 11 Sep 2023 14:28:24 -0400
 
-  * Add py3.11_Fix-test_while_is_not.patch (Closes: #1025126).
+python-taskflow (5.1.0-0ubuntu1) lunar; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Fri, 09 Dec 2022 11:22:43 +0100
+  * New upstream release for OpenStack Antelope.
+  * d/p/adapt-to-new-jsonschema-versions.patch: Dropped. No longer needed.
 
-python-taskflow (5.0.0-2) unstable; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Fri, 06 Jan 2023 17:12:04 -0500
 
-  * Uploading to unstable.
+python-taskflow (5.0.0-0ubuntu1) kinetic; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Fri, 23 Sep 2022 12:16:16 +0200
+  * New upstream release for OpenStack Zed.
+  * d/control: Align (Build-)Depends with upstream.
+  * d/p/adapt-to-new-jsonschema-versions.patch: Updates to work with
+    new jsonschema.
+  * d/p/*: Rebased.
 
-python-taskflow (5.0.0-1) experimental; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Tue, 02 Aug 2022 16:13:39 -0400
 
-  * New upstream release.
-  * Removed python3-six from (build-)depends.
+python-taskflow (4.6.4-0ubuntu3) kinetic; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Tue, 30 Aug 2022 12:16:30 +0200
+  * d/t/control: Switch test name from python-ostestr to python-stestr.
 
-python-taskflow (4.7.0-1) experimental; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Tue, 21 Jun 2022 10:19:38 -0400
 
-  * New upstream release.
-  * Set min version of python3-fasteners to 0.17.3.
+python-taskflow (4.6.4-0ubuntu2) kinetic; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Tue, 30 Aug 2022 12:09:36 +0200
+  * d/t/control, d/t/python-(o)stestr: Switch from ostestr to stestr.
 
-python-taskflow (4.6.4-3) unstable; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Mon, 13 Jun 2022 09:15:12 -0400
 
-  * Refreshed patches.
-  * Add Adapt-to-new-jsonschema-versions.patch (Closes: #1016228).
-  * Add autopkgtest.
+python-taskflow (4.6.4-0ubuntu1) jammy; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Sun, 31 Jul 2022 12:57:31 +0200
+  * New upstream release for OpenStack Yoga.
 
-python-taskflow (4.6.4-2) unstable; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Wed, 02 Mar 2022 15:51:50 -0500
 
-  * Uploading to unstable.
+python-taskflow (4.6.3-0ubuntu1) jammy; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Thu, 24 Mar 2022 10:59:02 +0100
+  * New upstream release for OpenStack Yoga.
+  * d/control: Bump debhelper compat to 13.
+  * d/control: Align (Build-)Depends with upstream.
 
-python-taskflow (4.6.4-1) experimental; urgency=medium
+ -- Corey Bryant <corey.bryant@canonical.com>  Wed, 08 Dec 2021 10:12:01 -0500
 
-  * New upstream release.
+python-taskflow (4.6.0-0ubuntu1) hirsute; urgency=medium
 
- -- Thomas Goirand <zigo@debian.org>  Sat, 19 Feb 2022 10:58:53 +0100
+  [ Chris MacNaughton ]
+  * d/control: Update VCS paths for move to lp:~ubuntu-openstack-dev.
 
-python-taskflow (4.6.2-2) unstable; urgency=medium
+  [ Corey Bryant ]
+  * New upstream release for OpenStack Wallaby.
 
-  * Uploading to unstable.
+ -- Corey Bryant <corey.bryant@canonical.com>  Wed, 17 Mar 2021 10:45:47 -0400
 
- -- Thomas Goirand <zigo@debian.org>  Wed, 29 Sep 2021 11:45:46 +0200
+python-taskflow (4.5.0-0ubuntu1) groovy; urgency=medium
 
-python-taskflow (4.6.2-1) experimental; urgency=medium
+  * New upstream release for OpenStack Victoria.
+  * d/control: Align (Build-)Depends with upstream.
+  * d/p/*: Rebased.
 
-  * New upstream release.
+ -- Corey Bryant <corey.bryant@canonical.com>  Fri, 18 Sep 2020 12:38:56 -0400
 
- -- Thomas Goirand <zigo@debian.org>  Tue, 07 Sep 2021 11:02:23 +0200
+python-taskflow (4.4.0-0ubuntu1) groovy; urgency=medium
 
-python-taskflow (4.6.1-1) experimental; urgency=medium
+  * New upstream release for OpenStack Victoria.
 
-  * New upstream release.
-  * Removed python3-mock from build-depends.
-  * Requires SQLA 1.4.
-  * Set minimum sqlalchemy-utils to >= 0.37.
+ -- Chris MacNaughton <chris.macnaughton@canonical.com>  Thu, 03 Sep 2020 08:53:56 +0000
 
- -- Thomas Goirand <zigo@debian.org>  Tue, 24 Aug 2021 11:21:21 +0200
+python-taskflow (4.3.1-0ubuntu1) groovy; urgency=medium
 
-python-taskflow (4.6.0-2) unstable; urgency=medium
+  [ Chris MacNaughton ]
+  * New upstream release for OpenStack Victoria.
+  * d/control: Align (Build-)Depends with upstream.
 
-  * Upload to unstable.
+  [ Corey Bryant ]
+  * d/control: Restore previous min version of python3-sqlalchemy-utils BD.
 
- -- Thomas Goirand <zigo@debian.org>  Mon, 16 Aug 2021 09:48:27 +0200
+ -- Corey Bryant <corey.bryant@canonical.com>  Wed, 29 Jul 2020 14:01:41 -0400
 
-python-taskflow (4.6.0-1) experimental; urgency=medium
+python-taskflow (4.2.0-0ubuntu2) groovy; urgency=medium
 
-  * New upstream release.
-  * Removed (build-)depends versions when satisfied in Bullseye.
+  * d/control: Add depends on python3-sqlalchemy-utils.
 
- -- Thomas Goirand <zigo@debian.org>  Mon, 08 Mar 2021 13:08:26 +0100
+ -- Chris MacNaughton <chris.macnaughton@canonical.com>  Tue, 21 Jul 2020 14:59:33 +0000
 
-python-taskflow (4.5.0-3) unstable; urgency=medium
+python-taskflow (4.2.0-0ubuntu1) groovy; urgency=medium
 
-  * Added Restrictions: superficial to d/tests/control (Closes: #974511).
+  * New upstream release for OpenStack Victoria.
+  * d/control: Align (Build-)Depends with upstream.
+  * d/control, d/rules: Switch to debhelper compat 13 and pybuild.
+  * d/control: Update Standards-Version to 4.5.0.
 
- -- Thomas Goirand <zigo@debian.org>  Sat, 21 Nov 2020 00:05:12 +0100
+ -- Corey Bryant <corey.bryant@canonical.com>  Fri, 12 Jun 2020 13:23:18 -0400
 
-python-taskflow (4.5.0-2) unstable; urgency=medium
+python-taskflow (4.1.0-0ubuntu2) groovy; urgency=medium
 
-  * Fixed debian/watch.
-  * Uploading to unstable.
-  * Add a debian/salsa-ci.yml.
+  * Move python3-kazoo to Suggests (LP: #1880705)
 
- -- Thomas Goirand <zigo@debian.org>  Wed, 14 Oct 2020 14:21:42 +0200
+ -- Chris MacNaughton <chris.macnaughton@canonical.com>  Wed, 27 May 2020 14:02:53 +0100
 
-python-taskflow (4.5.0-1) experimental; urgency=medium
+python-taskflow (4.1.0-0ubuntu1) focal; urgency=medium
 
-  * New upstream release.
-  * Fixed (build-)depends for this release.
+  * New upstream release for OpenStack Ussuri.
+  * d/control: Align (Build-)Depends with upstream.
 
- -- Thomas Goirand <zigo@debian.org>  Sun, 13 Sep 2020 14:46:42 +0200
+ -- Corey Bryant <corey.bryant@canonical.com>  Fri, 10 Apr 2020 10:06:26 -0400
 
-python-taskflow (4.4.0-1) experimental; urgency=medium
+python-taskflow (4.0.0-0ubuntu1) focal; urgency=medium
 
-  * New upstream release.
-  * Set minimum version for python3-jsonschema.
+  * New upstream release for OpenStack Ussuri.
+  * d/control: Align (Build-)Depends with upstream.
+  * d/p/*: Rebased.
 
- -- Thomas Goirand <zigo@debian.org>  Tue, 08 Sep 2020 08:35:53 +0200
+ -- Corey Bryant <corey.bryant@canonical.com>  Wed, 11 Mar 2020 16:36:28 -0400
 
-python-taskflow (4.1.0-2) unstable; urgency=medium
+python-taskflow (3.8.0-0ubuntu2) focal; urgency=medium
 
-  * Uploading to unstable.
+  * d/control: Drop Depends on python3-pydot as it's in Suggests and switch
+    Recommends to python3.
 
- -- Thomas Goirand <zigo@debian.org>  Fri, 08 May 2020 11:58:58 +0200
+ -- Corey Bryant <corey.bryant@canonical.com>  Thu, 23 Jan 2020 15:59:26 -0500
 
-python-taskflow (4.1.0-1) experimental; urgency=medium
+python-taskflow (3.8.0-0ubuntu1) focal; urgency=low
 
-  * New upstream release.
-  * Fixed (build-)depends for this release.
-  * Removed patches applied upstream:
-    - fix-py38-hmac-compatibility.patch
-    - fix-networkx-2.x-compatibility.patch    
+  * Merge from Debian unstable.  Remaining changes:
+    - d/control, d/tests/*: Add import and unit test autopkgtests.
+    - d/control: Move python3-pydot(plus) to Suggests as they're only
+      required for tools/state_graph.py, and aren't in Ubuntu main.
+    - d/gbp.conf: Retain for gbp and pristine-tar config.
+    - d/p/0003-Pass-digest-to-hmac.new.patch: Fix working with Python 3.8
+      by passing digest to hmac.new() (LP: #1852317)
+    - d/p/move-test-requirements-out-of-runtime-requirements.patch:
+      Move stestr and pydot from runtime requirements to test requirements,
+      avoiding pulling these two packages into Ubuntu main.
+    - d/watch: Get tarball from tarballs.openstack.org.
+    - d/control: Drop python3-contextlib2 as it is only needed for < python3.0.
+    - d/control,python-taskflow-doc.links: Use generated assets for
+      documentation rather than using libjs-* packages.
+  * New upstream release for OpenStack Ussuri.
+  * d/control: Align (Build-)Depends with upstream.
+  * d/p/0003-Pass-digest-to-hmac.new.patch,
+    d/p/fix-networkx-2.x-compatibility.patch,
+    d/p/fix-py38-hmac-compatibility.patch: Dropped. Fixed in upstream release.
 
- -- Thomas Goirand <zigo@debian.org>  Mon, 06 Apr 2020 22:59:02 +0200
+ -- Corey Bryant <corey.bryant@canonical.com>  Thu, 12 Dec 2019 17:10:29 -0500
 
 python-taskflow (3.7.1-4) unstable; urgency=medium
 
@@ -247,6 +268,45 @@ python-taskflow (3.7.1-1) experimental;
 
  -- Thomas Goirand <zigo@debian.org>  Wed, 18 Sep 2019 22:26:28 +0200
 
+python-taskflow (3.7.1-0ubuntu3) focal; urgency=medium
+
+  * Fix working with Python 3.8 by passing digest to hmac.new()
+    (LP: #1852317)
+
+ -- Balint Reczey <rbalint@ubuntu.com>  Tue, 12 Nov 2019 16:52:52 +0100
+
+python-taskflow (3.7.1-0ubuntu2) eoan; urgency=medium
+
+  * d/p/python-ostestr: Drop py2 tests as package no longer ships a py2
+    module.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 02 Oct 2019 08:16:42 +0100
+
+python-taskflow (3.7.1-0ubuntu1) eoan; urgency=medium
+
+  * New upstream release for OpenStack Train.
+
+ -- James Page <james.page@ubuntu.com>  Thu, 26 Sep 2019 10:36:29 +0100
+
+python-taskflow (3.7.0-0ubuntu3) eoan; urgency=medium
+
+  * d/control: Drop python3-contextlib2 as it is only needed for < python3.0.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Tue, 13 Aug 2019 15:36:07 -0400
+
+python-taskflow (3.7.0-0ubuntu2) eoan; urgency=medium
+
+  * d/control, d/rules, d/pydist-overrides: Drop Python 2 support.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Wed, 10 Jul 2019 17:11:29 -0400
+
+python-taskflow (3.7.0-0ubuntu1) eoan; urgency=medium
+
+  * New upstream release for OpenStack Stein.
+  * d/p/*: Refresh.
+
+ -- James Page <james.page@ubuntu.com>  Wed, 05 Jun 2019 15:11:16 +0100
+
 python-taskflow (3.4.0-2) unstable; urgency=medium
 
   * Uploading to unstable.
@@ -261,6 +321,28 @@ python-taskflow (3.4.0-1) experimental;
 
  -- Thomas Goirand <zigo@debian.org>  Wed, 27 Mar 2019 13:41:21 +0100
 
+python-taskflow (3.4.0-0ubuntu1) disco; urgency=medium
+
+  * New upstream release for OpenStack Stein.
+  * d/p/*: Refresh.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 22 Mar 2019 14:11:03 +0000
+
+python-taskflow (3.3.1-0ubuntu2) disco; urgency=medium
+
+  * d/p/move-test-requirements-out-of-runtime-requirements.patch:
+    Move stestr and pydot from runtime requirements to test requirements,
+    avoiding pulling these two packages into Ubuntu main.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 20 Nov 2018 14:24:02 +0000
+
+python-taskflow (3.3.1-0ubuntu1) disco; urgency=medium
+
+  * New upstream release for OpenStack Stein.
+  * d/control: Switch to stestr for unit tests.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 20 Nov 2018 10:39:18 +0000
+
 python-taskflow (3.2.0-3) unstable; urgency=medium
 
   * Add patch to fix StrEnum class that is breaking multiple packages at
@@ -287,6 +369,29 @@ python-taskflow (3.2.0-1) unstable; urge
 
  -- Thomas Goirand <zigo@debian.org>  Fri, 07 Sep 2018 12:11:00 +0200
 
+python-taskflow (3.2.0-0ubuntu2) cosmic; urgency=medium
+
+  * d/control,python-taskflow-doc.links: Use generated assets for
+    documentation rather than using libjs-* packages.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 14 Aug 2018 14:16:01 +0100
+
+python-taskflow (3.2.0-0ubuntu1) cosmic; urgency=low
+
+  * Merge from Debian unstable.  Remaining changes:
+    - d/control: Enable autopkgtest-pkg-python testsuite.
+    - d/control: Move python-pydotplus to Suggests as it is only required
+      for tools/state_graph.py, and it is not in Ubuntu main.
+    - d/gbp.conf: Retain for gbp and pristine-tar config.
+    - d/watch: Get tarball from tarballs.openstack.org.
+    - d/tests/*: Replace python-taskflow with python-ostestr as it has
+      better coverage.
+  * New upstream release.
+  * d/control: Align (Build-)Depends with upstream.
+  * d/p/fix-networkx-2-support.patch: Dropped. Fixed in new upstream release.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Tue, 07 Aug 2018 09:32:59 -0400
+
 python-taskflow (3.1.0-5) unstable; urgency=medium
 
   * Fix description in fix-networkx-2-support.patch (Closes: #902985).
@@ -342,6 +447,73 @@ python-taskflow (3.1.0-1) experimental;
 
  -- Thomas Goirand <zigo@debian.org>  Thu, 15 Feb 2018 09:05:20 +0000
 
+python-taskflow (3.1.0-0ubuntu6) cosmic; urgency=medium
+
+  * Tweak 5f4c3d67335d3d28f8c27a5bbf95656615ad9143.patch to make it build.
+
+ -- Gianfranco Costamagna <locutusofborg@debian.org>  Tue, 03 Jul 2018 10:01:13 +0200
+
+python-taskflow (3.1.0-0ubuntu5) cosmic; urgency=medium
+
+  * debian/patches/5f4c3d67335d3d28f8c27a5bbf95656615ad9143.patch:
+    - upstream-proposed patch to make it compatible with networkx2
+    - add pydot to dependencies
+
+ -- Gianfranco Costamagna <locutusofborg@debian.org>  Thu, 28 Jun 2018 16:09:16 +0200
+
+python-taskflow (3.1.0-0ubuntu4) cosmic; urgency=medium
+
+  * d/tests/*,d/control: Add autopkgtests to exercise import and unit
+    tests as part of Ubuntu CI, resolving current failures.
+  * d/p/oslo.serialization-compat.patch: Cherry pick fix for compat
+    with oslo.serialization 2.26.0.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 19 Jun 2018 23:35:12 +0100
+
+python-taskflow (3.1.0-0ubuntu3) cosmic; urgency=medium
+
+  * d/tests/control: Use @builddeps@ when executing unit tests.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 19 Jun 2018 15:57:36 +0100
+
+python-taskflow (3.1.0-0ubuntu2) bionic; urgency=medium
+
+  * d/rules: Set PYTHONPATH to CURDIR for override_dh_auto_test and
+    override_dh_sphinxdoc. This fixes "ImportError: No module named taskflow"
+    for test_resume_many_flows.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Wed, 11 Apr 2018 12:35:25 -0400
+
+python-taskflow (3.1.0-0ubuntu1) bionic; urgency=medium
+
+  * New upstream release.
+  * d/*: wrap-and-sort -bast.
+  * d/control: Align (Build-)Depends with upstream.
+  * d/control: Update Standards-Version to 4.1.2.
+  * d/control: Bump debhelper compat to 10.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Thu, 25 Jan 2018 09:19:52 -0500
+
+python-taskflow (2.17.0-0ubuntu2) bionic; urgency=medium
+
+  * d/p/*: Drop, revert being held in oslo.serialization until
+    OpenStack projects are compatible with oslo.serialization >= 2.21.2.
+
+ -- James Page <james.page@ubuntu.com>  Mon, 11 Dec 2017 09:09:43 +0000
+
+python-taskflow (2.17.0-0ubuntu1) bionic; urgency=medium
+
+  * New upstream release.
+  * Align (Build-)Depends with upstream milestone.
+  * d/p/*: Drop, no longer needed.
+  * d/control: Fixup alembic runtime depends.
+  * d/control: wrap-and-sort -at
+  * d/p/oslo.serialization-2.21.2-compat.patch: Fix compatibility
+    with oslo.serialization >= 2.21.2 (LP: #1736394).
+  * d/control: Add python-openstackdocstheme to BD's.
+
+ -- James Page <james.page@ubuntu.com>  Tue, 05 Dec 2017 13:10:32 +0000
+
 python-taskflow (2.14.0-1) experimental; urgency=medium
 
   [ Ondřej Nový ]
@@ -369,6 +541,73 @@ python-taskflow (2.14.0-1) experimental;
 
  -- Thomas Goirand <zigo@debian.org>  Sat, 07 Oct 2017 11:44:18 +0200
 
+python-taskflow (2.14.0-0ubuntu1) artful; urgency=medium
+
+  * New upstream release.
+  * d/p/drop-openstackdoctheme.patch: Temporarily drop openstackdocstheme
+    sphinx extension until sphinx>=1.6.2 is available.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Tue, 15 Aug 2017 17:00:17 -0400
+
+python-taskflow (2.13.0-0ubuntu1) artful; urgency=medium
+
+  * New upstream release.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Fri, 07 Jul 2017 15:49:20 -0400
+
+python-taskflow (2.11.0-0ubuntu1) artful; urgency=medium
+
+  * d/watch: Use tarballs.openstack.org.
+  * New upstream release.
+  * Align (Build-)Depends with upstream milestone.
+
+ -- James Page <james.page@ubuntu.com>  Fri, 02 Jun 2017 09:23:00 +0100
+
+python-taskflow (2.9.0-0ubuntu1) zesty; urgency=medium
+
+  * New upstream release.
+
+ -- Chuck Short <zulcss@ubuntu.com>  Mon, 23 Jan 2017 10:12:57 -0500
+
+python-taskflow (2.8.0-0ubuntu1) zesty; urgency=medium
+
+  * New upstream release.
+  * d/control: Align (Build-)Depends with upstream.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Mon, 05 Dec 2016 11:01:39 -0500
+
+python-taskflow (2.7.0-0ubuntu1) zesty; urgency=medium
+
+  * d/gbp.conf: Update gbp configuration file.
+  * d/control: Update Vcs-* links and maintainers.
+  * New upstream release.
+  * d/control: Align (Build-)Depends with upstream.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Thu, 20 Oct 2016 09:35:30 -0400
+
+python-taskflow (2.3.0-1ubuntu1) yakkety; urgency=medium
+
+  * d/control: Move python-pydotplus to Suggests as it is only required
+    for tools/state_graph.py, and it is not in Ubuntu main.
+
+ -- Corey Bryant <corey.bryant@canonical.com>  Mon, 03 Oct 2016 13:14:33 -0400
+
+python-taskflow (2.3.0-1) experimental; urgency=medium
+
+  [ Ondřej Nový ]
+  * Standards-Version is 3.9.8 now (no change)
+  * Added missing Debian tests dependencies
+  * Added Python module import Debian tests
+  * d/rules: Changed UPSTREAM_GIT protocol to https
+  * d/copyright: Changed source URL to https protocol
+
+  [ Thomas Goirand ]
+  * New upstream release.
+  * Fixed (build-)depends for this release.
+  * Using pkgos-dh_auto_{test,install} from openstack-pkg-tools >= 52~.
+
+ -- Thomas Goirand <zigo@debian.org>  Wed, 13 Jul 2016 21:47:19 +0200
+
 python-taskflow (1.30.0-2) unstable; urgency=medium
 
   * Uploading to unstable.
@@ -536,3 +775,4 @@ python-taskflow (0.1.2-1) unstable; urge
   * Initial release.
 
  -- Thomas Goirand <zigo@debian.org>  Fri, 24 Jan 2014 22:06:31 +0800
+
diff -pruN 5.12.0-2/debian/control 6.0.2-0ubuntu1/debian/control
--- 5.12.0-2/debian/control	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/control	2025-09-22 20:23:52.000000000 +0000
@@ -1,65 +1,65 @@
 Source: python-taskflow
 Section: python
 Priority: optional
-Maintainer: Debian OpenStack <team+openstack@tracker.debian.org>
+Maintainer: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>
+XSBC-Original-Maintainer: Debian OpenStack <team+openstack@tracker.debian.org>
 Uploaders:
  Thomas Goirand <zigo@debian.org>,
  Michal Arbet <michal.arbet@ultimum.io>,
 Build-Depends:
- debhelper-compat (= 10),
+ debhelper-compat (= 13),
  dh-python,
  openstack-pkg-tools,
  python3-all,
- python3-pbr,
+ python3-pbr (>= 2.0.0),
  python3-setuptools,
- python3-sphinx,
+ python3-sphinx (>= 2.0.0),
+ python3-sphinxcontrib.jquery,
 Build-Depends-Indep:
  alembic,
- python3-alembic,
- python3-automaton,
- python3-cachetools,
- python3-coverage,
- python3-debtcollector,
- python3-etcd3gw,
- python3-eventlet,
+ python3-alembic (>= 0.8.10),
+ python3-automaton (>= 1.9.0),
+ python3-cachetools (>= 2.0.0),
+ python3-debtcollector (>= 1.2.0),
+ python3-etcd3gw (>= 2.0.0),
+ python3-eventlet (>= 0.35.2),
  python3-fasteners (>= 0.17.3),
- python3-futurist,
- python3-hacking,
- python3-jsonschema,
- python3-kazoo,
- python3-kombu,
+ python3-futurist (>= 1.2.0),
+ python3-jsonschema (>= 3.2.0),
+ python3-kazoo (>= 2.6.0),
+ python3-kombu (>= 4.3.0),
  python3-netifaces,
- python3-networkx,
- python3-openstackdocstheme,
- python3-oslo.serialization,
- python3-oslo.utils,
- python3-oslotest,
- python3-psycopg2,
+ python3-networkx (>= 2.1.0),
+ python3-openstackdocstheme (>= 2.2.1),
+ python3-oslo.serialization (>= 2.18.0),
+ python3-oslo.utils (>= 3.33.0),
+ python3-oslotest (>= 1:3.2.0),
+ python3-psycopg2 (>= 2.8.0),
  python3-pyasyncore,
- python3-pydot,
- python3-pymysql,
- python3-redis,
- python3-sqlalchemy (>= 1.4),
- python3-sqlalchemy-utils (>= 0.37),
- python3-stestr,
- python3-stevedore,
+ python3-pydot (>= 1.2.4),
+ python3-pydotplus (>= 2.0.2),
+ python3-pymysql (>= 0.7.6),
+ python3-redis (>= 4.0.0),
+ python3-sqlalchemy (>= 1.0.10),
+ python3-sqlalchemy-utils (>= 0.30.11),
+ python3-stestr (>= 2.0.0),
+ python3-stevedore (>= 1:1.20.0),
  python3-subunit,
- python3-tenacity,
- python3-testscenarios,
- python3-testtools,
- python3-zake,
+ python3-tenacity (>= 6.0.0),
+ python3-testscenarios (>= 0.4),
+ python3-testtools (>= 2.2.0),
+ python3-zake (>= 0.1.6),
  subunit,
-Standards-Version: 4.5.1
-Vcs-Browser: https://salsa.debian.org/openstack-team/libs/python-taskflow
-Vcs-Git: https://salsa.debian.org/openstack-team/libs/python-taskflow.git
+Standards-Version: 4.5.0
+Vcs-Browser: https://git.launchpad.net/~ubuntu-openstack-dev/ubuntu/+source/python-taskflow
+Vcs-Git: https://git.launchpad.net/~ubuntu-openstack-dev/ubuntu/+source/python-taskflow
 Homepage: https://github.com/openstack/taskflow
+Testsuite: autopkgtest-pkg-python
 
 Package: python-taskflow-doc
 Section: doc
 Architecture: all
 Depends:
- libjs-bootstrap5,
- libjs-jquery,
  ${misc:Depends},
  ${sphinxdoc:Depends},
 Description: Taskflow structured state management library - doc
@@ -74,30 +74,32 @@ Architecture: all
 Depends:
  alembic,
  python3-alembic,
- python3-automaton,
- python3-cachetools,
- python3-debtcollector,
- python3-eventlet,
+ python3-automaton (>= 1.9.0),
+ python3-cachetools (>= 2.0.0),
+ python3-eventlet (>= 0.35.2),
  python3-fasteners (>= 0.17.3),
- python3-futurist,
- python3-jsonschema,
- python3-kazoo,
+ python3-futurist (>= 1.2.0),
+ python3-jsonschema (>= 3.2.0),
  python3-kombu,
- python3-networkx,
- python3-oslo.serialization,
- python3-oslo.utils,
- python3-pbr,
+ python3-networkx (>= 2.1.0),
+ python3-oslo.serialization (>= 2.18.0),
+ python3-oslo.utils (>= 3.33.0),
+ python3-pbr (>= 2.0.0),
  python3-psycopg2,
  python3-pyasyncore,
- python3-pydot,
  python3-pymysql,
- python3-sqlalchemy (>= 1.4),
- python3-stevedore,
- python3-tenacity,
+ python3-sqlalchemy,
+ python3-sqlalchemy-utils (>= 0.36.1-0ubuntu2~),
+ python3-stevedore (>= 1:1.20.0),
+ python3-tenacity (>= 6.0.0),
  ${misc:Depends},
  ${python3:Depends},
 Recommends:
- ${python:Recommends},
+ ${python3:Recommends},
+Suggests:
+ python3-kazoo,
+ python3-pydot,
+ python3-pydotplus,
 Description: Taskflow structured state management library - Python 3.x
  TaskFlow, is a library to do [jobs, tasks, flows] in a HA manner
  using different backends to be used with OpenStack projects.
diff -pruN 5.12.0-2/debian/gbp.conf 6.0.2-0ubuntu1/debian/gbp.conf
--- 5.12.0-2/debian/gbp.conf	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/gbp.conf	2025-09-22 20:23:52.000000000 +0000
@@ -0,0 +1,9 @@
+[DEFAULT]
+debian-branch = master
+upstream-tag = %(version)s
+pristine-tar = True
+upstream-branch = upstream-flamingo
+
+[buildpackage]
+export-dir = ../build-area
+prebuild = [ ! -f .launchpad.yaml ] || rm .launchpad.yaml
diff -pruN 5.12.0-2/debian/patches/move-test-requirements-out-of-runtime-requirements.patch 6.0.2-0ubuntu1/debian/patches/move-test-requirements-out-of-runtime-requirements.patch
--- 5.12.0-2/debian/patches/move-test-requirements-out-of-runtime-requirements.patch	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/patches/move-test-requirements-out-of-runtime-requirements.patch	2025-09-22 20:23:52.000000000 +0000
@@ -0,0 +1,35 @@
+From 74db4d924bb6204395b9803e6168caa953896b2b Mon Sep 17 00:00:00 2001
+From: James Page <james.page@ubuntu.com>
+Date: Tue, 20 Nov 2018 14:05:37 +0000
+Subject: [PATCH] Move test requirements out of runtime requirements
+
+pydot and stestr are test requirements for taskflow; drop
+from requirements.txt.
+---
+ requirements.txt      | 6 ------
+ test-requirements.txt | 1 +
+ 2 files changed, 1 insertion(+), 6 deletions(-)
+
+Index: python-taskflow/requirements.txt
+===================================================================
+--- python-taskflow.orig/requirements.txt
++++ python-taskflow/requirements.txt
+@@ -35,6 +35,3 @@ tenacity>=6.0.0 # Apache-2.0
+ 
+ # For lru caches and such
+ cachetools>=2.0.0 # MIT License
+-
+-# For pydot output tests
+-pydot>=1.2.4 # MIT License
+Index: python-taskflow/test-requirements.txt
+===================================================================
+--- python-taskflow.orig/test-requirements.txt
++++ python-taskflow/test-requirements.txt
+@@ -26,6 +26,7 @@ psycopg2>=2.8.0 # LGPL/ZPL
+ 
+ # test
+ zake>=0.1.6 # Apache-2.0
++pydot>=1.2.4 # MIT License
+ pydotplus>=2.0.2 # MIT License
+ oslotest>=3.2.0 # Apache-2.0
+ testtools>=2.2.0 # MIT
diff -pruN 5.12.0-2/debian/patches/remove-bad-enum.py-calls.patch 6.0.2-0ubuntu1/debian/patches/remove-bad-enum.py-calls.patch
--- 5.12.0-2/debian/patches/remove-bad-enum.py-calls.patch	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/patches/remove-bad-enum.py-calls.patch	2025-09-22 20:23:52.000000000 +0000
@@ -8,7 +8,7 @@ Index: python-taskflow/taskflow/utils/mi
 ===================================================================
 --- python-taskflow.orig/taskflow/utils/misc.py
 +++ python-taskflow/taskflow/utils/misc.py
-@@ -49,13 +49,6 @@ _SCHEME_REGEX = re.compile(r"^([A-Za-z][
+@@ -48,13 +48,6 @@ _SCHEME_REGEX = re.compile(r"^([A-Za-z][
  class StrEnum(str, enum.Enum):
      """An enumeration that is also a string and can be compared to strings."""
  
@@ -17,8 +17,8 @@ Index: python-taskflow/taskflow/utils/mi
 -            if not isinstance(a, str):
 -                raise TypeError("Enumeration '%s' (%s) is not"
 -                                " a string" % (a, type(a).__name__))
--        return super(StrEnum, cls).__new__(cls, *args, **kwargs)
+-        return super().__new__(cls, *args, **kwargs)
 -
  
- class StringIO(six.StringIO):
+ class StringIO(io.StringIO):
      """String buffer with some small additions."""
diff -pruN 5.12.0-2/debian/patches/reproducible_build.patch 6.0.2-0ubuntu1/debian/patches/reproducible_build.patch
--- 5.12.0-2/debian/patches/reproducible_build.patch	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/patches/reproducible_build.patch	2025-09-22 20:23:52.000000000 +0000
@@ -7,7 +7,7 @@ Index: python-taskflow/taskflow/conducto
 ===================================================================
 --- python-taskflow.orig/taskflow/conductors/backends/impl_executor.py
 +++ python-taskflow/taskflow/conductors/backends/impl_executor.py
-@@ -103,7 +103,9 @@ class ExecutorConductor(base.Conductor,
+@@ -101,7 +101,9 @@ class ExecutorConductor(base.Conductor,
      def __init__(self, name, jobboard,
                   persistence=None, engine=None,
                   engine_options=None, wait_timeout=None,
@@ -15,6 +15,6 @@ Index: python-taskflow/taskflow/conducto
 +                 log=None, max_simultaneous_jobs=None):
 +        if max_simultaneous_jobs is None:
 +            max_simultaneous_jobs = MAX_SIMULTANEOUS_JOBS
-         super(ExecutorConductor, self).__init__(
+         super().__init__(
              name, jobboard, persistence=persistence,
              engine=engine, engine_options=engine_options)
diff -pruN 5.12.0-2/debian/patches/series 6.0.2-0ubuntu1/debian/patches/series
--- 5.12.0-2/debian/patches/series	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/patches/series	2025-09-22 20:23:52.000000000 +0000
@@ -1 +1,3 @@
 reproducible_build.patch
+move-test-requirements-out-of-runtime-requirements.patch
+remove-bad-enum.py-calls.patch
diff -pruN 5.12.0-2/debian/python-taskflow-doc.links 6.0.2-0ubuntu1/debian/python-taskflow-doc.links
--- 5.12.0-2/debian/python-taskflow-doc.links	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/python-taskflow-doc.links	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-usr/share/javascript/bootstrap/js/bootstrap.js usr/share/doc/python-taskflow-doc/html/_static/js/bootstrap.js
-usr/share/javascript/bootstrap/js/bootstrap.min.js usr/share/doc/python-taskflow-doc/html/_static/js/bootstrap.min.js
-usr/share/javascript/jquery/jquery.js usr/share/doc/python-taskflow-doc/html/_static/js/jquery-3.2.1.js
-usr/share/javascript/jquery/jquery.min.js usr/share/doc/python-taskflow-doc/html/_static/js/jquery-3.2.1.min.js
diff -pruN 5.12.0-2/debian/rules 6.0.2-0ubuntu1/debian/rules
--- 5.12.0-2/debian/rules	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/rules	2025-09-22 20:23:52.000000000 +0000
@@ -1,15 +1,21 @@
 #!/usr/bin/make -f
 
+export PYBUILD_NAME=taskflow
+
 UPSTREAM_GIT := https://github.com/openstack/taskflow.git
+
+DEBVERS ?= $(shell dpkg-parsechangelog | sed -n -e 's/^Version: //p')
+VERSION ?= $(shell echo '$(DEBVERS)' | sed -e 's/^[[:digit:]]*://' -e 's/[-].*//' -e 's/[+].*//' -e 's/~git.*//g' -e 's/~/./g')
+export PBR_VERSION=$(VERSION)
+
 include /usr/share/openstack-pkg-tools/pkgos.make
 
 %:
 	dh $@ --buildsystem=pybuild --with python3,sphinxdoc
 
 override_dh_auto_clean:
-	rm -rf .eggs *.egg-info build
 	find . -iname '*.pyc' -delete
-	for i in $$(find . -type d -iname __pycache__) ; do rm -rf $$i ; done
+	rm -rf .eggs
 
 override_dh_auto_build:
 	echo "Do nothing..."
@@ -19,7 +25,7 @@ override_dh_auto_install:
 		python3 setup.py install -f --install-layout=deb --root=$(CURDIR)/debian/tmp ; \
 	done
 ifeq (,$(findstring nocheck, $(DEB_BUILD_OPTIONS)))
-	PYTHONPATH=$(CURDIR)/debian/tmp/usr/lib/python3/dist-packages pkgos-dh_auto_test --no-py2 'taskflow\.tests\.(?!(.*test_examples\.ExamplesTestCase.*|.*worker_based\.test_protocol\.TestProtocol\.test_to_dict_with_invalid_json_failures.*|.*unit\.test_utils\.UriParseTest\.test_ipv6_host.*|.*persistence\.test_dir_persistence\.DirPersistenceTest\.test_dir_backend_cache_overfill.*))'
+	PYTHONPATH=$(CURDIR)/debian/tmp/usr/lib/python3/dist-packages pkgos-dh_auto_test --no-py2 'taskflow\.tests\.(?!(.*test_examples\.ExamplesTestCase.*|.*worker_based\.test_protocol\.TestProtocol\.test_to_dict_with_invalid_json_failures.*))'
 endif
 
 override_dh_auto_test:
@@ -28,5 +34,6 @@ override_dh_auto_test:
 override_dh_sphinxdoc:
 ifeq (,$(findstring nodoc, $(DEB_BUILD_OPTIONS)))
 	PYTHONPATH=. PYTHON=python3 python3 -m sphinx -b html doc/source $(CURDIR)/debian/python-taskflow-doc/usr/share/doc/python-taskflow-doc/html
+	dh_link -p python-taskflow-doc /usr/share/javascript/underscore/underscore.js /usr/share/doc/python-taskflow-doc/html/_static/underscore.js
 	dh_sphinxdoc
 endif
diff -pruN 5.12.0-2/debian/salsa-ci.yml 6.0.2-0ubuntu1/debian/salsa-ci.yml
--- 5.12.0-2/debian/salsa-ci.yml	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/salsa-ci.yml	1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-include:
-  - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/salsa-ci.yml
-  - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/pipeline-jobs.yml
diff -pruN 5.12.0-2/debian/source/options 6.0.2-0ubuntu1/debian/source/options
--- 5.12.0-2/debian/source/options	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/source/options	2025-09-22 20:23:52.000000000 +0000
@@ -0,0 +1 @@
+extend-diff-ignore = "^.launchpad.yaml"
diff -pruN 5.12.0-2/debian/tests/control 6.0.2-0ubuntu1/debian/tests/control
--- 5.12.0-2/debian/tests/control	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/tests/control	2025-09-22 20:23:52.000000000 +0000
@@ -1,4 +1,5 @@
-Tests: unittests
+Tests: python-stestr
 Depends:
+ @,
  @builddeps@,
-Restrictions: allow-stderr needs-root
+Restrictions: allow-stderr, needs-recommends
diff -pruN 5.12.0-2/debian/tests/python-stestr 6.0.2-0ubuntu1/debian/tests/python-stestr
--- 5.12.0-2/debian/tests/python-stestr	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/tests/python-stestr	2025-09-22 20:23:52.000000000 +0000
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+set -ex
+
+for py in $(py3versions -r 2>/dev/null); do
+    echo "Testing with $py:"
+    PYTHON=$py stestr run
+    rm -rf .stestr
+done
diff -pruN 5.12.0-2/debian/tests/unittests 6.0.2-0ubuntu1/debian/tests/unittests
--- 5.12.0-2/debian/tests/unittests	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/tests/unittests	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-#!/bin/sh
-
-set -e
-set -x
-export OSLO_PACKAGE_VERSION=$(dpkg-parsechangelog -SVersion | sed -e 's/^[[:digit:]]*://' -e 's/[-].*//' -e 's/~git.*//' -e 's/~/.0/' -e 's/+dfsg1//' -e 's/+ds1//' | head -n 1)
-
-CWD=$(pwd)
-
-python3 setup.py install -f --install-layout=deb --root=$(CURDIR)/debian/tmp
-pkgos-dh_auto_test --no-py2 'taskflow\.tests\.(?!(.*test_examples\.ExamplesTestCase.*|.*worker_based\.test_protocol\.TestProtocol\.test_to_dict_with_invalid_json_failures.*|.*unit\.test_utils\.UriParseTest\.test_ipv6_host.*|.*persistence\.test_dir_persistence\.DirPersistenceTest\.test_dir_backend_cache_overfill.*))'
diff -pruN 5.12.0-2/debian/upstream/signing-key.asc 6.0.2-0ubuntu1/debian/upstream/signing-key.asc
--- 5.12.0-2/debian/upstream/signing-key.asc	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/upstream/signing-key.asc	2025-09-22 20:23:52.000000000 +0000
@@ -0,0 +1,34 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mDMEZ92jlhYJKwYBBAHaRw8BAQdAnSMr01EpuQHlcTQ8c0ENVDFs9mi/TdWB0IlG
+ZF1eC2S0Qk9wZW5TdGFjayBJbmZyYSAoMjAyNS4yL0ZsYW1pbmdvIEN5Y2xlKSA8
+aW5mcmEtcm9vdEBvcGVuc3RhY2sub3JnPoiWBBMWCAA+FiEExxsAfvl4h/2PtjZc
+60/W1hjmIYEFAmfdo5YCGwMFCQFj9QAFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AA
+CgkQ60/W1hjmIYElLAEAkMjANVr+kVjZGp8KtliT+ySFAqoCQceuscYt7WmeD4wA
+/2gHNfFDJwkWn9JY9mCR2y7gwf6skTWl3ssPogTa84wDiHUEEBYIAB0WIQQiKE9p
+2ezN8994GXkccRrxk/+OVAUCZ92k6wAKCRAccRrxk/+OVMQGAQD0X/Eow2mK/YzZ
+rMO60g2KI767/rnKJWKFUTCZCfswxwD/WxuGjGRB98RZ9IbCcmaUvRqHf9812h60
+/RCoB4AtiwSJAjMEEAEKAB0WIQSXrklvwC3sn8NTsudI+ZYRQ0lYKQUCZ92vKwAK
+CRBI+ZYRQ0lYKdthD/wJh0uxw05WsmFwjuds82JdnH9ow3OdG/0bhds8eEvgYuQe
+ZEQ9/RC5hJ+SiV+/zclIaG+xYHW3VkZUgOmmLxRzmqFbWbDm5yKp7jTqM2zYB4Jx
+EiLB4dWHZFmrcqfkJ06nXVMjGkj73N+DqHWQ6hleaiE7tcbI++w1AN7niL0rW008
+iP8IHoWLh2dJxQlcheVdjCe0jEU3qO8KxhUTeARPD/Vp1CpJWlq9vWs9/bweMrtj
+FsQwxBiZcQ/0zXDnQFUKpbNDFjQZnjmmwTqhopquL5e5mZWr0NussI20JXyZHj7T
+N5ikDAFPf9iM1Apb+/g9njGUgYFEOgEJuDr1oLolZ+9+7uZrKGD1gmdY7pVG12Xn
+2QJ3rft/Wy8Obac+TdA+UoEYQu4LOUpUOmPOcXE/8/fTxybkQGOZm1Ufaddz+6ee
+uHBbIaxI1kh5MrxH5cIaEkvdjOGg+bMyq7C8CE8WgSEN/JiupEZYgDduOSuqGHDp
++9ydEkanNjGN+K4rXJ7ABBv9freINx5kmCAaDzXeIAL8n1/Rzd6Z/acOlC9omDM0
+mYKKIfLMp6Rp7SKnB37Tp0dKGP4r/SJsx5Sxn7XrktVJ1ht5ByipDD6VBN3+OQPx
+56pRLDCk9EFDjbOW0iKzyPx+Nya5G9CN9AqQXe1MlHsFn+q7DEUSmlGZfvtjN7g4
+BGfdo5YSCisGAQQBl1UBBQEBB0DHSvmmZUEZ94olzKZSHa2HBCWhrhOVNFn/0ag8
+KyY5fQMBCAeIfgQYFggAJhYhBMcbAH75eIf9j7Y2XOtP1tYY5iGBBQJn3aOWAhsM
+BQkBY/UAAAoJEOtP1tYY5iGB6XcA+wY0JUi2ZUqH0CRs4EfS6VML/7u08g8ZByN4
+DZ2htqOGAQDmGHHjpMhKz04eDXLpNO6UZ/Q6LnqEXztW8eBXVtiZALgzBGfdpHYW
+CSsGAQQB2kcPAQEHQKH/4Td8MRK/9UxGyPqWWaZl88m4xE48XM4kP6w3asRViO8E
+GBYIACAWIQTHGwB++XiH/Y+2NlzrT9bWGOYhgQUCZ92kdgIbAgCBCRDrT9bWGOYh
+gXYgBBkWCAAdFiEE3cFPzrMpT3Q4I7HG9zFcLOa3NF0FAmfdpHYACgkQ9zFcLOa3
+NF1xqwEA3muioM6tJtSbiCUCDau2QnalBkfov/A2FFIxvnyHbH0A/051M0O09Tcl
+E7tAieH8W63Jhg3n/GzKl36hXqh3ANML8kIA/0eu6JgK+F0s8iiy+sQecTD7W38B
+A8CSZ29ANdlMwzf7AP9araqMvKRpTMAzhQ+1Eluh8FmXQhzeZDhlIZ8DQDQ/CQ==
+=FVKc
+-----END PGP PUBLIC KEY BLOCK-----
diff -pruN 5.12.0-2/debian/watch 6.0.2-0ubuntu1/debian/watch
--- 5.12.0-2/debian/watch	2025-03-28 08:53:00.000000000 +0000
+++ 6.0.2-0ubuntu1/debian/watch	2025-09-22 20:23:52.000000000 +0000
@@ -1,3 +1,3 @@
-version=4
-opts="mode=git,uversionmangle=s/\.0rc/~rc/;s/\.0b1/~b1/;s/\.0b2/~b2/;s/\.0b3/~b3/" \
-https://github.com/openstack/taskflow refs/tags/(\d[brc\d\.]+)
+version=3
+opts=uversionmangle=s/\.(b|rc)/~$1/,pgpsigurlmangle=s/$/.asc/ \
+    http://tarballs.openstack.org/taskflow/ taskflow-(\d.*)\.tar\.gz
diff -pruN 5.12.0-2/doc/source/conf.py 6.0.2-0ubuntu1/doc/source/conf.py
--- 5.12.0-2/doc/source/conf.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/doc/source/conf.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2020 Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff -pruN 5.12.0-2/doc/source/user/engines.rst 6.0.2-0ubuntu1/doc/source/user/engines.rst
--- 5.12.0-2/doc/source/user/engines.rst	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/doc/source/user/engines.rst	2025-08-25 12:49:32.000000000 +0000
@@ -449,7 +449,6 @@ Components
 .. automodule:: taskflow.engines.action_engine.completer
 .. automodule:: taskflow.engines.action_engine.deciders
 .. automodule:: taskflow.engines.action_engine.executor
-.. automodule:: taskflow.engines.action_engine.process_executor
 .. automodule:: taskflow.engines.action_engine.runtime
 .. automodule:: taskflow.engines.action_engine.scheduler
 .. automodule:: taskflow.engines.action_engine.selector
diff -pruN 5.12.0-2/pyproject.toml 6.0.2-0ubuntu1/pyproject.toml
--- 5.12.0-2/pyproject.toml	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/pyproject.toml	2025-08-25 12:49:32.000000000 +0000
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["pbr>=6.1.1"]
+build-backend = "pbr.build"
diff -pruN 5.12.0-2/releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml 6.0.2-0ubuntu1/releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml
--- 5.12.0-2/releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml	2025-08-25 12:49:32.000000000 +0000
@@ -0,0 +1,7 @@
+---
+features:
+  - |
+    Added ``mask_inputs_keys`` and ``mask_outputs_keys`` parameters to the
+    constructors for ``FailureFormatter`` and ``DynamicLoggingListener``
+    that can be used to mask sensitive information from the ``requires``
+    and ``provides`` fields respectively when logging an atom.
diff -pruN 5.12.0-2/releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml 6.0.2-0ubuntu1/releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml
--- 5.12.0-2/releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml	2025-08-25 12:49:32.000000000 +0000
@@ -0,0 +1,4 @@
+---
+upgrade:
+  - |
+    Process executor was removed.
diff -pruN 5.12.0-2/releasenotes/source/2025.1.rst 6.0.2-0ubuntu1/releasenotes/source/2025.1.rst
--- 5.12.0-2/releasenotes/source/2025.1.rst	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/releasenotes/source/2025.1.rst	2025-08-25 12:49:32.000000000 +0000
@@ -0,0 +1,6 @@
+===========================
+2025.1 Series Release Notes
+===========================
+
+.. release-notes::
+   :branch: stable/2025.1
diff -pruN 5.12.0-2/releasenotes/source/conf.py 6.0.2-0ubuntu1/releasenotes/source/conf.py
--- 5.12.0-2/releasenotes/source/conf.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/releasenotes/source/conf.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2020 Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff -pruN 5.12.0-2/releasenotes/source/index.rst 6.0.2-0ubuntu1/releasenotes/source/index.rst
--- 5.12.0-2/releasenotes/source/index.rst	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/releasenotes/source/index.rst	2025-08-25 12:49:32.000000000 +0000
@@ -6,6 +6,7 @@
    :maxdepth: 1
 
    unreleased
+   2025.1
    2024.2
    2024.1
    2023.2
diff -pruN 5.12.0-2/setup-etcd-env.sh 6.0.2-0ubuntu1/setup-etcd-env.sh
--- 5.12.0-2/setup-etcd-env.sh	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/setup-etcd-env.sh	2025-08-25 12:49:32.000000000 +0000
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -eux
 if [ -z "$(which etcd)" ]; then
-    ETCD_VERSION=3.4.27
+    ETCD_VERSION=${ETCD_VERSION:-3.5.21}
     case `uname -s` in
         Darwin)
             OS=darwin
diff -pruN 5.12.0-2/setup.cfg 6.0.2-0ubuntu1/setup.cfg
--- 5.12.0-2/setup.cfg	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/setup.cfg	2025-08-25 12:49:58.764545400 +0000
@@ -1,77 +1,77 @@
 [metadata]
 name = taskflow
 summary = Taskflow structured state management library.
-description_file =
-    README.rst
+description_file = 
+	README.rst
 author = OpenStack
 author_email = openstack-discuss@lists.openstack.org
 home_page = https://docs.openstack.org/taskflow/latest/
 keywords = reliable,tasks,execution,parallel,dataflow,workflows,distributed
 python_requires = >=3.9
-classifier =
-    Development Status :: 5 - Production/Stable
-    Environment :: OpenStack
-    Intended Audience :: Developers
-    Intended Audience :: Information Technology
-    License :: OSI Approved :: Apache Software License
-    Operating System :: POSIX :: Linux
-    Programming Language :: Python
-    Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.9
-    Programming Language :: Python :: 3.10
-    Programming Language :: Python :: 3.11
-    Programming Language :: Python :: 3.12
-    Programming Language :: Python :: 3 :: Only
-    Programming Language :: Python :: Implementation :: CPython
-    Topic :: Software Development :: Libraries
-    Topic :: System :: Distributed Computing
+classifier = 
+	Development Status :: 5 - Production/Stable
+	Environment :: OpenStack
+	Intended Audience :: Developers
+	Intended Audience :: Information Technology
+	License :: OSI Approved :: Apache Software License
+	Operating System :: POSIX :: Linux
+	Programming Language :: Python
+	Programming Language :: Python :: 3
+	Programming Language :: Python :: 3.9
+	Programming Language :: Python :: 3.10
+	Programming Language :: Python :: 3.11
+	Programming Language :: Python :: 3.12
+	Programming Language :: Python :: 3 :: Only
+	Programming Language :: Python :: Implementation :: CPython
+	Topic :: Software Development :: Libraries
+	Topic :: System :: Distributed Computing
 
 [files]
-packages =
-    taskflow
+packages = 
+	taskflow
 
 [entry_points]
-taskflow.jobboards =
-    zookeeper = taskflow.jobs.backends.impl_zookeeper:ZookeeperJobBoard
-    redis = taskflow.jobs.backends.impl_redis:RedisJobBoard
-    etcd = taskflow.jobs.backends.impl_etcd:EtcdJobBoard
-
-taskflow.conductors =
-    blocking = taskflow.conductors.backends.impl_blocking:BlockingConductor
-    nonblocking = taskflow.conductors.backends.impl_nonblocking:NonBlockingConductor
-
-taskflow.persistence =
-    dir = taskflow.persistence.backends.impl_dir:DirBackend
-    file = taskflow.persistence.backends.impl_dir:DirBackend
-    memory = taskflow.persistence.backends.impl_memory:MemoryBackend
-    mysql = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
-    postgresql = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
-    sqlite = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
-    zookeeper = taskflow.persistence.backends.impl_zookeeper:ZkBackend
-
-taskflow.engines =
-    default = taskflow.engines.action_engine.engine:SerialActionEngine
-    serial = taskflow.engines.action_engine.engine:SerialActionEngine
-    parallel = taskflow.engines.action_engine.engine:ParallelActionEngine
-    worker-based = taskflow.engines.worker_based.engine:WorkerBasedActionEngine
-    workers = taskflow.engines.worker_based.engine:WorkerBasedActionEngine
+taskflow.jobboards = 
+	zookeeper = taskflow.jobs.backends.impl_zookeeper:ZookeeperJobBoard
+	redis = taskflow.jobs.backends.impl_redis:RedisJobBoard
+	etcd = taskflow.jobs.backends.impl_etcd:EtcdJobBoard
+taskflow.conductors = 
+	blocking = taskflow.conductors.backends.impl_blocking:BlockingConductor
+	nonblocking = taskflow.conductors.backends.impl_nonblocking:NonBlockingConductor
+taskflow.persistence = 
+	dir = taskflow.persistence.backends.impl_dir:DirBackend
+	file = taskflow.persistence.backends.impl_dir:DirBackend
+	memory = taskflow.persistence.backends.impl_memory:MemoryBackend
+	mysql = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
+	postgresql = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
+	sqlite = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
+	zookeeper = taskflow.persistence.backends.impl_zookeeper:ZkBackend
+taskflow.engines = 
+	default = taskflow.engines.action_engine.engine:SerialActionEngine
+	serial = taskflow.engines.action_engine.engine:SerialActionEngine
+	parallel = taskflow.engines.action_engine.engine:ParallelActionEngine
+	worker-based = taskflow.engines.worker_based.engine:WorkerBasedActionEngine
+	workers = taskflow.engines.worker_based.engine:WorkerBasedActionEngine
 
 [extras]
-# NOTE(dhellmann): The entries in this section of the file need to be
-# kept consistent with the entries in test-requirements.txt.
-zookeeper =
-  kazoo>=2.6.0 # Apache-2.0
-redis =
-  redis>=4.0.0 # MIT
-etcd =
-  etcd3gw>=2.0.0 # Apache-2.0
-workers =
-  kombu>=4.3.0 # BSD
-eventlet =
-  eventlet>=0.18.2 # MIT
-database =
-  SQLAlchemy>=1.0.10 # MIT
-  alembic>=0.8.10 # MIT
-  SQLAlchemy-Utils>=0.30.11 # BSD License
-  PyMySQL>=0.7.6 # MIT License
-  psycopg2>=2.8.0 # LGPL/ZPL
+zookeeper = 
+	kazoo>=2.6.0 # Apache-2.0
+redis = 
+	redis>=4.0.0 # MIT
+etcd = 
+	etcd3gw>=2.0.0 # Apache-2.0
+workers = 
+	kombu>=4.3.0 # BSD
+eventlet = 
+	eventlet>=0.18.2 # MIT
+database = 
+	SQLAlchemy>=1.0.10 # MIT
+	alembic>=0.8.10 # MIT
+	SQLAlchemy-Utils>=0.30.11 # BSD License
+	PyMySQL>=0.7.6 # MIT License
+	psycopg2>=2.8.0 # LGPL/ZPL
+
+[egg_info]
+tag_build = 
+tag_date = 0
+
diff -pruN 5.12.0-2/taskflow/atom.py 6.0.2-0ubuntu1/taskflow/atom.py
--- 5.12.0-2/taskflow/atom.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/atom.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
@@ -159,7 +157,7 @@ def _build_arg_mapping(atom_name, reqs,
     return required, optional
 
 
-class Atom(object, metaclass=abc.ABCMeta):
+class Atom(metaclass=abc.ABCMeta):
     """An unit of work that causes a flow to progress (in some manner).
 
     An atom is a named object that operates with input data to perform
@@ -379,7 +377,7 @@ class Atom(object, metaclass=abc.ABCMeta
         """
 
     def __str__(self):
-        return '"%s==%s"' % (self.name, misc.get_version_string(self))
+        return '"{}=={}"'.format(self.name, misc.get_version_string(self))
 
     def __repr__(self):
-        return '<%s %s>' % (reflection.get_class_name(self), self)
+        return '<{} {}>'.format(reflection.get_class_name(self), self)
diff -pruN 5.12.0-2/taskflow/conductors/backends/__init__.py 6.0.2-0ubuntu1/taskflow/conductors/backends/__init__.py
--- 5.12.0-2/taskflow/conductors/backends/__init__.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/conductors/backends/__init__.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/conductors/backends/impl_blocking.py 6.0.2-0ubuntu1/taskflow/conductors/backends/impl_blocking.py
--- 5.12.0-2/taskflow/conductors/backends/impl_blocking.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/conductors/backends/impl_blocking.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
 #    a copy of the License at
@@ -33,7 +31,7 @@ class BlockingConductor(impl_executor.Ex
                  persistence=None, engine=None,
                  engine_options=None, wait_timeout=None,
                  log=None, max_simultaneous_jobs=MAX_SIMULTANEOUS_JOBS):
-        super(BlockingConductor, self).__init__(
+        super().__init__(
             name, jobboard,
             persistence=persistence, engine=engine,
             engine_options=engine_options,
diff -pruN 5.12.0-2/taskflow/conductors/backends/impl_executor.py 6.0.2-0ubuntu1/taskflow/conductors/backends/impl_executor.py
--- 5.12.0-2/taskflow/conductors/backends/impl_executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/conductors/backends/impl_executor.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
 #    a copy of the License at
@@ -104,7 +102,7 @@ class ExecutorConductor(base.Conductor,
                  persistence=None, engine=None,
                  engine_options=None, wait_timeout=None,
                  log=None, max_simultaneous_jobs=MAX_SIMULTANEOUS_JOBS):
-        super(ExecutorConductor, self).__init__(
+        super().__init__(
             name, jobboard, persistence=persistence,
             engine=engine, engine_options=engine_options)
         self._wait_timeout = tt.convert_to_timeout(
@@ -123,24 +121,19 @@ class ExecutorConductor(base.Conductor,
                                        " it has not been")
 
     def stop(self):
-        """Requests the conductor to stop dispatching.
-
-        This method can be used to request that a conductor stop its
-        consumption & dispatching loop.
-
-        The method returns immediately regardless of whether the conductor has
-        been stopped.
-        """
         self._wait_timeout.interrupt()
 
+    # Inherit the docs, so we can reference them in our class docstring,
+    # if we don't do this sphinx gets confused...
+    stop.__doc__ = base.Conductor.stop.__doc__
+
     @property
     def dispatching(self):
         """Whether or not the dispatching loop is still dispatching."""
         return not self._dead.is_set()
 
     def _listeners_from_job(self, job, engine):
-        listeners = super(ExecutorConductor, self)._listeners_from_job(
-            job, engine)
+        listeners = super()._listeners_from_job(job, engine)
         listeners.append(logging_listener.LoggingListener(engine,
                                                           log=self._log))
         return listeners
@@ -178,7 +171,7 @@ class ExecutorConductor(base.Conductor,
                     stage_func()
                     self._notifier.notify("%s_end" % event_name, details)
             except excp.WrappedFailure as e:
-                if all((f.check(*self.NO_CONSUME_EXCEPTIONS) for f in e)):
+                if all(f.check(*self.NO_CONSUME_EXCEPTIONS) for f in e):
                     consume = False
                 if self._log.isEnabledFor(logging.WARNING):
                     if consume:
@@ -345,14 +338,8 @@ class ExecutorConductor(base.Conductor,
     run.__doc__ = base.Conductor.run.__doc__
 
     def wait(self, timeout=None):
-        """Waits for the conductor to gracefully exit.
-
-        This method waits for the conductor to gracefully exit. An optional
-        timeout can be provided, which will cause the method to return
-        within the specified timeout. If the timeout is reached, the returned
-        value will be ``False``, otherwise it will be ``True``.
-
-        :param timeout: Maximum number of seconds that the :meth:`wait` method
-                        should block for.
-        """
         return self._dead.wait(timeout)
+
+    # Inherit the docs, so we can reference them in our class docstring,
+    # if we don't do this sphinx gets confused...
+    wait.__doc__ = base.Conductor.wait.__doc__
diff -pruN 5.12.0-2/taskflow/conductors/backends/impl_nonblocking.py 6.0.2-0ubuntu1/taskflow/conductors/backends/impl_nonblocking.py
--- 5.12.0-2/taskflow/conductors/backends/impl_nonblocking.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/conductors/backends/impl_nonblocking.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
 #    a copy of the License at
@@ -54,7 +52,7 @@ class NonBlockingConductor(impl_executor
                  engine_options=None, wait_timeout=None,
                  log=None, max_simultaneous_jobs=MAX_SIMULTANEOUS_JOBS,
                  executor_factory=None):
-        super(NonBlockingConductor, self).__init__(
+        super().__init__(
             name, jobboard,
             persistence=persistence, engine=engine,
             engine_options=engine_options, wait_timeout=wait_timeout,
diff -pruN 5.12.0-2/taskflow/conductors/base.py 6.0.2-0ubuntu1/taskflow/conductors/base.py
--- 5.12.0-2/taskflow/conductors/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/conductors/base.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
 #    a copy of the License at
@@ -25,7 +23,7 @@ from taskflow.types import notifier
 from taskflow.utils import misc
 
 
-class Conductor(object, metaclass=abc.ABCMeta):
+class Conductor(metaclass=abc.ABCMeta):
     """Base for all conductor implementations.
 
     Conductors act as entities which extract jobs from a jobboard, assign
@@ -164,6 +162,30 @@ class Conductor(object, metaclass=abc.AB
         """
 
     @abc.abstractmethod
+    def stop(self):
+        """Requests the conductor to stop dispatching.
+
+        This method can be used to request that a conductor stop its
+        consumption & dispatching loop.
+
+        The method returns immediately regardless of whether the conductor has
+        been stopped.
+        """
+
+    @abc.abstractmethod
+    def wait(self, timeout=None):
+        """Waits for the conductor to gracefully exit.
+
+        This method waits for the conductor to gracefully exit. An optional
+        timeout can be provided, which will cause the method to return
+        within the specified timeout. If the timeout is reached, the returned
+        value will be ``False``, otherwise it will be ``True``.
+
+        :param timeout: Maximum number of seconds that the :meth:`wait` method
+                        should block for.
+        """
+
+    @abc.abstractmethod
     def _dispatch_job(self, job):
         """Dispatches a claimed job for work completion.
 
diff -pruN 5.12.0-2/taskflow/deciders.py 6.0.2-0ubuntu1/taskflow/deciders.py
--- 5.12.0-2/taskflow/deciders.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/deciders.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/__init__.py 6.0.2-0ubuntu1/taskflow/engines/__init__.py
--- 5.12.0-2/taskflow/engines/__init__.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/__init__.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/action_engine/actions/base.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/actions/base.py
--- 5.12.0-2/taskflow/engines/action_engine/actions/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/actions/base.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +17,7 @@ import abc
 from taskflow import states
 
 
-class Action(object, metaclass=abc.ABCMeta):
+class Action(metaclass=abc.ABCMeta):
     """An action that handles executing, state changes, ... of atoms."""
 
     NO_RESULT = object()
diff -pruN 5.12.0-2/taskflow/engines/action_engine/actions/retry.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/actions/retry.py
--- 5.12.0-2/taskflow/engines/action_engine/actions/retry.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/actions/retry.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -24,7 +22,7 @@ class RetryAction(base.Action):
     """An action that handles executing, state changes, ... of retry atoms."""
 
     def __init__(self, storage, notifier, retry_executor):
-        super(RetryAction, self).__init__(storage, notifier)
+        super().__init__(storage, notifier)
         self._retry_executor = retry_executor
 
     def _get_retry_args(self, retry, revert=False, addons=None):
diff -pruN 5.12.0-2/taskflow/engines/action_engine/actions/task.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/actions/task.py
--- 5.12.0-2/taskflow/engines/action_engine/actions/task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/actions/task.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ class TaskAction(base.Action):
     """An action that handles scheduling, state changes, ... of task atoms."""
 
     def __init__(self, storage, notifier, task_executor):
-        super(TaskAction, self).__init__(storage, notifier)
+        super().__init__(storage, notifier)
         self._task_executor = task_executor
 
     def _is_identity_transition(self, old_state, state, task, progress=None):
diff -pruN 5.12.0-2/taskflow/engines/action_engine/builder.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/builder.py
--- 5.12.0-2/taskflow/engines/action_engine/builder.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/builder.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -58,7 +56,7 @@ TIMED_STATES = (st.ANALYZING, st.RESUMIN
 LOG = logging.getLogger(__name__)
 
 
-class MachineMemory(object):
+class MachineMemory:
     """State machine memory."""
 
     def __init__(self):
@@ -73,7 +71,7 @@ class MachineMemory(object):
             fut.cancel()
 
 
-class MachineBuilder(object):
+class MachineBuilder:
     """State machine *builder* that powers the engine components.
 
     NOTE(harlowja): the machine (states and events that will trigger
diff -pruN 5.12.0-2/taskflow/engines/action_engine/compiler.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/compiler.py
--- 5.12.0-2/taskflow/engines/action_engine/compiler.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/compiler.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -46,12 +44,12 @@ ATOMS = (TASK, RETRY)
 FLOWS = (FLOW, FLOW_END)
 
 
-class Terminator(object):
+class Terminator:
     """Flow terminator class."""
 
     def __init__(self, flow):
         self._flow = flow
-        self._name = "%s[$]" % (self._flow.name,)
+        self._name = "{}[$]".format(self._flow.name)
 
     @property
     def flow(self):
@@ -68,7 +66,7 @@ class Terminator(object):
         return '"%s[$]"' % flow_name
 
 
-class Compilation(object):
+class Compilation:
     """The result of a compilers ``compile()`` is this *immutable* object."""
 
     #: Task nodes will have a ``kind`` metadata key with this value.
@@ -135,7 +133,7 @@ def _add_update_edges(graph, nodes_from,
                     graph.add_edge(u, v, attr_dict=attr_dict.copy())
 
 
-class TaskCompiler(object):
+class TaskCompiler:
     """Non-recursive compiler of tasks."""
 
     def compile(self, task, parent=None):
@@ -147,7 +145,7 @@ class TaskCompiler(object):
         return graph, node
 
 
-class FlowCompiler(object):
+class FlowCompiler:
     """Recursive compiler of flows."""
 
     def __init__(self, deep_compiler_func):
@@ -162,9 +160,9 @@ class FlowCompiler(object):
             parent.add(tree_node)
         if flow.retry is not None:
             tree_node.add(tr.Node(flow.retry, kind=RETRY))
-        decomposed = dict(
-            (child, self._deep_compiler_func(child, parent=tree_node)[0])
-            for child in flow)
+        decomposed = {
+            child: self._deep_compiler_func(child, parent=tree_node)[0]
+            for child in flow}
         decomposed_graphs = list(decomposed.values())
         graph = gr.merge_graphs(graph, *decomposed_graphs,
                                 overlap_detector=_overlap_occurrence_detector)
@@ -223,7 +221,7 @@ class FlowCompiler(object):
         return graph, tree_node
 
 
-class PatternCompiler(object):
+class PatternCompiler:
     """Compiles a flow pattern (or task) into a compilation unit.
 
     Let's dive into the basic idea for how this works:
diff -pruN 5.12.0-2/taskflow/engines/action_engine/completer.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/completer.py
--- 5.12.0-2/taskflow/engines/action_engine/completer.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/completer.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ from taskflow import states as st
 LOG = logging.getLogger(__name__)
 
 
-class Strategy(object, metaclass=abc.ABCMeta):
+class Strategy(metaclass=abc.ABCMeta):
     """Failure resolution strategy base class."""
 
     strategy = None
@@ -56,7 +54,7 @@ class RevertAndRetry(Strategy):
     strategy = retry_atom.RETRY
 
     def __init__(self, runtime, retry):
-        super(RevertAndRetry, self).__init__(runtime)
+        super().__init__(runtime)
         self._retry = retry
 
     def apply(self):
@@ -73,7 +71,7 @@ class RevertAll(Strategy):
     strategy = retry_atom.REVERT_ALL
 
     def __init__(self, runtime):
-        super(RevertAll, self).__init__(runtime)
+        super().__init__(runtime)
 
     def apply(self):
         return self._runtime.reset_atoms(
@@ -87,7 +85,7 @@ class Revert(Strategy):
     strategy = retry_atom.REVERT
 
     def __init__(self, runtime, atom):
-        super(Revert, self).__init__(runtime)
+        super().__init__(runtime)
         self._atom = atom
 
     def apply(self):
@@ -98,7 +96,7 @@ class Revert(Strategy):
         return tweaked
 
 
-class Completer(object):
+class Completer:
     """Completes atoms using actions to complete them."""
 
     def __init__(self, runtime):
diff -pruN 5.12.0-2/taskflow/engines/action_engine/deciders.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/deciders.py
--- 5.12.0-2/taskflow/engines/action_engine/deciders.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/deciders.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,7 +24,7 @@ from taskflow import states
 LOG = logging.getLogger(__name__)
 
 
-class Decider(object, metaclass=abc.ABCMeta):
+class Decider(metaclass=abc.ABCMeta):
     """Base class for deciders.
 
     Provides interface to be implemented by sub-classes.
diff -pruN 5.12.0-2/taskflow/engines/action_engine/engine.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/engine.py
--- 5.12.0-2/taskflow/engines/action_engine/engine.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/engine.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -40,11 +38,6 @@ from taskflow import storage
 from taskflow.types import failure
 from taskflow.utils import misc
 
-try:
-    from taskflow.engines.action_engine import process_executor
-except ImportError:
-    process_executor = None
-
 LOG = logging.getLogger(__name__)
 
 
@@ -170,7 +163,7 @@ class ActionEngine(base.Engine):
     """
 
     def __init__(self, flow, flow_detail, backend, options):
-        super(ActionEngine, self).__init__(flow, flow_detail, backend, options)
+        super().__init__(flow, flow_detail, backend, options)
         self._runtime = None
         self._compiled = False
         self._compilation = None
@@ -479,8 +472,7 @@ class SerialActionEngine(ActionEngine):
     """Engine that runs tasks in serial manner."""
 
     def __init__(self, flow, flow_detail, backend, options):
-        super(SerialActionEngine, self).__init__(flow, flow_detail,
-                                                 backend, options)
+        super().__init__(flow, flow_detail, backend, options)
         self._task_executor = executor.SerialTaskExecutor()
 
 
@@ -548,7 +540,6 @@ String (case insensitive)    Executor us
       polling while a higher number will involve less polling but a slower time
       for an engine to notice a task has completed.
 
-    .. |pe|  replace:: process_executor
     .. |cfp| replace:: concurrent.futures.process
     .. |cft| replace:: concurrent.futures.thread
     .. |cf| replace:: concurrent.futures
@@ -563,16 +554,9 @@ String (case insensitive)    Executor us
     _executor_cls_matchers = [
         _ExecutorTypeMatch((futures.ThreadPoolExecutor,),
                            executor.ParallelThreadTaskExecutor),
-    ]
-    if process_executor is not None:
-        _executor_cls_matchers.append(
-            _ExecutorTypeMatch((futures.ProcessPoolExecutor,),
-                               process_executor.ParallelProcessTaskExecutor)
-        )
-    _executor_cls_matchers.append(
         _ExecutorTypeMatch((futures.Executor,),
                            executor.ParallelThreadTaskExecutor),
-    )
+    ]
 
     # One of these should match when a string/text is provided for the
     # 'executor' option (a mixed case equivalent is allowed since the match
@@ -584,18 +568,12 @@ String (case insensitive)    Executor us
                                       'greenthreaded']),
                            executor.ParallelGreenThreadTaskExecutor),
     ]
-    if process_executor is not None:
-        _executor_str_matchers.append(
-            _ExecutorTextMatch(frozenset(['processes', 'process']),
-                               process_executor.ParallelProcessTaskExecutor)
-        )
 
     # Used when no executor is provided (either a string or object)...
     _default_executor_cls = executor.ParallelThreadTaskExecutor
 
     def __init__(self, flow, flow_detail, backend, options):
-        super(ParallelActionEngine, self).__init__(flow, flow_detail,
-                                                   backend, options)
+        super().__init__(flow, flow_detail, backend, options)
         # This ensures that any provided executor will be validated before
         # we get to far in the compilation/execution pipeline...
         self._task_executor = self._fetch_task_executor(self._options)
diff -pruN 5.12.0-2/taskflow/engines/action_engine/executor.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/executor.py
--- 5.12.0-2/taskflow/engines/action_engine/executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/executor.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -78,7 +76,7 @@ def _revert_task(task, arguments, result
     return (REVERTED, result)
 
 
-class SerialRetryExecutor(object):
+class SerialRetryExecutor:
     """Executes and reverts retries."""
 
     def __init__(self):
@@ -105,7 +103,7 @@ class SerialRetryExecutor(object):
         return fut
 
 
-class TaskExecutor(object, metaclass=abc.ABCMeta):
+class TaskExecutor(metaclass=abc.ABCMeta):
     """Executes and reverts tasks.
 
     This class takes task and its arguments and executes or reverts it.
diff -pruN 5.12.0-2/taskflow/engines/action_engine/process_executor.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/process_executor.py
--- 5.12.0-2/taskflow/engines/action_engine/process_executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/process_executor.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,720 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-
-import asyncore
-import binascii
-import collections
-import errno
-import functools
-import hashlib
-import hmac
-import math
-import os
-import pickle
-import socket
-import struct
-import time
-
-import futurist
-from oslo_utils import excutils
-
-from taskflow.engines.action_engine import executor as base
-from taskflow import logging
-from taskflow import task as ta
-from taskflow.types import notifier as nt
-from taskflow.utils import iter_utils
-from taskflow.utils import misc
-from taskflow.utils import schema_utils as su
-from taskflow.utils import threading_utils
-
-LOG = logging.getLogger(__name__)
-
-# Internal parent <-> child process protocol schema, message constants...
-MAGIC_HEADER = 0xDECAF
-CHALLENGE = 'identify_yourself'
-CHALLENGE_RESPONSE = 'worker_reporting_in'
-ACK = 'ack'
-EVENT = 'event'
-SCHEMAS = {
-    # Basic jsonschemas for verifying that the data we get back and
-    # forth from parent <-> child observes at least a basic expected
-    # format.
-    CHALLENGE: {
-        "type": "string",
-        "minLength": 1,
-    },
-    ACK: {
-        "type": "string",
-        "minLength": 1,
-    },
-    CHALLENGE_RESPONSE: {
-        "type": "string",
-        "minLength": 1,
-    },
-    EVENT: {
-        "type": "object",
-        "properties": {
-            'event_type': {
-                "type": "string",
-            },
-            'sent_on': {
-                "type": "number",
-            },
-        },
-        "required": ['event_type', 'sent_on'],
-        "additionalProperties": True,
-    },
-}
-
-
-class UnknownSender(Exception):
-    """Exception raised when message from unknown sender is recvd."""
-
-
-class ChallengeIgnored(Exception):
-    """Exception raised when challenge has not been responded to."""
-
-
-class Reader(object):
-    """Reader machine that streams & parses messages that it then dispatches.
-
-    TODO(harlowja): Use python-suitcase in the future when the following
-    are addressed/resolved and released:
-
-    - https://github.com/digidotcom/python-suitcase/issues/28
-    - https://github.com/digidotcom/python-suitcase/issues/29
-
-    Binary format format is the following (no newlines in actual format)::
-
-        <magic-header> (4 bytes)
-        <mac-header-length> (4 bytes)
-        <mac> (1 or more variable bytes)
-        <identity-header-length> (4 bytes)
-        <identity> (1 or more variable bytes)
-        <msg-header-length> (4 bytes)
-        <msg> (1 or more variable bytes)
-    """
-
-    #: Per state memory initializers.
-    _INITIALIZERS = {
-        'magic_header_left': 4,
-        'mac_header_left': 4,
-        'identity_header_left': 4,
-        'msg_header_left': 4,
-    }
-
-    #: Linear steps/transitions (order matters here).
-    _TRANSITIONS = tuple([
-        'magic_header_left',
-        'mac_header_left',
-        'mac_left',
-        'identity_header_left',
-        'identity_left',
-        'msg_header_left',
-        'msg_left',
-    ])
-
-    def __init__(self, auth_key, dispatch_func, msg_limit=-1):
-        if not callable(dispatch_func):
-            raise ValueError("Expected provided dispatch function"
-                             " to be callable")
-        self.auth_key = auth_key
-        self.dispatch_func = dispatch_func
-        msg_limiter = iter_utils.iter_forever(msg_limit)
-        self.msg_count = next(msg_limiter)
-        self._msg_limiter = msg_limiter
-        self._buffer = misc.BytesIO()
-        self._state = None
-        # Local machine variables and such are stored in here.
-        self._memory = {}
-        self._transitions = collections.deque(self._TRANSITIONS)
-        # This is the per state callback handler set. The first entry reads
-        # the data and the second entry is called after reading is completed,
-        # typically to save that data into object memory, or to validate
-        # it.
-        self._handlers = {
-            'magic_header_left': (self._read_field_data,
-                                  self._save_and_validate_magic),
-            'mac_header_left': (self._read_field_data,
-                                functools.partial(self._save_pos_integer,
-                                                  'mac_left')),
-            'mac_left': (functools.partial(self._read_data, 'mac'),
-                         functools.partial(self._save_data, 'mac')),
-            'identity_header_left': (self._read_field_data,
-                                     functools.partial(self._save_pos_integer,
-                                                       'identity_left')),
-            'identity_left': (functools.partial(self._read_data, 'identity'),
-                              functools.partial(self._save_data, 'identity')),
-            'msg_header_left': (self._read_field_data,
-                                functools.partial(self._save_pos_integer,
-                                                  'msg_left')),
-            'msg_left': (functools.partial(self._read_data, 'msg'),
-                         self._dispatch_and_reset),
-        }
-        # Force transition into first state...
-        self._transition()
-
-    def _save_pos_integer(self, key_name, data):
-        key_val = struct.unpack("!i", data)[0]
-        if key_val <= 0:
-            raise IOError("Invalid %s length received for key '%s', expected"
-                          " greater than zero length" % (key_val, key_name))
-        self._memory[key_name] = key_val
-        return True
-
-    def _save_data(self, key_name, data):
-        self._memory[key_name] = data
-        return True
-
-    def _dispatch_and_reset(self, data):
-        self.dispatch_func(
-            self._memory['identity'],
-            # Lazy evaluate so the message can be thrown out as needed
-            # (instead of the receiver discarding it after the fact)...
-            functools.partial(_decode_message, self.auth_key, data,
-                              self._memory['mac']))
-        self.msg_count = next(self._msg_limiter)
-        self._memory.clear()
-
-    def _transition(self):
-        try:
-            self._state = self._transitions.popleft()
-        except IndexError:
-            self._transitions.extend(self._TRANSITIONS)
-            self._state = self._transitions.popleft()
-        try:
-            self._memory[self._state] = self._INITIALIZERS[self._state]
-        except KeyError:
-            pass
-        self._handle_func, self._post_handle_func = self._handlers[self._state]
-
-    def _save_and_validate_magic(self, data):
-        magic_header = struct.unpack("!i", data)[0]
-        if magic_header != MAGIC_HEADER:
-            raise IOError("Invalid magic header received, expected 0x%x but"
-                          " got 0x%x for message %s" % (MAGIC_HEADER,
-                                                        magic_header,
-                                                        self.msg_count + 1))
-        self._memory['magic'] = magic_header
-        return True
-
-    def _read_data(self, save_key_name, data):
-        data_len_left = self._memory[self._state]
-        self._buffer.write(data[0:data_len_left])
-        if len(data) < data_len_left:
-            data_len_left -= len(data)
-            self._memory[self._state] = data_len_left
-            return ''
-        else:
-            self._memory[self._state] = 0
-            buf_data = self._buffer.getvalue()
-            self._buffer.reset()
-            self._post_handle_func(buf_data)
-            self._transition()
-            return data[data_len_left:]
-
-    def _read_field_data(self, data):
-        return self._read_data(self._state, data)
-
-    @property
-    def bytes_needed(self):
-        return self._memory.get(self._state, 0)
-
-    def feed(self, data):
-        while len(data):
-            data = self._handle_func(data)
-
-
-class BadHmacValueError(ValueError):
-    """Value error raised when an invalid hmac is discovered."""
-
-
-def _create_random_string(desired_length):
-    if desired_length <= 0:
-        return b''
-    data_length = int(math.ceil(desired_length / 2.0))
-    data = os.urandom(data_length)
-    hex_data = binascii.hexlify(data)
-    return hex_data[0:desired_length]
-
-
-def _calculate_hmac(auth_key, body):
-    mac = hmac.new(auth_key, body, hashlib.md5).hexdigest()
-    if isinstance(mac, str):
-        mac = mac.encode("ascii")
-    return mac
-
-
-def _encode_message(auth_key, message, identity, reverse=False):
-    message = pickle.dumps(message, 2)
-    message_mac = _calculate_hmac(auth_key, message)
-    pieces = [
-        struct.pack("!i", MAGIC_HEADER),
-        struct.pack("!i", len(message_mac)),
-        message_mac,
-        struct.pack("!i", len(identity)),
-        identity,
-        struct.pack("!i", len(message)),
-        message,
-    ]
-    if reverse:
-        pieces.reverse()
-    return tuple(pieces)
-
-
-def _decode_message(auth_key, message, message_mac):
-    tmp_message_mac = _calculate_hmac(auth_key, message)
-    if tmp_message_mac != message_mac:
-        raise BadHmacValueError('Invalid message hmac')
-    return pickle.loads(message)
-
-
-class Channel(object):
-    """Object that workers use to communicate back to their creator."""
-
-    def __init__(self, port, identity, auth_key):
-        self.identity = identity
-        self.port = port
-        self.auth_key = auth_key
-        self.dead = False
-        self._sent = self._received = 0
-        self._socket = None
-        self._read_pipe = None
-        self._write_pipe = None
-
-    def close(self):
-        if self._socket is not None:
-            self._socket.close()
-            self._socket = None
-            self._read_pipe = None
-            self._write_pipe = None
-
-    def _ensure_connected(self):
-        if self._socket is None:
-            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            s.setblocking(1)
-            try:
-                s.connect(("", self.port))
-            except socket.error as e:
-                with excutils.save_and_reraise_exception():
-                    s.close()
-                    if e.errno in (errno.ECONNREFUSED, errno.ENOTCONN,
-                                   errno.ECONNRESET):
-                        # Don't bother with further connections...
-                        self.dead = True
-            read_pipe = s.makefile("rb", 0)
-            write_pipe = s.makefile("wb", 0)
-            try:
-                msg = self._do_recv(read_pipe=read_pipe)
-                su.schema_validate(msg, SCHEMAS[CHALLENGE])
-                if msg != CHALLENGE:
-                    raise IOError("Challenge expected not received")
-                else:
-                    pieces = _encode_message(self.auth_key,
-                                             CHALLENGE_RESPONSE,
-                                             self.identity)
-                    self._do_send_and_ack(pieces, write_pipe=write_pipe,
-                                          read_pipe=read_pipe)
-            except Exception:
-                with excutils.save_and_reraise_exception():
-                    s.close()
-            else:
-                self._socket = s
-                self._read_pipe = read_pipe
-                self._write_pipe = write_pipe
-
-    def recv(self):
-        self._ensure_connected()
-        return self._do_recv()
-
-    def _do_recv(self, read_pipe=None):
-        if read_pipe is None:
-            read_pipe = self._read_pipe
-        msg_capture = collections.deque(maxlen=1)
-        msg_capture_func = (lambda _from_who, msg_decoder_func:
-                            msg_capture.append(msg_decoder_func()))
-        reader = Reader(self.auth_key, msg_capture_func, msg_limit=1)
-        try:
-            maybe_msg_num = self._received + 1
-            bytes_needed = reader.bytes_needed
-            while True:
-                blob = read_pipe.read(bytes_needed)
-                if len(blob) != bytes_needed:
-                    raise EOFError("Read pipe closed while reading %s"
-                                   " bytes for potential message %s"
-                                   % (bytes_needed, maybe_msg_num))
-                reader.feed(blob)
-                bytes_needed = reader.bytes_needed
-        except StopIteration:
-            pass
-        msg = msg_capture[0]
-        self._received += 1
-        return msg
-
-    def _do_send(self, pieces, write_pipe=None):
-        if write_pipe is None:
-            write_pipe = self._write_pipe
-        for piece in pieces:
-            write_pipe.write(piece)
-        write_pipe.flush()
-
-    def _do_send_and_ack(self, pieces, write_pipe=None, read_pipe=None):
-        self._do_send(pieces, write_pipe=write_pipe)
-        self._sent += 1
-        msg = self._do_recv(read_pipe=read_pipe)
-        su.schema_validate(msg, SCHEMAS[ACK])
-        if msg != ACK:
-            raise IOError("Failed receiving ack for sent"
-                          " message %s" % self._metrics['sent'])
-
-    def send(self, message):
-        self._ensure_connected()
-        self._do_send_and_ack(_encode_message(self.auth_key, message,
-                                              self.identity))
-
-
-class EventSender(object):
-    """Sends event information from a child worker process to its creator."""
-
-    def __init__(self, channel):
-        self._channel = channel
-        self._pid = None
-
-    def __call__(self, event_type, details):
-        if not self._channel.dead:
-            if self._pid is None:
-                self._pid = os.getpid()
-            message = {
-                'event_type': event_type,
-                'details': details,
-                'sent_on': time.time(),
-            }
-            LOG.trace("Sending %s (from child %s)", message, self._pid)
-            self._channel.send(message)
-
-
-class DispatcherHandler(asyncore.dispatcher):
-    """Dispatches from a single connection into a target."""
-
-    #: Read/write chunk size.
-    CHUNK_SIZE = 8192
-
-    def __init__(self, sock, addr, dispatcher):
-        super(DispatcherHandler, self).__init__(map=dispatcher.map,
-                                                sock=sock)
-        self.blobs_to_write = list(dispatcher.challenge_pieces)
-        self.reader = Reader(dispatcher.auth_key, self._dispatch)
-        self.targets = dispatcher.targets
-        self.tied_to = None
-        self.challenge_responded = False
-        self.ack_pieces = _encode_message(dispatcher.auth_key, ACK,
-                                          dispatcher.identity,
-                                          reverse=True)
-        self.addr = addr
-
-    def handle_close(self):
-        self.close()
-
-    def writable(self):
-        return bool(self.blobs_to_write)
-
-    def handle_write(self):
-        try:
-            blob = self.blobs_to_write.pop()
-        except IndexError:
-            pass
-        else:
-            sent = self.send(blob[0:self.CHUNK_SIZE])
-            if sent < len(blob):
-                self.blobs_to_write.append(blob[sent:])
-
-    def _send_ack(self):
-        self.blobs_to_write.extend(self.ack_pieces)
-
-    def _dispatch(self, from_who, msg_decoder_func):
-        if not self.challenge_responded:
-            msg = msg_decoder_func()
-            su.schema_validate(msg, SCHEMAS[CHALLENGE_RESPONSE])
-            if msg != CHALLENGE_RESPONSE:
-                raise ChallengeIgnored("Discarding connection from %s"
-                                       " challenge was not responded to"
-                                       % self.addr)
-            else:
-                LOG.trace("Peer %s (%s) has passed challenge sequence",
-                          self.addr, from_who)
-                self.challenge_responded = True
-                self.tied_to = from_who
-                self._send_ack()
-        else:
-            if self.tied_to != from_who:
-                raise UnknownSender("Sender %s previously identified as %s"
-                                    " changed there identity to %s after"
-                                    " challenge sequence" % (self.addr,
-                                                             self.tied_to,
-                                                             from_who))
-            try:
-                task = self.targets[from_who]
-            except KeyError:
-                raise UnknownSender("Unknown message from %s (%s) not matched"
-                                    " to any known target" % (self.addr,
-                                                              from_who))
-            msg = msg_decoder_func()
-            su.schema_validate(msg, SCHEMAS[EVENT])
-            if LOG.isEnabledFor(logging.TRACE):
-                msg_delay = max(0, time.time() - msg['sent_on'])
-                LOG.trace("Dispatching message from %s (%s) (it took %0.3f"
-                          " seconds for it to arrive for processing after"
-                          " being sent)", self.addr, from_who, msg_delay)
-            task.notifier.notify(msg['event_type'], msg.get('details'))
-            self._send_ack()
-
-    def handle_read(self):
-        data = self.recv(self.CHUNK_SIZE)
-        if len(data) == 0:
-            self.handle_close()
-        else:
-            try:
-                self.reader.feed(data)
-            except (IOError, UnknownSender):
-                LOG.warning("Invalid received message", exc_info=True)
-                self.handle_close()
-            except (pickle.PickleError, TypeError):
-                LOG.warning("Badly formatted message", exc_info=True)
-                self.handle_close()
-            except (ValueError, su.ValidationError):
-                LOG.warning("Failed validating message", exc_info=True)
-                self.handle_close()
-            except ChallengeIgnored:
-                LOG.warning("Failed challenge sequence", exc_info=True)
-                self.handle_close()
-
-
-class Dispatcher(asyncore.dispatcher):
-    """Accepts messages received from child worker processes."""
-
-    #: See https://docs.python.org/2/library/socket.html#socket.socket.listen
-    MAX_BACKLOG = 5
-
-    def __init__(self, map, auth_key, identity):
-        super(Dispatcher, self).__init__(map=map)
-        self.identity = identity
-        self.challenge_pieces = _encode_message(auth_key, CHALLENGE,
-                                                identity, reverse=True)
-        self.auth_key = auth_key
-        self.targets = {}
-
-    @property
-    def port(self):
-        if self.socket is not None:
-            return self.socket.getsockname()[1]
-        else:
-            return None
-
-    def setup(self):
-        self.targets.clear()
-        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
-        self.bind(("", 0))
-        LOG.trace("Accepting dispatch requests on port %s", self.port)
-        self.listen(self.MAX_BACKLOG)
-
-    def writable(self):
-        return False
-
-    @property
-    def map(self):
-        return self._map
-
-    def handle_close(self):
-        if self.socket is not None:
-            self.close()
-
-    def handle_accept(self):
-        pair = self.accept()
-        if pair is not None:
-            sock, addr = pair
-            addr = "%s:%s" % (addr[0], addr[1])
-            LOG.trace("Potentially accepted new connection from %s", addr)
-            DispatcherHandler(sock, addr, self)
-
-
-class ParallelProcessTaskExecutor(base.ParallelTaskExecutor):
-    """Executes tasks in parallel using a process pool executor.
-
-    NOTE(harlowja): this executor executes tasks in external processes, so that
-    implies that tasks that are sent to that external process are pickleable
-    since this is how the multiprocessing works (sending pickled objects back
-    and forth) and that the bound handlers (for progress updating in
-    particular) are proxied correctly from that external process to the one
-    that is alive in the parent process to ensure that callbacks registered in
-    the parent are executed on events in the child.
-    """
-
-    #: Default timeout used by asyncore io loop (and eventually select/poll).
-    WAIT_TIMEOUT = 0.01
-
-    constructor_options = [
-        ('max_workers', lambda v: v if v is None else int(v)),
-        ('wait_timeout', lambda v: v if v is None else float(v)),
-    ]
-    """
-    Optional constructor keyword arguments this executor supports. These will
-    typically be passed via engine options (by a engine user) and converted
-    into the correct type before being sent into this
-    classes ``__init__`` method.
-    """
-
-    def __init__(self, executor=None,
-                 max_workers=None, wait_timeout=None):
-        super(ParallelProcessTaskExecutor, self).__init__(
-            executor=executor, max_workers=max_workers)
-        LOG.warning('Process task executor is deprecated. It is now disabled '
-                    'in Python 3.12 or later and will be removed.')
-        self._auth_key = _create_random_string(32)
-        self._dispatcher = Dispatcher({}, self._auth_key,
-                                      _create_random_string(32))
-        if wait_timeout is None:
-            self._wait_timeout = self.WAIT_TIMEOUT
-        else:
-            if wait_timeout <= 0:
-                raise ValueError("Provided wait timeout must be greater"
-                                 " than zero and not '%s'" % wait_timeout)
-            self._wait_timeout = wait_timeout
-        # Only created after starting...
-        self._worker = None
-
-    def _create_executor(self, max_workers=None):
-        return futurist.ProcessPoolExecutor(max_workers=max_workers)
-
-    def start(self):
-        if threading_utils.is_alive(self._worker):
-            raise RuntimeError("Worker thread must be stopped via stop()"
-                               " before starting/restarting")
-        super(ParallelProcessTaskExecutor, self).start()
-        self._dispatcher.setup()
-        self._worker = threading_utils.daemon_thread(
-            asyncore.loop, map=self._dispatcher.map,
-            timeout=self._wait_timeout)
-        self._worker.start()
-
-    def stop(self):
-        super(ParallelProcessTaskExecutor, self).stop()
-        self._dispatcher.close()
-        if threading_utils.is_alive(self._worker):
-            self._worker.join()
-            self._worker = None
-
-    def _submit_task(self, func, task, *args, **kwargs):
-        """Submit a function to run the given task (with given args/kwargs).
-
-        NOTE(harlowja): Adjust all events to be proxies instead since we want
-        those callbacks to be activated in this process, not in the child,
-        also since typically callbacks are functors (or callables) we can
-        not pickle those in the first place...
-
-        To make sure people understand how this works, the following is a
-        lengthy description of what is going on here, read at will:
-
-        So to ensure that we are proxying task triggered events that occur
-        in the executed subprocess (which will be created and used by the
-        thing using the multiprocessing based executor) we need to establish
-        a link between that process and this process that ensures that when a
-        event is triggered in that task in that process that a corresponding
-        event is triggered on the original task that was requested to be ran
-        in this process.
-
-        To accomplish this we have to create a copy of the task (without
-        any listeners) and then reattach a new set of listeners that will
-        now instead of calling the desired listeners just place messages
-        for this process (a dispatcher thread that is created in this class)
-        to dispatch to the original task (using a common accepting socket and
-        per task sender socket that is used and associated to know
-        which task to proxy back too, since it is possible that there many
-        be *many* subprocess running at the same time).
-
-        Once the subprocess task has finished execution, the executor will
-        then trigger a callback that will remove the task + target from the
-        dispatcher (which will stop any further proxying back to the original
-        task).
-        """
-        progress_callback = kwargs.pop('progress_callback', None)
-        clone = task.copy(retain_listeners=False)
-        identity = _create_random_string(32)
-        channel = Channel(self._dispatcher.port, identity, self._auth_key)
-
-        def rebind_task():
-            # Creates and binds proxies for all events the task could receive
-            # so that when the clone runs in another process that this task
-            # can receive the same notifications (thus making it look like the
-            # the notifications are transparently happening in this process).
-            proxy_event_types = set()
-            for (event_type, listeners) in task.notifier.listeners_iter():
-                if listeners:
-                    proxy_event_types.add(event_type)
-            if progress_callback is not None:
-                proxy_event_types.add(ta.EVENT_UPDATE_PROGRESS)
-            if nt.Notifier.ANY in proxy_event_types:
-                # NOTE(harlowja): If ANY is present, just have it be
-                # the **only** event registered, as all other events will be
-                # sent if ANY is registered (due to the nature of ANY sending
-                # all the things); if we also include the other event types
-                # in this set if ANY is present we will receive duplicate
-                # messages in this process (the one where the local
-                # task callbacks are being triggered). For example the
-                # emissions of the tasks notifier (that is running out
-                # of process) will for specific events send messages for
-                # its ANY event type **and** the specific event
-                # type (2 messages, when we just want one) which will
-                # cause > 1 notify() call on the local tasks notifier, which
-                # causes more local callback triggering than we want
-                # to actually happen.
-                proxy_event_types = set([nt.Notifier.ANY])
-            if proxy_event_types:
-                # This sender acts as our forwarding proxy target, it
-                # will be sent pickled to the process that will execute
-                # the needed task and it will do the work of using the
-                # channel object to send back messages to this process for
-                # dispatch into the local task.
-                sender = EventSender(channel)
-                for event_type in proxy_event_types:
-                    clone.notifier.register(event_type, sender)
-            return bool(proxy_event_types)
-
-        def register():
-            if progress_callback is not None:
-                task.notifier.register(ta.EVENT_UPDATE_PROGRESS,
-                                       progress_callback)
-            self._dispatcher.targets[identity] = task
-
-        def deregister(fut=None):
-            if progress_callback is not None:
-                task.notifier.deregister(ta.EVENT_UPDATE_PROGRESS,
-                                         progress_callback)
-            self._dispatcher.targets.pop(identity, None)
-
-        should_register = rebind_task()
-        if should_register:
-            register()
-        try:
-            fut = self._executor.submit(func, clone, *args, **kwargs)
-        except RuntimeError:
-            with excutils.save_and_reraise_exception():
-                if should_register:
-                    deregister()
-
-        fut.atom = task
-        if should_register:
-            fut.add_done_callback(deregister)
-        return fut
diff -pruN 5.12.0-2/taskflow/engines/action_engine/runtime.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/runtime.py
--- 5.12.0-2/taskflow/engines/action_engine/runtime.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/runtime.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -43,7 +41,7 @@ _EdgeDecider = collections.namedtuple('_
 LOG = logging.getLogger(__name__)
 
 
-class Runtime(object):
+class Runtime:
     """A aggregate of runtime objects, properties, ... used during execution.
 
     This object contains various utility methods and properties that represent
diff -pruN 5.12.0-2/taskflow/engines/action_engine/scheduler.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/scheduler.py
--- 5.12.0-2/taskflow/engines/action_engine/scheduler.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/scheduler.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -21,7 +19,7 @@ from taskflow import states as st
 from taskflow.types import failure
 
 
-class RetryScheduler(object):
+class RetryScheduler:
     """Schedules retry atoms."""
 
     def __init__(self, runtime):
@@ -52,7 +50,7 @@ class RetryScheduler(object):
                                         " intention: %s" % intention)
 
 
-class TaskScheduler(object):
+class TaskScheduler:
     """Schedules task atoms."""
 
     def __init__(self, runtime):
@@ -75,7 +73,7 @@ class TaskScheduler(object):
                                         " intention: %s" % intention)
 
 
-class Scheduler(object):
+class Scheduler:
     """Safely schedules atoms using a runtime ``fetch_scheduler`` routine."""
 
     def __init__(self, runtime):
diff -pruN 5.12.0-2/taskflow/engines/action_engine/scopes.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/scopes.py
--- 5.12.0-2/taskflow/engines/action_engine/scopes.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/scopes.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -21,7 +19,7 @@ from taskflow import logging
 LOG = logging.getLogger(__name__)
 
 
-class ScopeWalker(object):
+class ScopeWalker:
     """Walks through the scopes of a atom using a engines compilation.
 
     NOTE(harlowja): for internal usage only.
@@ -79,9 +77,9 @@ class ScopeWalker(object):
         """
         graph = self._execution_graph
         if self._predecessors is None:
-            predecessors = set(
+            predecessors = {
                 node for node in graph.bfs_predecessors_iter(self._atom)
-                if graph.nodes[node]['kind'] in co.ATOMS)
+                if graph.nodes[node]['kind'] in co.ATOMS}
             self._predecessors = predecessors.copy()
         else:
             predecessors = self._predecessors.copy()
diff -pruN 5.12.0-2/taskflow/engines/action_engine/selector.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/selector.py
--- 5.12.0-2/taskflow/engines/action_engine/selector.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/selector.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -27,7 +25,7 @@ from taskflow.utils import iter_utils
 LOG = logging.getLogger(__name__)
 
 
-class Selector(object):
+class Selector:
     """Selector that uses a compilation and aids in execution processes.
 
     Its primary purpose is to get the next atoms for execution or reversion
diff -pruN 5.12.0-2/taskflow/engines/action_engine/traversal.py 6.0.2-0ubuntu1/taskflow/engines/action_engine/traversal.py
--- 5.12.0-2/taskflow/engines/action_engine/traversal.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/action_engine/traversal.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/base.py 6.0.2-0ubuntu1/taskflow/engines/base.py
--- 5.12.0-2/taskflow/engines/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/base.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -21,7 +19,7 @@ from taskflow.types import notifier
 from taskflow.utils import misc
 
 
-class Engine(object, metaclass=abc.ABCMeta):
+class Engine(metaclass=abc.ABCMeta):
     """Base for all engines implementations.
 
     :ivar Engine.notifier: A notification object that will dispatch
diff -pruN 5.12.0-2/taskflow/engines/helpers.py 6.0.2-0ubuntu1/taskflow/engines/helpers.py
--- 5.12.0-2/taskflow/engines/helpers.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/helpers.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/worker_based/dispatcher.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/dispatcher.py
--- 5.12.0-2/taskflow/engines/worker_based/dispatcher.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/dispatcher.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -23,7 +21,7 @@ from taskflow.utils import kombu_utils a
 LOG = logging.getLogger(__name__)
 
 
-class Handler(object):
+class Handler:
     """Component(s) that will be called on reception of messages."""
 
     __slots__ = ['_process_message', '_validator']
@@ -53,7 +51,7 @@ class Handler(object):
         return self._validator
 
 
-class TypeDispatcher(object):
+class TypeDispatcher:
     """Receives messages and dispatches to type specific handlers."""
 
     def __init__(self, type_handlers=None, requeue_filters=None):
diff -pruN 5.12.0-2/taskflow/engines/worker_based/endpoint.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/endpoint.py
--- 5.12.0-2/taskflow/engines/worker_based/endpoint.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/endpoint.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +17,7 @@ from oslo_utils import reflection
 from taskflow.engines.action_engine import executor
 
 
-class Endpoint(object):
+class Endpoint:
     """Represents a single task with execute/revert methods."""
 
     def __init__(self, task_cls):
diff -pruN 5.12.0-2/taskflow/engines/worker_based/engine.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/engine.py
--- 5.12.0-2/taskflow/engines/worker_based/engine.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/engine.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -53,8 +51,7 @@ class WorkerBasedActionEngine(engine.Act
     """
 
     def __init__(self, flow, flow_detail, backend, options):
-        super(WorkerBasedActionEngine, self).__init__(flow, flow_detail,
-                                                      backend, options)
+        super().__init__(flow, flow_detail, backend, options)
         # This ensures that any provided executor will be validated before
         # we get to far in the compilation/execution pipeline...
         self._task_executor = self._fetch_task_executor(self._options,
diff -pruN 5.12.0-2/taskflow/engines/worker_based/executor.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/executor.py
--- 5.12.0-2/taskflow/engines/worker_based/executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/executor.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/engines/worker_based/protocol.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/protocol.py
--- 5.12.0-2/taskflow/engines/worker_based/protocol.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/protocol.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -147,7 +145,7 @@ def failure_to_dict(failure):
         return failure.to_dict(include_args=False)
 
 
-class Message(object, metaclass=abc.ABCMeta):
+class Message(metaclass=abc.ABCMeta):
     """Base class for all message types."""
 
     def __repr__(self):
diff -pruN 5.12.0-2/taskflow/engines/worker_based/proxy.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/proxy.py
--- 5.12.0-2/taskflow/engines/worker_based/proxy.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/proxy.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -40,7 +38,7 @@ _TransportDetails = collections.namedtup
                                             'driver_name', 'driver_version'])
 
 
-class Proxy(object):
+class Proxy:
     """A proxy processes messages from/to the named exchange.
 
     For **internal** usage only (not for public consumption).
@@ -145,7 +143,7 @@ class Proxy(object):
 
     def _make_queue(self, routing_key, exchange, channel=None):
         """Make a named queue for the given exchange."""
-        queue_name = "%s_%s" % (self._exchange_name, routing_key)
+        queue_name = "{}_{}".format(self._exchange_name, routing_key)
         return kombu.Queue(name=queue_name,
                            routing_key=routing_key, durable=False,
                            exchange=exchange, auto_delete=True,
diff -pruN 5.12.0-2/taskflow/engines/worker_based/server.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/server.py
--- 5.12.0-2/taskflow/engines/worker_based/server.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/server.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -31,7 +29,7 @@ from taskflow.utils import misc
 LOG = logging.getLogger(__name__)
 
 
-class Server(object):
+class Server:
     """Server implementation that waits for incoming tasks requests."""
 
     def __init__(self, topic, exchange, executor, endpoints,
@@ -53,8 +51,8 @@ class Server(object):
                                   transport_options=transport_options,
                                   retry_options=retry_options)
         self._topic = topic
-        self._endpoints = dict([(endpoint.name, endpoint)
-                                for endpoint in endpoints])
+        self._endpoints = {endpoint.name: endpoint
+                           for endpoint in endpoints}
 
     def _delayed_process(self, func):
         """Runs the function using the instances executor (eventually).
diff -pruN 5.12.0-2/taskflow/engines/worker_based/types.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/types.py
--- 5.12.0-2/taskflow/engines/worker_based/types.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/types.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,7 +28,7 @@ LOG = logging.getLogger(__name__)
 # TODO(harlowja): this needs to be made better, once
 # https://blueprints.launchpad.net/taskflow/+spec/wbe-worker-info is finally
 # implemented we can go about using that instead.
-class TopicWorker(object):
+class TopicWorker:
     """A (read-only) worker and its relevant information + useful methods."""
 
     _NO_IDENTITY = object()
@@ -72,14 +70,15 @@ class TopicWorker(object):
     def __repr__(self):
         r = reflection.get_class_name(self, fully_qualified=False)
         if self.identity is not self._NO_IDENTITY:
-            r += "(identity=%s, tasks=%s, topic=%s)" % (self.identity,
-                                                        self.tasks, self.topic)
+            r += "(identity={}, tasks={}, topic={})".format(
+                self.identity, self.tasks, self.topic)
         else:
-            r += "(identity=*, tasks=%s, topic=%s)" % (self.tasks, self.topic)
+            r += "(identity=*, tasks={}, topic={})".format(
+                self.tasks, self.topic)
         return r
 
 
-class ProxyWorkerFinder(object):
+class ProxyWorkerFinder:
     """Requests and receives responses about workers topic+task details."""
 
     def __init__(self, uuid, proxy, topics,
diff -pruN 5.12.0-2/taskflow/engines/worker_based/worker.py 6.0.2-0ubuntu1/taskflow/engines/worker_based/worker.py
--- 5.12.0-2/taskflow/engines/worker_based/worker.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/engines/worker_based/worker.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -33,7 +31,7 @@ from taskflow.utils import threading_uti
 LOG = logging.getLogger(__name__)
 
 
-class Worker(object):
+class Worker:
     """Worker that can be started on a remote host for handling tasks requests.
 
     :param url: broker url
@@ -88,13 +86,13 @@ class Worker(object):
         connection_details = self._server.connection_details
         transport = connection_details.transport
         if transport.driver_version:
-            transport_driver = "%s v%s" % (transport.driver_name,
-                                           transport.driver_version)
+            transport_driver = "{} v{}".format(transport.driver_name,
+                                               transport.driver_version)
         else:
             transport_driver = transport.driver_name
         try:
             hostname = socket.getfqdn()
-        except socket.error:
+        except OSError:
             hostname = "???"
         try:
             pid = os.getpid()
diff -pruN 5.12.0-2/taskflow/examples/99_bottles.py 6.0.2-0ubuntu1/taskflow/examples/99_bottles.py
--- 5.12.0-2/taskflow/examples/99_bottles.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/99_bottles.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/alphabet_soup.py 6.0.2-0ubuntu1/taskflow/examples/alphabet_soup.py
--- 5.12.0-2/taskflow/examples/alphabet_soup.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/alphabet_soup.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/build_a_car.py 6.0.2-0ubuntu1/taskflow/examples/build_a_car.py
--- 5.12.0-2/taskflow/examples/build_a_car.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/build_a_car.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -111,7 +109,7 @@ def flow_watch(state, details):
 
 
 def task_watch(state, details):
-    print('Task %s => %s' % (details.get('task_name'), state))
+    print('Task {} => {}'.format(details.get('task_name'), state))
 
 
 flow = lf.Flow("make-auto").add(
diff -pruN 5.12.0-2/taskflow/examples/buildsystem.py 6.0.2-0ubuntu1/taskflow/examples/buildsystem.py
--- 5.12.0-2/taskflow/examples/buildsystem.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/buildsystem.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -56,7 +54,7 @@ class LinkTask(task.Task):
     default_provides = 'executable'
 
     def __init__(self, executable_path, *args, **kwargs):
-        super(LinkTask, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._executable_path = executable_path
 
     def execute(self, **kwargs):
diff -pruN 5.12.0-2/taskflow/examples/calculate_in_parallel.py 6.0.2-0ubuntu1/taskflow/examples/calculate_in_parallel.py
--- 5.12.0-2/taskflow/examples/calculate_in_parallel.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/calculate_in_parallel.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -46,7 +44,7 @@ from taskflow import task
 # more uniform manner).
 class Provider(task.Task):
     def __init__(self, name, *args, **kwargs):
-        super(Provider, self).__init__(name=name, **kwargs)
+        super().__init__(name=name, **kwargs)
         self._provide = args
 
     def execute(self):
diff -pruN 5.12.0-2/taskflow/examples/calculate_linear.py 6.0.2-0ubuntu1/taskflow/examples/calculate_linear.py
--- 5.12.0-2/taskflow/examples/calculate_linear.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/calculate_linear.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -54,7 +52,7 @@ from taskflow import task
 class Provider(task.Task):
 
     def __init__(self, name, *args, **kwargs):
-        super(Provider, self).__init__(name=name, **kwargs)
+        super().__init__(name=name, **kwargs)
         self._provide = args
 
     def execute(self):
@@ -79,8 +77,8 @@ class Adder(task.Task):
 # this function needs to undo if some later operation fails.
 class Multiplier(task.Task):
     def __init__(self, name, multiplier, provides=None, rebind=None):
-        super(Multiplier, self).__init__(name=name, provides=provides,
-                                         rebind=rebind)
+        super().__init__(name=name, provides=provides,
+                         rebind=rebind)
         self._multiplier = multiplier
 
     def execute(self, z):
diff -pruN 5.12.0-2/taskflow/examples/create_parallel_volume.py 6.0.2-0ubuntu1/taskflow/examples/create_parallel_volume.py
--- 5.12.0-2/taskflow/examples/create_parallel_volume.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/create_parallel_volume.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -45,7 +43,7 @@ def show_time(name):
     start = time.time()
     yield
     end = time.time()
-    print(" -- %s took %0.3f seconds" % (name, end - start))
+    print(" -- {} took {:0.3f} seconds".format(name, end - start))
 
 
 # This affects how many volumes to create and how much time to *simulate*
@@ -85,8 +83,7 @@ class VolumeCreator(task.Task):
         # volume create can be resumed/revert, and is much easier to use for
         # audit and tracking purposes.
         base_name = reflection.get_callable_name(self)
-        super(VolumeCreator, self).__init__(name="%s-%s" % (base_name,
-                                                            volume_id))
+        super().__init__(name="{}-{}".format(base_name, volume_id))
         self._volume_id = volume_id
 
     def execute(self):
diff -pruN 5.12.0-2/taskflow/examples/delayed_return.py 6.0.2-0ubuntu1/taskflow/examples/delayed_return.py
--- 5.12.0-2/taskflow/examples/delayed_return.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/delayed_return.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -44,7 +42,7 @@ from taskflow.types import notifier
 
 class PokeFutureListener(base.Listener):
     def __init__(self, engine, future, task_name):
-        super(PokeFutureListener, self).__init__(
+        super().__init__(
             engine,
             task_listen_for=(notifier.Notifier.ANY,),
             flow_listen_for=[])
diff -pruN 5.12.0-2/taskflow/examples/distance_calculator.py 6.0.2-0ubuntu1/taskflow/examples/distance_calculator.py
--- 5.12.0-2/taskflow/examples/distance_calculator.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/distance_calculator.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Hewlett-Packard Development Company, L.P.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -62,31 +60,24 @@ if __name__ == '__main__':
     any_distance = linear_flow.Flow("origin").add(DistanceTask())
     results = engines.run(any_distance)
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           0.0,
-                                           is_near(results['distance'], 0.0)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 0.0, is_near(results['distance'], 0.0)))
 
     results = engines.run(any_distance, store={'a': Point(1, 1)})
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           1.4142,
-                                           is_near(results['distance'],
-                                                   1.4142)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 1.4142, is_near(results['distance'], 1.4142)))
 
     results = engines.run(any_distance, store={'a': Point(10, 10)})
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           14.14199,
-                                           is_near(results['distance'],
-                                                   14.14199)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 14.14199, is_near(results['distance'], 14.14199)))
 
     results = engines.run(any_distance,
                           store={'a': Point(5, 5), 'b': Point(10, 10)})
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           7.07106,
-                                           is_near(results['distance'],
-                                                   7.07106)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 7.07106, is_near(results['distance'], 7.07106)))
 
     # For this we use the ability to override at task creation time the
     # optional arguments so that we don't need to continue to send them
@@ -97,13 +88,10 @@ if __name__ == '__main__':
     ten_distance.add(DistanceTask(inject={'a': Point(10, 10)}))
     results = engines.run(ten_distance, store={'b': Point(10, 10)})
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           0.0,
-                                           is_near(results['distance'], 0.0)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 0.0, is_near(results['distance'], 0.0)))
 
     results = engines.run(ten_distance)
     print(results)
-    print("%s is near-enough to %s: %s" % (results['distance'],
-                                           14.14199,
-                                           is_near(results['distance'],
-                                                   14.14199)))
+    print("{} is near-enough to {}: {}".format(
+        results['distance'], 14.14199, is_near(results['distance'], 14.14199)))
diff -pruN 5.12.0-2/taskflow/examples/dump_memory_backend.py 6.0.2-0ubuntu1/taskflow/examples/dump_memory_backend.py
--- 5.12.0-2/taskflow/examples/dump_memory_backend.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/dump_memory_backend.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -67,6 +65,6 @@ print("---------")
 for path in backend.memory.ls_r(backend.memory.root_path, absolute=True):
     value = backend.memory[path]
     if value:
-        print("%s -> %s" % (path, value))
+        print("{} -> {}".format(path, value))
     else:
         print("%s" % (path))
diff -pruN 5.12.0-2/taskflow/examples/echo_listener.py 6.0.2-0ubuntu1/taskflow/examples/echo_listener.py
--- 5.12.0-2/taskflow/examples/echo_listener.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/echo_listener.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/example_utils.py 6.0.2-0ubuntu1/taskflow/examples/example_utils.py
--- 5.12.0-2/taskflow/examples/example_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/example_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -52,7 +50,7 @@ def rm_path(persist_path):
         raise ValueError("Unknown how to `rm` path: %s" % (persist_path))
     try:
         rm_func(persist_path)
-    except (IOError, OSError):
+    except OSError:
         pass
 
 
diff -pruN 5.12.0-2/taskflow/examples/fake_billing.py 6.0.2-0ubuntu1/taskflow/examples/fake_billing.py
--- 5.12.0-2/taskflow/examples/fake_billing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/fake_billing.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -47,12 +45,12 @@ from taskflow.utils import misc
 # complete to 100% complete.
 
 
-class DB(object):
+class DB:
     def query(self, sql):
         print("Querying with: %s" % (sql))
 
 
-class UrlCaller(object):
+class UrlCaller:
     def __init__(self):
         self._send_time = 0.5
         self._chunks = 25
@@ -73,7 +71,7 @@ class UrlCaller(object):
 # that require access to a set of resources it is a common pattern to provide
 # a object (in this case this object) on construction of those tasks via the
 # task constructor.
-class ResourceFetcher(object):
+class ResourceFetcher:
     def __init__(self):
         self._db_handle = None
         self._url_handle = None
@@ -93,7 +91,7 @@ class ResourceFetcher(object):
 
 class ExtractInputRequest(task.Task):
     def __init__(self, resources):
-        super(ExtractInputRequest, self).__init__(provides="parsed_request")
+        super().__init__(provides="parsed_request")
         self._resources = resources
 
     def execute(self, request):
@@ -106,7 +104,7 @@ class ExtractInputRequest(task.Task):
 
 class MakeDBEntry(task.Task):
     def __init__(self, resources):
-        super(MakeDBEntry, self).__init__()
+        super().__init__()
         self._resources = resources
 
     def execute(self, parsed_request):
@@ -120,7 +118,7 @@ class MakeDBEntry(task.Task):
 
 class ActivateDriver(task.Task):
     def __init__(self, resources):
-        super(ActivateDriver, self).__init__(provides='sent_to')
+        super().__init__(provides='sent_to')
         self._resources = resources
         self._url = "http://blahblah.com"
 
@@ -138,8 +136,8 @@ class ActivateDriver(task.Task):
 
     def update_progress(self, progress, **kwargs):
         # Override the parent method to also print out the status.
-        super(ActivateDriver, self).update_progress(progress, **kwargs)
-        print("%s is %0.2f%% done" % (self.name, progress * 100))
+        super().update_progress(progress, **kwargs)
+        print("{} is {:0.2f}% done".format(self.name, progress * 100))
 
 
 class DeclareSuccess(task.Task):
@@ -148,7 +146,7 @@ class DeclareSuccess(task.Task):
         print("All data processed and sent to %s" % (sent_to))
 
 
-class DummyUser(object):
+class DummyUser:
     def __init__(self, user, id_):
         self.user = user
         self.id = id_
diff -pruN 5.12.0-2/taskflow/examples/graph_flow.py 6.0.2-0ubuntu1/taskflow/examples/graph_flow.py
--- 5.12.0-2/taskflow/examples/graph_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/graph_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -99,7 +97,7 @@ print("Single threaded engine result %s"
 for (name, value) in expected:
     actual = result.get(name)
     if actual != value:
-        sys.stderr.write("%s != %s\n" % (actual, value))
+        sys.stderr.write("{} != {}\n".format(actual, value))
         unexpected += 1
 
 result = taskflow.engines.run(
@@ -109,7 +107,7 @@ print("Multi threaded engine result %s"
 for (name, value) in expected:
     actual = result.get(name)
     if actual != value:
-        sys.stderr.write("%s != %s\n" % (actual, value))
+        sys.stderr.write("{} != {}\n".format(actual, value))
         unexpected += 1
 
 if unexpected:
diff -pruN 5.12.0-2/taskflow/examples/hello_world.py 6.0.2-0ubuntu1/taskflow/examples/hello_world.py
--- 5.12.0-2/taskflow/examples/hello_world.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/hello_world.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -38,12 +36,12 @@ from taskflow import task
 
 class PrinterTask(task.Task):
     def __init__(self, name, show_name=True, inject=None):
-        super(PrinterTask, self).__init__(name, inject=inject)
+        super().__init__(name, inject=inject)
         self._show_name = show_name
 
     def execute(self, output):
         if self._show_name:
-            print("%s: %s" % (self.name, output))
+            print("{}: {}".format(self.name, output))
         else:
             print(output)
 
diff -pruN 5.12.0-2/taskflow/examples/jobboard_produce_consume_colors.py 6.0.2-0ubuntu1/taskflow/examples/jobboard_produce_consume_colors.py
--- 5.12.0-2/taskflow/examples/jobboard_produce_consume_colors.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/jobboard_produce_consume_colors.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -84,9 +82,9 @@ def dispatch_work(job):
 def safe_print(name, message, prefix=""):
     with STDOUT_LOCK:
         if prefix:
-            print("%s %s: %s" % (prefix, name, message))
+            print("{} {}: {}".format(prefix, name, message))
         else:
-            print("%s: %s" % (name, message))
+            print("{}: {}".format(name, message))
 
 
 def worker(ident, client, consumed):
@@ -138,7 +136,7 @@ def producer(ident, client):
     safe_print(name, "started")
     with backends.backend(name, SHARED_CONF.copy(), client=client) as board:
         for i in range(0, PRODUCER_UNITS):
-            job_name = "%s-%s" % (name, i)
+            job_name = "{}-{}".format(name, i)
             details = {
                 'color': random.choice(['red', 'blue']),
             }
diff -pruN 5.12.0-2/taskflow/examples/parallel_table_multiply.py 6.0.2-0ubuntu1/taskflow/examples/parallel_table_multiply.py
--- 5.12.0-2/taskflow/examples/parallel_table_multiply.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/parallel_table_multiply.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -47,7 +45,7 @@ class RowMultiplier(task.Task):
     """Performs a modification of an input row, creating a output row."""
 
     def __init__(self, name, index, row, multiplier):
-        super(RowMultiplier, self).__init__(name=name)
+        super().__init__(name=name)
         self.index = index
         self.multiplier = multiplier
         self.row = row
diff -pruN 5.12.0-2/taskflow/examples/persistence_example.py 6.0.2-0ubuntu1/taskflow/examples/persistence_example.py
--- 5.12.0-2/taskflow/examples/persistence_example.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/persistence_example.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -58,7 +56,7 @@ class HiTask(task.Task):
 
 class ByeTask(task.Task):
     def __init__(self, blowup):
-        super(ByeTask, self).__init__()
+        super().__init__()
         self._blowup = blowup
 
     def execute(self):
diff -pruN 5.12.0-2/taskflow/examples/pseudo_scoping.py 6.0.2-0ubuntu1/taskflow/examples/pseudo_scoping.py
--- 5.12.0-2/taskflow/examples/pseudo_scoping.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/pseudo_scoping.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Ivan Melnikov <iv at altlinux dot org>
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -67,7 +65,7 @@ class CallTask(task.Task):
     """Task that calls person by number."""
 
     def execute(self, person, number):
-        print('Calling %s %s.' % (person, number))
+        print('Calling {} {}.'.format(person, number))
 
 # This is how it works for one person:
 
@@ -84,7 +82,7 @@ taskflow.engines.run(simple_flow, store=
 # we use `rebind` argument of task constructor.
 def subflow_factory(prefix):
     def pr(what):
-        return '%s-%s' % (prefix, what)
+        return '{}-{}'.format(prefix, what)
 
     return lf.Flow(pr('flow')).add(
         FetchNumberTask(pr('fetch'),
diff -pruN 5.12.0-2/taskflow/examples/resume_from_backend.py 6.0.2-0ubuntu1/taskflow/examples/resume_from_backend.py
--- 5.12.0-2/taskflow/examples/resume_from_backend.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/resume_from_backend.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -62,7 +60,7 @@ import example_utils as eu  # noqa
 
 def print_task_states(flowdetail, msg):
     eu.print_wrapped(msg)
-    print("Flow '%s' state: %s" % (flowdetail.name, flowdetail.state))
+    print("Flow '{}' state: {}".format(flowdetail.name, flowdetail.state))
     # Sort by these so that our test validation doesn't get confused by the
     # order in which the items in the flow detail can be in.
     items = sorted((td.name, td.version, td.state, td.results)
diff -pruN 5.12.0-2/taskflow/examples/resume_many_flows/my_flows.py 6.0.2-0ubuntu1/taskflow/examples/resume_many_flows/my_flows.py
--- 5.12.0-2/taskflow/examples/resume_many_flows/my_flows.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/resume_many_flows/my_flows.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/resume_many_flows/resume_all.py 6.0.2-0ubuntu1/taskflow/examples/resume_many_flows/resume_all.py
--- 5.12.0-2/taskflow/examples/resume_many_flows/resume_all.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/resume_many_flows/resume_all.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -39,7 +37,7 @@ FINISHED_STATES = (states.SUCCESS, state
 
 
 def resume(flowdetail, backend):
-    print('Resuming flow %s %s' % (flowdetail.name, flowdetail.uuid))
+    print('Resuming flow {} {}'.format(flowdetail.name, flowdetail.uuid))
     engine = taskflow.engines.load_from_detail(flow_detail=flowdetail,
                                                backend=backend)
     engine.run()
diff -pruN 5.12.0-2/taskflow/examples/resume_many_flows/run_flow.py 6.0.2-0ubuntu1/taskflow/examples/resume_many_flows/run_flow.py
--- 5.12.0-2/taskflow/examples/resume_many_flows/run_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/resume_many_flows/run_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -38,6 +36,6 @@ import my_flows  # noqa
 with example_utils.get_backend() as backend:
     engine = taskflow.engines.load_from_factory(my_flows.flow_factory,
                                                 backend=backend)
-    print('Running flow %s %s' % (engine.storage.flow_name,
-                                  engine.storage.flow_uuid))
+    print('Running flow {} {}'.format(engine.storage.flow_name,
+                                      engine.storage.flow_uuid))
     engine.run()
diff -pruN 5.12.0-2/taskflow/examples/resume_many_flows.py 6.0.2-0ubuntu1/taskflow/examples/resume_many_flows.py
--- 5.12.0-2/taskflow/examples/resume_many_flows.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/resume_many_flows.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/resume_vm_boot.py 6.0.2-0ubuntu1/taskflow/examples/resume_vm_boot.py
--- 5.12.0-2/taskflow/examples/resume_vm_boot.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/resume_vm_boot.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -63,7 +61,7 @@ class PrintText(task.Task):
     """Just inserts some text print outs in a workflow."""
     def __init__(self, print_what, no_slow=False):
         content_hash = hashlib.md5(print_what.encode('utf-8')).hexdigest()[0:8]
-        super(PrintText, self).__init__(name="Print: %s" % (content_hash))
+        super().__init__(name="Print: %s" % (content_hash))
         self._text = print_what
         self._no_slow = no_slow
 
@@ -78,7 +76,7 @@ class PrintText(task.Task):
 class DefineVMSpec(task.Task):
     """Defines a vm specification to be."""
     def __init__(self, name):
-        super(DefineVMSpec, self).__init__(provides='vm_spec', name=name)
+        super().__init__(provides='vm_spec', name=name)
 
     def execute(self):
         return {
@@ -93,8 +91,7 @@ class DefineVMSpec(task.Task):
 class LocateImages(task.Task):
     """Locates where the vm images are."""
     def __init__(self, name):
-        super(LocateImages, self).__init__(provides='image_locations',
-                                           name=name)
+        super().__init__(provides='image_locations', name=name)
 
     def execute(self, vm_spec):
         image_locations = {}
@@ -107,13 +104,13 @@ class LocateImages(task.Task):
 class DownloadImages(task.Task):
     """Downloads all the vm images."""
     def __init__(self, name):
-        super(DownloadImages, self).__init__(provides='download_paths',
-                                             name=name)
+        super().__init__(provides='download_paths',
+                         name=name)
 
     def execute(self, image_locations):
         for src, loc in image_locations.items():
             with slow_down(1):
-                print("Downloading from %s => %s" % (src, loc))
+                print("Downloading from {} => {}".format(src, loc))
         return sorted(image_locations.values())
 
 
@@ -125,8 +122,8 @@ IPADDR=%s
 ONBOOT=yes"""
 
     def __init__(self, name):
-        super(CreateNetworkTpl, self).__init__(provides='network_settings',
-                                               name=name)
+        super().__init__(provides='network_settings',
+                         name=name)
 
     def execute(self, ips):
         settings = []
@@ -138,7 +135,7 @@ ONBOOT=yes"""
 class AllocateIP(task.Task):
     """Allocates the ips for the given vm."""
     def __init__(self, name):
-        super(AllocateIP, self).__init__(provides='ips', name=name)
+        super().__init__(provides='ips', name=name)
 
     def execute(self, vm_spec):
         ips = []
@@ -152,7 +149,7 @@ class WriteNetworkSettings(task.Task):
     def execute(self, download_paths, network_settings):
         for j, path in enumerate(download_paths):
             with slow_down(1):
-                print("Mounting %s to /tmp/%s" % (path, j))
+                print("Mounting {} to /tmp/{}".format(path, j))
             for i, setting in enumerate(network_settings):
                 filename = ("/tmp/etc/sysconfig/network-scripts/"
                             "ifcfg-eth%s" % (i))
@@ -263,8 +260,8 @@ with eu.get_backend() as backend:
                                            backend=backend, book=book,
                                            engine='parallel',
                                            executor=executor)
-        print("!! Your tracking id is: '%s+%s'" % (book.uuid,
-                                                   engine.storage.flow_uuid))
+        print("!! Your tracking id is: '{}+{}'".format(
+            book.uuid, engine.storage.flow_uuid))
         print("!! Please submit this on later runs for tracking purposes")
     else:
         # Attempt to load from a previously partially completed flow.
diff -pruN 5.12.0-2/taskflow/examples/resume_volume_create.py 6.0.2-0ubuntu1/taskflow/examples/resume_volume_create.py
--- 5.12.0-2/taskflow/examples/resume_volume_create.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/resume_volume_create.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -72,7 +70,7 @@ def find_flow_detail(backend, book_id, f
 class PrintText(task.Task):
     def __init__(self, print_what, no_slow=False):
         content_hash = hashlib.md5(print_what.encode('utf-8')).hexdigest()[0:8]
-        super(PrintText, self).__init__(name="Print: %s" % (content_hash))
+        super().__init__(name="Print: %s" % (content_hash))
         self._text = print_what
         self._no_slow = no_slow
 
@@ -141,8 +139,8 @@ with example_utils.get_backend() as back
         book.add(flow_detail)
         with contextlib.closing(backend.get_connection()) as conn:
             conn.save_logbook(book)
-        print("!! Your tracking id is: '%s+%s'" % (book.uuid,
-                                                   flow_detail.uuid))
+        print("!! Your tracking id is: '{}+{}'".format(book.uuid,
+                                                       flow_detail.uuid))
         print("!! Please submit this on later runs for tracking purposes")
     else:
         flow_detail = find_flow_detail(backend, book_id, flow_id)
diff -pruN 5.12.0-2/taskflow/examples/retry_flow.py 6.0.2-0ubuntu1/taskflow/examples/retry_flow.py
--- 5.12.0-2/taskflow/examples/retry_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/retry_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/reverting_linear.py 6.0.2-0ubuntu1/taskflow/examples/reverting_linear.py
--- 5.12.0-2/taskflow/examples/reverting_linear.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/reverting_linear.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -59,7 +57,7 @@ class CallJoe(task.Task):
 
 class CallSuzzie(task.Task):
     def execute(self, suzzie_number, *args, **kwargs):
-        raise IOError("Suzzie not home right now.")
+        raise OSError("Suzzie not home right now.")
 
 
 # Create your flow and associated tasks (the work to be done).
diff -pruN 5.12.0-2/taskflow/examples/run_by_iter.py 6.0.2-0ubuntu1/taskflow/examples/run_by_iter.py
--- 5.12.0-2/taskflow/examples/run_by_iter.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/run_by_iter.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/run_by_iter_enumerate.py 6.0.2-0ubuntu1/taskflow/examples/run_by_iter_enumerate.py
--- 5.12.0-2/taskflow/examples/run_by_iter_enumerate.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/run_by_iter_enumerate.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -51,4 +49,4 @@ e.compile()
 e.prepare()
 
 for i, st in enumerate(e.run_iter(), 1):
-    print("Transition %s: %s" % (i, st))
+    print("Transition {}: {}".format(i, st))
diff -pruN 5.12.0-2/taskflow/examples/share_engine_thread.py 6.0.2-0ubuntu1/taskflow/examples/share_engine_thread.py
--- 5.12.0-2/taskflow/examples/share_engine_thread.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/share_engine_thread.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -43,11 +41,11 @@ from taskflow.utils import threading_uti
 
 class DelayedTask(task.Task):
     def __init__(self, name):
-        super(DelayedTask, self).__init__(name=name)
+        super().__init__(name=name)
         self._wait_for = random.random()
 
     def execute(self):
-        print("Running '%s' in thread '%s'" % (self.name, tu.get_ident()))
+        print("Running '{}' in thread '{}'".format(self.name, tu.get_ident()))
         time.sleep(self._wait_for)
 
 
diff -pruN 5.12.0-2/taskflow/examples/simple_linear.py 6.0.2-0ubuntu1/taskflow/examples/simple_linear.py
--- 5.12.0-2/taskflow/examples/simple_linear.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/simple_linear.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/simple_linear_listening.py 6.0.2-0ubuntu1/taskflow/examples/simple_linear_listening.py
--- 5.12.0-2/taskflow/examples/simple_linear_listening.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/simple_linear_listening.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -71,7 +69,7 @@ def flow_watch(state, details):
 
 
 def task_watch(state, details):
-    print('Task %s => %s' % (details.get('task_name'), state))
+    print('Task {} => {}'.format(details.get('task_name'), state))
 
 
 # Wrap your functions into a task type that knows how to treat your functions
diff -pruN 5.12.0-2/taskflow/examples/simple_linear_pass.py 6.0.2-0ubuntu1/taskflow/examples/simple_linear_pass.py
--- 5.12.0-2/taskflow/examples/simple_linear_pass.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/simple_linear_pass.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/simple_map_reduce.py 6.0.2-0ubuntu1/taskflow/examples/simple_map_reduce.py
--- 5.12.0-2/taskflow/examples/simple_map_reduce.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/simple_map_reduce.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/switch_graph_flow.py 6.0.2-0ubuntu1/taskflow/examples/switch_graph_flow.py
--- 5.12.0-2/taskflow/examples/switch_graph_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/switch_graph_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -64,7 +62,7 @@ while entries:
     path = entries.pop()
     value = backend.memory[path]
     if value:
-        print("%s -> %s" % (path, value))
+        print("{} -> {}".format(path, value))
     else:
         print("%s" % (path))
     entries.extend(os.path.join(path, child)
diff -pruN 5.12.0-2/taskflow/examples/timing_listener.py 6.0.2-0ubuntu1/taskflow/examples/timing_listener.py
--- 5.12.0-2/taskflow/examples/timing_listener.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/timing_listener.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -45,7 +43,7 @@ from taskflow import task
 
 class VariableTask(task.Task):
     def __init__(self, name):
-        super(VariableTask, self).__init__(name)
+        super().__init__(name)
         self._sleepy_time = random.random()
 
     def execute(self):
diff -pruN 5.12.0-2/taskflow/examples/tox_conductor.py 6.0.2-0ubuntu1/taskflow/examples/tox_conductor.py
--- 5.12.0-2/taskflow/examples/tox_conductor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/tox_conductor.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -60,7 +58,7 @@ from taskflow.utils import threading_uti
 RUN_TIME = 5
 REVIEW_CREATION_DELAY = 0.5
 SCAN_DELAY = 0.1
-NAME = "%s_%s" % (socket.getfqdn(), os.getpid())
+NAME = "{}_{}".format(socket.getfqdn(), os.getpid())
 
 # This won't really use zookeeper but will use a local version of it using
 # the zake library that mimics an actual zookeeper cluster using threads and
@@ -74,7 +72,7 @@ class RunReview(task.Task):
     # A dummy task that clones the review and runs tox...
 
     def _clone_review(self, review, temp_dir):
-        print("Cloning review '%s' into %s" % (review['id'], temp_dir))
+        print("Cloning review '{}' into {}".format(review['id'], temp_dir))
 
     def _run_tox(self, temp_dir):
         print("Running tox in %s" % temp_dir)
@@ -177,7 +175,7 @@ def generate_reviewer(client, saver, nam
                         'review': review,
                     },
                 }
-                job_name = "%s_%s" % (real_name, review['id'])
+                job_name = "{}_{}".format(real_name, review['id'])
                 print("Posting review '%s'" % review['id'])
                 jb.post(job_name,
                         book=make_save_book(saver, review['id']),
diff -pruN 5.12.0-2/taskflow/examples/wbe_event_sender.py 6.0.2-0ubuntu1/taskflow/examples/wbe_event_sender.py
--- 5.12.0-2/taskflow/examples/wbe_event_sender.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/wbe_event_sender.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/wbe_mandelbrot.py 6.0.2-0ubuntu1/taskflow/examples/wbe_mandelbrot.py
--- 5.12.0-2/taskflow/examples/wbe_mandelbrot.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/wbe_mandelbrot.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/wbe_simple_linear.py 6.0.2-0ubuntu1/taskflow/examples/wbe_simple_linear.py
--- 5.12.0-2/taskflow/examples/wbe_simple_linear.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/wbe_simple_linear.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/examples/wrapped_exception.py 6.0.2-0ubuntu1/taskflow/examples/wrapped_exception.py
--- 5.12.0-2/taskflow/examples/wrapped_exception.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/examples/wrapped_exception.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/exceptions.py 6.0.2-0ubuntu1/taskflow/exceptions.py
--- 5.12.0-2/taskflow/exceptions.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/exceptions.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -67,7 +65,7 @@ class TaskFlowException(Exception):
                   this is not yet implemented/supported natively.
     """
     def __init__(self, message, cause=None):
-        super(TaskFlowException, self).__init__(message)
+        super().__init__(message)
         self._cause = cause
 
     @property
@@ -192,7 +190,7 @@ class MissingDependencies(DependencyFail
         message = self.MESSAGE_TPL % {'who': who, 'requirements': requirements}
         if method:
             message = (self.METHOD_TPL % {'method': method}) + message
-        super(MissingDependencies, self).__init__(message, cause=cause)
+        super().__init__(message, cause=cause)
         self.missing_requirements = requirements
 
 
@@ -228,7 +226,7 @@ class DisallowedAccess(TaskFlowException
     """Raised when storage access is not possible due to state limitations."""
 
     def __init__(self, message, cause=None, state=None):
-        super(DisallowedAccess, self).__init__(message, cause=cause)
+        super().__init__(message, cause=cause)
         self.state = state
 
 
@@ -261,7 +259,7 @@ class WrappedFailure(Exception):
     """
 
     def __init__(self, causes):
-        super(WrappedFailure, self).__init__()
+        super().__init__()
         self._causes = []
         for cause in causes:
             if cause.check(type(self)) and cause.exception:
@@ -306,8 +304,8 @@ class WrappedFailure(Exception):
 
     def __str__(self):
         buf = io.StringIO()
-        buf.write(u'WrappedFailure: [')
+        buf.write('WrappedFailure: [')
         causes_gen = (str(cause) for cause in self._causes)
-        buf.write(u", ".join(causes_gen))
-        buf.write(u']')
+        buf.write(", ".join(causes_gen))
+        buf.write(']')
         return buf.getvalue()
diff -pruN 5.12.0-2/taskflow/flow.py 6.0.2-0ubuntu1/taskflow/flow.py
--- 5.12.0-2/taskflow/flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -42,7 +40,7 @@ _CHOP_PAT_LEN = len(_CHOP_PAT)
 LINK_DECIDER_DEPTH = 'decider_depth'
 
 
-class Flow(object, metaclass=abc.ABCMeta):
+class Flow(metaclass=abc.ABCMeta):
     """The base abstract class of all flow implementations.
 
     A flow is a structure that defines relationships between tasks. You can
diff -pruN 5.12.0-2/taskflow/formatters.py 6.0.2-0ubuntu1/taskflow/formatters.py
--- 5.12.0-2/taskflow/formatters.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/formatters.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -14,6 +12,7 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+import copy
 import functools
 
 from taskflow.engines.action_engine import compiler
@@ -58,17 +57,29 @@ def _fetch_predecessor_tree(graph, atom)
     return root
 
 
-class FailureFormatter(object):
+class FailureFormatter:
     """Formats a failure and connects it to associated atoms & engine."""
 
     _BUILDERS = {
         states.EXECUTE: (_fetch_predecessor_tree, 'predecessors'),
     }
 
-    def __init__(self, engine, hide_inputs_outputs_of=()):
+    def __init__(self, engine, hide_inputs_outputs_of=(),
+                 mask_inputs_keys=(), mask_outputs_keys=()):
         self._hide_inputs_outputs_of = hide_inputs_outputs_of
+        self._mask_inputs_keys = mask_inputs_keys
+        self._mask_outputs_keys = mask_outputs_keys
         self._engine = engine
 
+    def _mask_keys(self, data, mask_keys):
+        if not data or not isinstance(data, dict):
+            return data
+        result = copy.deepcopy(data)
+        for k in mask_keys:
+            if k in result:
+                result[k] = '***'
+        return result
+
     def _format_node(self, storage, cache, node):
         """Formats a single tree node into a string version."""
         if node.metadata['kind'] == compiler.FLOW:
@@ -100,14 +111,16 @@ class FailureFormatter(object):
                                                        atom_name,
                                                        fetch_mapped_args)
                 if requires_found:
-                    atom_attrs['requires'] = requires
+                    atom_attrs['requires'] = self._mask_keys(
+                        requires, self._mask_inputs_keys)
                 provides, provides_found = _cached_get(
                     cache, 'provides', atom_name,
                     storage.get_execute_result, atom_name)
                 if provides_found:
-                    atom_attrs['provides'] = provides
+                    atom_attrs['provides'] = self._mask_keys(
+                        provides, self._mask_outputs_keys)
             if atom_attrs:
-                return "Atom '%s' %s" % (atom_name, atom_attrs)
+                return "Atom '{}' {}".format(atom_name, atom_attrs)
             else:
                 return "Atom '%s'" % (atom_name)
         else:
@@ -156,7 +169,8 @@ class FailureFormatter(object):
             builder, kind = self._BUILDERS[atom_intention]
             rooted_tree = builder(graph, atom)
             child_count = rooted_tree.child_count(only_direct=False)
-            buff.write_nl('%s %s (most recent first):' % (child_count, kind))
+            buff.write_nl(
+                '{} {} (most recent first):'.format(child_count, kind))
             formatter = functools.partial(self._format_node, storage, cache)
             direct_child_count = rooted_tree.child_count(only_direct=True)
             for i, child in enumerate(rooted_tree, 1):
diff -pruN 5.12.0-2/taskflow/jobs/backends/__init__.py 6.0.2-0ubuntu1/taskflow/jobs/backends/__init__.py
--- 5.12.0-2/taskflow/jobs/backends/__init__.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/jobs/backends/__init__.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/jobs/backends/impl_redis.py 6.0.2-0ubuntu1/taskflow/jobs/backends/impl_redis.py
--- 5.12.0-2/taskflow/jobs/backends/impl_redis.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/jobs/backends/impl_redis.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -17,7 +15,6 @@
 import contextlib
 import datetime
 import functools
-import re
 import string
 import threading
 import time
@@ -26,6 +23,7 @@ import fasteners
 import msgpack
 from oslo_serialization import msgpackutils
 from oslo_utils import excutils
+from oslo_utils import netutils
 from oslo_utils import strutils
 from oslo_utils import timeutils
 from oslo_utils import uuidutils
@@ -69,10 +67,10 @@ class RedisJob(base.Job):
                  created_on=None, backend=None,
                  book=None, book_data=None,
                  priority=base.JobPriority.NORMAL):
-        super(RedisJob, self).__init__(board, name,
-                                       uuid=uuid, details=details,
-                                       backend=backend,
-                                       book=book, book_data=book_data)
+        super().__init__(board, name,
+                         uuid=uuid, details=details,
+                         backend=backend,
+                         book=book, book_data=book_data)
         self._created_on = created_on
         self._client = board._client
         self._redis_version = board._redis_version
@@ -561,15 +559,17 @@ return cmsgpack.pack(result)
 
     @classmethod
     def _parse_sentinel(cls, sentinel):
-        # IPv6 (eg. [::1]:6379 )
-        match = re.search(r'^\[(\S+)\]:(\d+)$', sentinel)
-        if match:
-            return (match[1], int(match[2]))
-        # IPv4 or hostname (eg. 127.0.0.1:6379 or localhost:6379)
-        match = re.search(r'^(\S+):(\d+)$', sentinel)
-        if match:
-            return (match[1], int(match[2]))
-        raise ValueError('Malformed sentinel server format')
+        host, port = netutils.parse_host_port(sentinel)
+        if host is None or port is None:
+            raise ValueError('Malformed sentinel server format')
+        return (host, port)
+
+    @classmethod
+    def _filter_ssl_options(cls, opts):
+        if not opts.get('ssl', False):
+            return {k: v for (k, v) in opts.items()
+                    if not k.startswith('ssl_')}
+        return opts
 
     @classmethod
     def _make_client(cls, conf):
@@ -584,8 +584,12 @@ return cmsgpack.pack(result)
             sentinels = [(client_conf.pop('host'), client_conf.pop('port'))]
             for fallback in conf.get('sentinel_fallbacks', []):
                 sentinels.append(cls._parse_sentinel(fallback))
+            client_conf = cls._filter_ssl_options(client_conf)
+            sentinel_kwargs = conf.get('sentinel_kwargs')
+            if sentinel_kwargs is not None:
+                sentinel_kwargs = cls._filter_ssl_options(sentinel_kwargs)
             s = sentinel.Sentinel(sentinels,
-                                  sentinel_kwargs=conf.get('sentinel_kwargs'),
+                                  sentinel_kwargs=sentinel_kwargs,
                                   **client_conf)
             return s.master_for(conf['sentinel'])
         else:
@@ -593,7 +597,7 @@ return cmsgpack.pack(result)
 
     def __init__(self, name, conf,
                  client=None, persistence=None):
-        super(RedisJobBoard, self).__init__(name, conf)
+        super().__init__(name, conf)
         self._closed = True
         if client is not None:
             self._client = client
diff -pruN 5.12.0-2/taskflow/jobs/backends/impl_zookeeper.py 6.0.2-0ubuntu1/taskflow/jobs/backends/impl_zookeeper.py
--- 5.12.0-2/taskflow/jobs/backends/impl_zookeeper.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/jobs/backends/impl_zookeeper.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -51,10 +49,10 @@ class ZookeeperJob(base.Job):
                  uuid=None, details=None, book=None, book_data=None,
                  created_on=None, backend=None,
                  priority=base.JobPriority.NORMAL):
-        super(ZookeeperJob, self).__init__(board, name,
-                                           uuid=uuid, details=details,
-                                           backend=backend,
-                                           book=book, book_data=book_data)
+        super().__init__(board, name,
+                         uuid=uuid, details=details,
+                         backend=backend,
+                         book=book, book_data=book_data)
         self._client = client
         self._path = k_paths.normpath(path)
         self._lock_path = self._path + board.LOCK_POSTFIX
@@ -281,7 +279,7 @@ class ZookeeperJobBoard(base.NotifyingJo
 
     def __init__(self, name, conf,
                  client=None, persistence=None, emit_notifications=True):
-        super(ZookeeperJobBoard, self).__init__(name, conf)
+        super().__init__(name, conf)
         if client is not None:
             self._client = client
             self._owned = False
@@ -552,7 +550,8 @@ class ZookeeperJobBoard(base.NotifyingJo
             except Exception:
                 owner = None
             if owner:
-                message = "Job %s already claimed by '%s'" % (job.uuid, owner)
+                message = "Job {} already claimed by '{}'".format(
+                    job.uuid, owner)
             else:
                 message = "Job %s already claimed" % (job.uuid)
             excp.raise_with_cause(excp.UnclaimableJob,
diff -pruN 5.12.0-2/taskflow/jobs/base.py 6.0.2-0ubuntu1/taskflow/jobs/base.py
--- 5.12.0-2/taskflow/jobs/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/jobs/base.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
@@ -110,7 +108,7 @@ class JobPriority(enum.Enum):
                 return tuple(values)
 
 
-class Job(object, metaclass=abc.ABCMeta):
+class Job(metaclass=abc.ABCMeta):
     """A abstraction that represents a named and trackable unit of work.
 
     A job connects a logbook, a owner, a priority, last modified and created
@@ -277,12 +275,12 @@ class Job(object, metaclass=abc.ABCMeta)
     def __str__(self):
         """Pretty formats the job into something *more* meaningful."""
         cls_name = type(self).__name__
-        return "%s: %s (priority=%s, uuid=%s, details=%s)" % (
+        return "{}: {} (priority={}, uuid={}, details={})".format(
             cls_name, self.name, self.priority,
             self.uuid, self.details)
 
 
-class JobBoardIterator(object):
+class JobBoardIterator:
     """Iterator over a jobboard that iterates over potential jobs.
 
     It provides the following attributes:
@@ -355,7 +353,7 @@ class JobBoardIterator(object):
             return job
 
 
-class JobBoard(object, metaclass=abc.ABCMeta):
+class JobBoard(metaclass=abc.ABCMeta):
     """A place where jobs can be posted, reposted, claimed and transferred.
 
     There can be multiple implementations of this job board, depending on the
@@ -565,7 +563,7 @@ class NotifyingJobBoard(JobBoard):
     registered are thread safe (and block for as little time as possible).
     """
     def __init__(self, name, conf):
-        super(NotifyingJobBoard, self).__init__(name, conf)
+        super().__init__(name, conf)
         self.notifier = notifier.Notifier()
 
 
diff -pruN 5.12.0-2/taskflow/listeners/base.py 6.0.2-0ubuntu1/taskflow/listeners/base.py
--- 5.12.0-2/taskflow/listeners/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/listeners/base.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -77,7 +75,7 @@ def _bulk_register(watch_states, notifie
         return registered
 
 
-class Listener(object):
+class Listener:
     """Base class for listeners.
 
     A listener can be attached to an engine to do various actions on flow and
diff -pruN 5.12.0-2/taskflow/listeners/capturing.py 6.0.2-0ubuntu1/taskflow/listeners/capturing.py
--- 5.12.0-2/taskflow/listeners/capturing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/listeners/capturing.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -65,7 +63,7 @@ class CaptureListener(base.Listener):
                  # Provide your own list (or previous list) to accumulate
                  # into...
                  values=None):
-        super(CaptureListener, self).__init__(
+        super().__init__(
             engine,
             task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for,
diff -pruN 5.12.0-2/taskflow/listeners/claims.py 6.0.2-0ubuntu1/taskflow/listeners/claims.py
--- 5.12.0-2/taskflow/listeners/claims.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/listeners/claims.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -49,7 +47,7 @@ class CheckingClaimListener(base.Listene
     """
 
     def __init__(self, engine, job, board, owner, on_job_loss=None):
-        super(CheckingClaimListener, self).__init__(engine)
+        super().__init__(engine)
         self._job = job
         self._board = board
         self._owner = owner
diff -pruN 5.12.0-2/taskflow/listeners/logging.py 6.0.2-0ubuntu1/taskflow/listeners/logging.py
--- 5.12.0-2/taskflow/listeners/logging.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/listeners/logging.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -46,7 +44,7 @@ class LoggingListener(base.DumpingListen
                  retry_listen_for=base.DEFAULT_LISTEN_FOR,
                  log=None,
                  level=logging.DEBUG):
-        super(LoggingListener, self).__init__(
+        super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
         self._logger = misc.pick_first_not_none(log, self._LOGGER, LOG)
@@ -110,8 +108,10 @@ class DynamicLoggingListener(base.Listen
                  retry_listen_for=base.DEFAULT_LISTEN_FOR,
                  log=None, failure_level=logging.WARNING,
                  level=logging.DEBUG, hide_inputs_outputs_of=(),
-                 fail_formatter=None):
-        super(DynamicLoggingListener, self).__init__(
+                 fail_formatter=None,
+                 mask_inputs_keys=(),
+                 mask_outputs_keys=()):
+        super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
         self._failure_level = failure_level
@@ -127,11 +127,15 @@ class DynamicLoggingListener(base.Listen
             states.REVERTED: self._failure_level,
         }
         self._hide_inputs_outputs_of = frozenset(hide_inputs_outputs_of)
+        self._mask_inputs_keys = frozenset(mask_inputs_keys)
+        self._mask_outputs_keys = frozenset(mask_outputs_keys)
         self._logger = misc.pick_first_not_none(log, self._LOGGER, LOG)
         if fail_formatter is None:
             self._fail_formatter = formatters.FailureFormatter(
                 self._engine,
-                hide_inputs_outputs_of=self._hide_inputs_outputs_of)
+                hide_inputs_outputs_of=self._hide_inputs_outputs_of,
+                mask_inputs_keys=self._mask_inputs_keys,
+                mask_outputs_keys=self._mask_outputs_keys)
         else:
             self._fail_formatter = fail_formatter
 
diff -pruN 5.12.0-2/taskflow/listeners/printing.py 6.0.2-0ubuntu1/taskflow/listeners/printing.py
--- 5.12.0-2/taskflow/listeners/printing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/listeners/printing.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -27,7 +25,7 @@ class PrintingListener(base.DumpingListe
                  flow_listen_for=base.DEFAULT_LISTEN_FOR,
                  retry_listen_for=base.DEFAULT_LISTEN_FOR,
                  stderr=False):
-        super(PrintingListener, self).__init__(
+        super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
         if stderr:
diff -pruN 5.12.0-2/taskflow/listeners/timing.py 6.0.2-0ubuntu1/taskflow/listeners/timing.py
--- 5.12.0-2/taskflow/listeners/timing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/listeners/timing.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,7 +24,7 @@ from taskflow import logging
 from taskflow import states
 
 STARTING_STATES = frozenset((states.RUNNING, states.REVERTING))
-FINISHED_STATES = frozenset((base.FINISH_STATES + (states.REVERTED,)))
+FINISHED_STATES = frozenset(base.FINISH_STATES + (states.REVERTED,))
 WATCH_STATES = frozenset(itertools.chain(FINISHED_STATES, STARTING_STATES,
                                          [states.PENDING]))
 
@@ -48,13 +46,13 @@ class DurationListener(base.Listener):
     to task metadata with key ``'duration'``.
     """
     def __init__(self, engine):
-        super(DurationListener, self).__init__(engine,
-                                               task_listen_for=WATCH_STATES,
-                                               flow_listen_for=WATCH_STATES)
+        super().__init__(engine,
+                         task_listen_for=WATCH_STATES,
+                         flow_listen_for=WATCH_STATES)
         self._timers = {co.TASK: {}, co.FLOW: {}}
 
     def deregister(self):
-        super(DurationListener, self).deregister()
+        super().deregister()
         # There should be none that still exist at deregistering time, so log a
         # warning if there were any that somehow still got left behind...
         for item_type, timers in self._timers.items():
@@ -105,23 +103,22 @@ class PrintingDurationListener(DurationL
     """Listener that prints the duration as well as recording it."""
 
     def __init__(self, engine, printer=None):
-        super(PrintingDurationListener, self).__init__(engine)
+        super().__init__(engine)
         if printer is None:
             self._printer = _printer
         else:
             self._printer = printer
 
     def _record_ending(self, timer, item_type, item_name, state):
-        super(PrintingDurationListener, self)._record_ending(
+        super()._record_ending(
             timer, item_type, item_name, state)
         self._printer("It took %s '%s' %0.2f seconds to"
                       " finish." % (item_type, item_name, timer.elapsed()))
 
     def _receiver(self, item_type, item_name, state):
-        super(PrintingDurationListener, self)._receiver(item_type,
-                                                        item_name, state)
+        super()._receiver(item_type, item_name, state)
         if state in STARTING_STATES:
-            self._printer("'%s' %s started." % (item_name, item_type))
+            self._printer("'{}' {} started.".format(item_name, item_type))
 
 
 class EventTimeListener(base.Listener):
@@ -139,7 +136,7 @@ class EventTimeListener(base.Listener):
                  task_listen_for=base.DEFAULT_LISTEN_FOR,
                  flow_listen_for=base.DEFAULT_LISTEN_FOR,
                  retry_listen_for=base.DEFAULT_LISTEN_FOR):
-        super(EventTimeListener, self).__init__(
+        super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
 
diff -pruN 5.12.0-2/taskflow/logging.py 6.0.2-0ubuntu1/taskflow/logging.py
--- 5.12.0-2/taskflow/logging.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/logging.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/patterns/graph_flow.py 6.0.2-0ubuntu1/taskflow/patterns/graph_flow.py
--- 5.12.0-2/taskflow/patterns/graph_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/patterns/graph_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -66,7 +64,7 @@ class Flow(flow.Flow):
     """
 
     def __init__(self, name, retry=None):
-        super(Flow, self).__init__(name, retry)
+        super().__init__(name, retry)
         self._graph = gr.DiGraph(name=name)
         self._graph.freeze()
 
@@ -332,7 +330,7 @@ class TargetedFlow(Flow):
     """
 
     def __init__(self, *args, **kwargs):
-        super(TargetedFlow, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._subgraph = None
         self._target = None
 
diff -pruN 5.12.0-2/taskflow/patterns/linear_flow.py 6.0.2-0ubuntu1/taskflow/patterns/linear_flow.py
--- 5.12.0-2/taskflow/patterns/linear_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/patterns/linear_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -36,7 +34,7 @@ class Flow(flow.Flow):
     """
 
     def __init__(self, name, retry=None):
-        super(Flow, self).__init__(name, retry)
+        super().__init__(name, retry)
         self._graph = gr.OrderedDiGraph(name=name)
         self._last_item = self._no_last_item
 
@@ -55,8 +53,7 @@ class Flow(flow.Flow):
         return len(self._graph)
 
     def __iter__(self):
-        for item in self._graph.nodes:
-            yield item
+        yield from self._graph.nodes
 
     @property
     def requires(self):
@@ -71,9 +68,7 @@ class Flow(flow.Flow):
         return frozenset(requires)
 
     def iter_nodes(self):
-        for (n, n_data) in self._graph.nodes(data=True):
-            yield (n, n_data)
+        yield from self._graph.nodes(data=True)
 
     def iter_links(self):
-        for (u, v, e_data) in self._graph.edges(data=True):
-            yield (u, v, e_data)
+        yield from self._graph.edges(data=True)
diff -pruN 5.12.0-2/taskflow/patterns/unordered_flow.py 6.0.2-0ubuntu1/taskflow/patterns/unordered_flow.py
--- 5.12.0-2/taskflow/patterns/unordered_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/patterns/unordered_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,7 +24,7 @@ class Flow(flow.Flow):
     """
 
     def __init__(self, name, retry=None):
-        super(Flow, self).__init__(name, retry)
+        super().__init__(name, retry)
         self._graph = gr.Graph(name=name)
 
     def add(self, *items):
@@ -40,16 +38,13 @@ class Flow(flow.Flow):
         return len(self._graph)
 
     def __iter__(self):
-        for item in self._graph:
-            yield item
+        yield from self._graph
 
     def iter_links(self):
-        for (u, v, e_data) in self._graph.edges(data=True):
-            yield (u, v, e_data)
+        yield from self._graph.edges(data=True)
 
     def iter_nodes(self):
-        for n, n_data in self._graph.nodes(data=True):
-            yield (n, n_data)
+        yield from self._graph.nodes(data=True)
 
     @property
     def requires(self):
diff -pruN 5.12.0-2/taskflow/persistence/backends/__init__.py 6.0.2-0ubuntu1/taskflow/persistence/backends/__init__.py
--- 5.12.0-2/taskflow/persistence/backends/__init__.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/__init__.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -64,7 +62,7 @@ def fetch(conf, namespace=BACKEND_NAMESP
                                    invoke_kwds=kwargs)
         return mgr.driver
     except RuntimeError as e:
-        raise exc.NotFound("Could not find backend %s: %s" % (backend, e))
+        raise exc.NotFound("Could not find backend {}: {}".format(backend, e))
 
 
 @contextlib.contextmanager
diff -pruN 5.12.0-2/taskflow/persistence/backends/impl_dir.py 6.0.2-0ubuntu1/taskflow/persistence/backends/impl_dir.py
--- 5.12.0-2/taskflow/persistence/backends/impl_dir.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/impl_dir.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
@@ -17,7 +15,6 @@
 
 import contextlib
 import errno
-import io
 import os
 import shutil
 
@@ -69,7 +66,7 @@ class DirBackend(path_based.PathBasedBac
     """
 
     def __init__(self, conf):
-        super(DirBackend, self).__init__(conf)
+        super().__init__(conf)
         max_cache_size = self._conf.get('max_cache_size')
         if max_cache_size is not None:
             max_cache_size = int(max_cache_size)
@@ -100,7 +97,7 @@ class Connection(path_based.PathBasedCon
         mtime = os.path.getmtime(filename)
         cache_info = self.backend.file_cache.setdefault(filename, {})
         if not cache_info or mtime > cache_info.get('mtime', 0):
-            with io.open(filename, 'r', encoding=self.backend.encoding) as fp:
+            with open(filename, encoding=self.backend.encoding) as fp:
                 cache_info['data'] = fp.read()
                 cache_info['mtime'] = mtime
         return cache_info['data']
@@ -108,7 +105,7 @@ class Connection(path_based.PathBasedCon
     def _write_to(self, filename, contents):
         contents = misc.binary_encode(contents,
                                       encoding=self.backend.encoding)
-        with io.open(filename, 'wb') as fp:
+        with open(filename, 'wb') as fp:
             fp.write(contents)
         self.backend.file_cache.pop(filename, None)
 
diff -pruN 5.12.0-2/taskflow/persistence/backends/impl_memory.py 6.0.2-0ubuntu1/taskflow/persistence/backends/impl_memory.py
--- 5.12.0-2/taskflow/persistence/backends/impl_memory.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/impl_memory.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
@@ -31,10 +29,10 @@ class FakeInode(tree.Node):
     """A in-memory filesystem inode-like object."""
 
     def __init__(self, item, path, value=None):
-        super(FakeInode, self).__init__(item, path=path, value=value)
+        super().__init__(item, path=path, value=value)
 
 
-class FakeFilesystem(object):
+class FakeFilesystem:
     """An in-memory filesystem-like structure.
 
     This filesystem uses posix style paths **only** so users must be careful
@@ -249,8 +247,7 @@ class FakeFilesystem(object):
             parts = path.split(pp.sep)[1:]
         if include_root:
             parts.insert(0, self._root.item)
-        for piece in parts:
-            yield piece
+        yield from parts
 
     def __delitem__(self, path):
         self.delete(path, recursive=True)
@@ -258,7 +255,7 @@ class FakeFilesystem(object):
     @staticmethod
     def _stringify_node(node):
         if 'target' in node.metadata:
-            return "%s (link to %s)" % (node.item, node.metadata['target'])
+            return "{} (link to {})".format(node.item, node.metadata['target'])
         else:
             return str(node.item)
 
@@ -309,7 +306,7 @@ class MemoryBackend(path_based.PathBased
     DEFAULT_PATH = pp.sep
 
     def __init__(self, conf=None):
-        super(MemoryBackend, self).__init__(conf)
+        super().__init__(conf)
         self.memory = FakeFilesystem(deep_copy=self._conf.get('deep_copy',
                                                               True))
         self.lock = fasteners.ReaderWriterLock()
@@ -323,7 +320,7 @@ class MemoryBackend(path_based.PathBased
 
 class Connection(path_based.PathBasedConnection):
     def __init__(self, backend):
-        super(Connection, self).__init__(backend)
+        super().__init__(backend)
         self.upgrade()
 
     @contextlib.contextmanager
diff -pruN 5.12.0-2/taskflow/persistence/backends/impl_sqlalchemy.py 6.0.2-0ubuntu1/taskflow/persistence/backends/impl_sqlalchemy.py
--- 5.12.0-2/taskflow/persistence/backends/impl_sqlalchemy.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/impl_sqlalchemy.py	2025-08-25 12:49:32.000000000 +0000
@@ -179,7 +179,7 @@ def _ping_listener(dbapi_conn, connectio
             raise
 
 
-class _Alchemist(object):
+class _Alchemist:
     """Internal <-> external row <-> objects + other helper functions.
 
     NOTE(harlowja): for internal usage only.
@@ -235,7 +235,7 @@ class SQLAlchemyBackend(base.Backend):
         }
     """
     def __init__(self, conf, engine=None):
-        super(SQLAlchemyBackend, self).__init__(conf)
+        super().__init__(conf)
         if engine is not None:
             self._engine = engine
             self._owns_engine = False
@@ -581,8 +581,7 @@ class Connection(base.Connection):
             exc.raise_with_cause(exc.StorageFailure,
                                  "Failed getting flow details in"
                                  " logbook '%s'" % book_uuid)
-        for flow_details in gathered:
-            yield flow_details
+        yield from gathered
 
     def get_flow_details(self, fd_uuid, lazy=False):
         try:
@@ -631,8 +630,7 @@ class Connection(base.Connection):
             exc.raise_with_cause(exc.StorageFailure,
                                  "Failed getting atom details in flow"
                                  " detail '%s'" % fd_uuid)
-        for atom_details in gathered:
-            yield atom_details
+        yield from gathered
 
     def close(self):
         pass
diff -pruN 5.12.0-2/taskflow/persistence/backends/impl_zookeeper.py 6.0.2-0ubuntu1/taskflow/persistence/backends/impl_zookeeper.py
--- 5.12.0-2/taskflow/persistence/backends/impl_zookeeper.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/impl_zookeeper.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 AT&T Labs All Rights Reserved.
 #    Copyright (C) 2015 Rackspace Hosting All Rights Reserved.
 #
@@ -56,7 +54,7 @@ class ZkBackend(path_based.PathBasedBack
     DEFAULT_PATH = '/taskflow'
 
     def __init__(self, conf, client=None):
-        super(ZkBackend, self).__init__(conf)
+        super().__init__(conf)
         if not paths.isabs(self._path):
             raise ValueError("Zookeeper path must be absolute")
         if client is not None:
@@ -87,7 +85,7 @@ class ZkBackend(path_based.PathBasedBack
 
 class ZkConnection(path_based.PathBasedConnection):
     def __init__(self, backend, client, conf):
-        super(ZkConnection, self).__init__(backend)
+        super().__init__(backend)
         self._conf = conf
         self._client = client
         with self._exc_wrapper():
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/env.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/env.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/env.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/env.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py	2025-08-25 12:49:32.000000000 +0000
@@ -0,0 +1,74 @@
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+"""Add unique into all indexes
+
+Revision ID: 00af93df9d77
+Revises: 40fc8c914bd2
+Create Date: 2025-02-28 15:44:37.066720
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '00af93df9d77'
+down_revision = '40fc8c914bd2'
+
+from alembic import op
+
+
+def upgrade():
+    bind = op.get_bind()
+    engine = bind.engine
+    if engine.name == 'mysql':
+        with op.batch_alter_table("logbooks") as batch_op:
+            batch_op.drop_index("logbook_uuid_idx")
+            batch_op.create_index(
+                index_name="logbook_uuid_idx",
+                columns=['uuid'],
+                unique=True)
+
+        with op.batch_alter_table("flowdetails") as batch_op:
+            batch_op.drop_index("flowdetails_uuid_idx")
+            batch_op.create_index(
+                index_name="flowdetails_uuid_idx",
+                columns=['uuid'],
+                unique=True)
+
+        with op.batch_alter_table("atomdetails") as batch_op:
+            batch_op.drop_index("taskdetails_uuid_idx")
+            batch_op.create_index(
+                index_name="taskdetails_uuid_idx",
+                columns=['uuid'],
+                unique=True)
+
+
+def downgrade():
+    bind = op.get_bind()
+    engine = bind.engine
+    if engine.name == 'mysql':
+        with op.batch_alter_table("logbooks") as batch_op:
+            batch_op.drop_index("logbook_uuid_idx")
+            batch_op.create_index(
+                index_name="logbook_uuid_idx",
+                columns=['uuid'])
+
+        with op.batch_alter_table("flowdetails") as batch_op:
+            batch_op.drop_index("flowdetails_uuid_idx")
+            batch_op.create_index(
+                index_name="flowdetails_uuid_idx",
+                columns=['uuid'])
+
+        with op.batch_alter_table("atomdetails") as batch_op:
+            batch_op.drop_index("taskdetails_uuid_idx")
+            batch_op.create_index(
+                index_name="taskdetails_uuid_idx",
+                columns=['uuid'])
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/backends/sqlalchemy/tables.py 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/tables.py
--- 5.12.0-2/taskflow/persistence/backends/sqlalchemy/tables.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/backends/sqlalchemy/tables.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/persistence/base.py 6.0.2-0ubuntu1/taskflow/persistence/base.py
--- 5.12.0-2/taskflow/persistence/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/base.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +17,7 @@ import abc
 from taskflow.persistence import models
 
 
-class Backend(object, metaclass=abc.ABCMeta):
+class Backend(metaclass=abc.ABCMeta):
     """Base class for persistence backends."""
 
     def __init__(self, conf):
@@ -39,7 +37,7 @@ class Backend(object, metaclass=abc.ABCM
         """Closes any resources this backend has open."""
 
 
-class Connection(object, metaclass=abc.ABCMeta):
+class Connection(metaclass=abc.ABCMeta):
     """Base class for backend connections."""
 
     @property
diff -pruN 5.12.0-2/taskflow/persistence/models.py 6.0.2-0ubuntu1/taskflow/persistence/models.py
--- 5.12.0-2/taskflow/persistence/models.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/models.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
@@ -43,7 +41,7 @@ def _format_meta(metadata, indent):
         # in percent format.
         if k == 'progress' and isinstance(v, misc.NUMERIC_TYPES):
             v = "%0.2f%%" % (v * 100.0)
-        lines.append("%s+ %s = %s" % (" " * (indent + 2), k, v))
+        lines.append("{}+ {} = {}".format(" " * (indent + 2), k, v))
     return lines
 
 
@@ -55,8 +53,8 @@ def _format_shared(obj, indent):
     for attr_name in ("uuid", "state"):
         if not hasattr(obj, attr_name):
             continue
-        lines.append("%s- %s = %s" % (" " * indent, attr_name,
-                                      getattr(obj, attr_name)))
+        lines.append("{}- {} = {}".format(" " * indent, attr_name,
+                                          getattr(obj, attr_name)))
     return lines
 
 
@@ -98,7 +96,7 @@ def _fix_meta(data):
     return meta
 
 
-class LogBook(object):
+class LogBook:
     """A collection of flow details and associated metadata.
 
     Typically this class contains a collection of flow detail entries
@@ -143,7 +141,7 @@ class LogBook(object):
          - created_at = ...
         """
         cls_name = self.__class__.__name__
-        lines = ["%s%s: '%s'" % (" " * indent, cls_name, self.name)]
+        lines = ["{}{}: '{}'".format(" " * indent, cls_name, self.name)]
         lines.extend(_format_shared(self, indent=indent + 1))
         lines.extend(_format_meta(self.meta, indent=indent + 1))
         if self.created_at is not None:
@@ -258,8 +256,7 @@ class LogBook(object):
         return self._name
 
     def __iter__(self):
-        for fd in self._flowdetails_by_id.values():
-            yield fd
+        yield from self._flowdetails_by_id.values()
 
     def __len__(self):
         return len(self._flowdetails_by_id)
@@ -288,7 +285,7 @@ class LogBook(object):
         return clone
 
 
-class FlowDetail(object):
+class FlowDetail:
     """A collection of atom details and associated metadata.
 
     Typically this class contains a collection of atom detail entries that
@@ -345,7 +342,7 @@ class FlowDetail(object):
          - state = ...
         """
         cls_name = self.__class__.__name__
-        lines = ["%s%s: '%s'" % (" " * indent, cls_name, self.name)]
+        lines = ["{}{}: '{}'".format(" " * indent, cls_name, self.name)]
         lines.extend(_format_shared(self, indent=indent + 1))
         lines.extend(_format_meta(self.meta, indent=indent + 1))
         for atom_detail in self:
@@ -463,14 +460,13 @@ class FlowDetail(object):
         return self._name
 
     def __iter__(self):
-        for ad in self._atomdetails_by_id.values():
-            yield ad
+        yield from self._atomdetails_by_id.values()
 
     def __len__(self):
         return len(self._atomdetails_by_id)
 
 
-class AtomDetail(object, metaclass=abc.ABCMeta):
+class AtomDetail(metaclass=abc.ABCMeta):
     """A collection of atom specific runtime information and metadata.
 
     This is a base **abstract** class that contains attributes that are used
@@ -688,14 +684,14 @@ class AtomDetail(object, metaclass=abc.A
     def pformat(self, indent=0, linesep=os.linesep):
         """Pretty formats this atom detail into a string."""
         cls_name = self.__class__.__name__
-        lines = ["%s%s: '%s'" % (" " * (indent), cls_name, self.name)]
+        lines = ["{}{}: '{}'".format(" " * (indent), cls_name, self.name)]
         lines.extend(_format_shared(self, indent=indent + 1))
         lines.append("%s- version = %s"
                      % (" " * (indent + 1), misc.get_version_string(self)))
         lines.append("%s- results = %s"
                      % (" " * (indent + 1), self.results))
-        lines.append("%s- failure = %s" % (" " * (indent + 1),
-                                           bool(self.failure)))
+        lines.append("{}- failure = {}".format(" " * (indent + 1),
+                                               bool(self.failure)))
         lines.extend(_format_meta(self.meta, indent=indent + 1))
         return linesep.join(lines)
 
@@ -793,7 +789,7 @@ class TaskDetail(AtomDetail):
                                           " task details")
         if other is self:
             return self
-        super(TaskDetail, self).merge(other, deep_copy=deep_copy)
+        super().merge(other, deep_copy=deep_copy)
         self.results = other.results
         self.revert_results = other.revert_results
         return self
@@ -834,7 +830,7 @@ class RetryDetail(AtomDetail):
     """
 
     def __init__(self, name, uuid):
-        super(RetryDetail, self).__init__(name, uuid)
+        super().__init__(name, uuid)
         self.results = []
 
     def reset(self, state):
@@ -983,7 +979,7 @@ class RetryDetail(AtomDetail):
                 new_results.append((data, new_failures))
             return new_results
 
-        obj = super(RetryDetail, cls).from_dict(data)
+        obj = super().from_dict(data)
         obj.results = decode_results(obj.results)
         return obj
 
@@ -1001,7 +997,7 @@ class RetryDetail(AtomDetail):
                 new_results.append((data, new_failures))
             return new_results
 
-        base = super(RetryDetail, self).to_dict()
+        base = super().to_dict()
         base['results'] = encode_results(base.get('results'))
         return base
 
@@ -1033,7 +1029,7 @@ class RetryDetail(AtomDetail):
                                           " retry details")
         if other is self:
             return self
-        super(RetryDetail, self).merge(other, deep_copy=deep_copy)
+        super().merge(other, deep_copy=deep_copy)
         results = []
         # NOTE(imelnikov): we can't just deep copy Failures, as they
         # contain tracebacks, which are not copyable.
@@ -1053,8 +1049,7 @@ _DETAIL_TO_NAME = {
     RetryDetail: 'RETRY_DETAIL',
     TaskDetail: 'TASK_DETAIL',
 }
-_NAME_TO_DETAIL = dict((name, cls)
-                       for (cls, name) in _DETAIL_TO_NAME.items())
+_NAME_TO_DETAIL = {name: cls for (cls, name) in _DETAIL_TO_NAME.items()}
 ATOM_TYPES = list(_NAME_TO_DETAIL.keys())
 
 
diff -pruN 5.12.0-2/taskflow/persistence/path_based.py 6.0.2-0ubuntu1/taskflow/persistence/path_based.py
--- 5.12.0-2/taskflow/persistence/path_based.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/persistence/path_based.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -36,7 +34,7 @@ class PathBasedBackend(base.Backend, met
     DEFAULT_PATH = None
 
     def __init__(self, conf):
-        super(PathBasedBackend, self).__init__(conf)
+        super().__init__(conf)
         self._path = self._conf.get('path', None)
         if not self._path:
             self._path = self.DEFAULT_PATH
diff -pruN 5.12.0-2/taskflow/retry.py 6.0.2-0ubuntu1/taskflow/retry.py
--- 5.12.0-2/taskflow/retry.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/retry.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
@@ -71,7 +69,7 @@ EXECUTE_REVERT_HISTORY = 'history'
 REVERT_FLOW_FAILURES = 'flow_failures'
 
 
-class History(object):
+class History:
     """Helper that simplifies interactions with retry historical contents."""
 
     def __init__(self, contents, failure=None):
@@ -99,8 +97,7 @@ class History(object):
                 self._contents[index],
             ]
         for (provided, outcomes) in contents:
-            for (owner, outcome) in outcomes.items():
-                yield (owner, outcome)
+            yield from outcomes.items()
 
     def __len__(self):
         return len(self._contents)
@@ -154,10 +151,10 @@ class Retry(atom.Atom, metaclass=abc.ABC
 
     def __init__(self, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None):
-        super(Retry, self).__init__(name=name, provides=provides,
-                                    requires=requires, rebind=rebind,
-                                    auto_extract=auto_extract,
-                                    ignore_list=[EXECUTE_REVERT_HISTORY])
+        super().__init__(name=name, provides=provides,
+                         requires=requires, rebind=rebind,
+                         auto_extract=auto_extract,
+                         ignore_list=[EXECUTE_REVERT_HISTORY])
 
     @property
     def name(self):
@@ -262,8 +259,7 @@ class Times(Retry):
 
     def __init__(self, attempts=1, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None, revert_all=False):
-        super(Times, self).__init__(name, provides, requires,
-                                    auto_extract, rebind)
+        super().__init__(name, provides, requires, auto_extract, rebind)
         self._attempts = attempts
 
         if revert_all:
@@ -285,8 +281,7 @@ class ForEachBase(Retry):
 
     def __init__(self, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None, revert_all=False):
-        super(ForEachBase, self).__init__(name, provides, requires,
-                                          auto_extract, rebind)
+        super().__init__(name, provides, requires, auto_extract, rebind)
 
         if revert_all:
             self._revert_action = REVERT_ALL
@@ -336,8 +331,8 @@ class ForEach(ForEachBase):
 
     def __init__(self, values, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None, revert_all=False):
-        super(ForEach, self).__init__(name, provides, requires,
-                                      auto_extract, rebind, revert_all)
+        super().__init__(name, provides, requires, auto_extract, rebind,
+                         revert_all)
         self._values = values
 
     def on_failure(self, history, *args, **kwargs):
@@ -368,9 +363,8 @@ class ParameterizedForEach(ForEachBase):
 
     def __init__(self, name=None, provides=None, requires=None,
                  auto_extract=True, rebind=None, revert_all=False):
-        super(ParameterizedForEach, self).__init__(name, provides, requires,
-                                                   auto_extract, rebind,
-                                                   revert_all)
+        super().__init__(name, provides, requires, auto_extract, rebind,
+                         revert_all)
 
     def on_failure(self, values, history, *args, **kwargs):
         return self._on_failure(values, history)
diff -pruN 5.12.0-2/taskflow/states.py 6.0.2-0ubuntu1/taskflow/states.py
--- 5.12.0-2/taskflow/states.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/states.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/storage.py 6.0.2-0ubuntu1/taskflow/storage.py
--- 5.12.0-2/taskflow/storage.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/storage.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -87,7 +85,7 @@ META_PROGRESS = 'progress'
 META_PROGRESS_DETAILS = 'progress_details'
 
 
-class _ProviderLocator(object):
+class _ProviderLocator:
     """Helper to start to better decouple the finding logic from storage.
 
     WIP: part of the larger effort to cleanup/refactor the finding of named
@@ -149,7 +147,7 @@ class _ProviderLocator(object):
                 return (searched_providers, providers_and_results)
         if not atom_providers:
             return (searched_providers, providers_and_results)
-        atom_providers_by_name = dict((p.name, p) for p in atom_providers)
+        atom_providers_by_name = {p.name: p for p in atom_providers}
         for accessible_atom_names in iter(scope_walker):
             # *Always* retain the scope ordering (if any matches
             # happen); instead of retaining the possible provider match
@@ -199,7 +197,7 @@ class _ProviderLocator(object):
         _searched_providers, providers_and_results = self._find(
             looking_for, scope_walker=scope_walker,
             short_circuit=False, find_potentials=True)
-        return set(p for (p, _provider_results) in providers_and_results)
+        return {p for (p, _provider_results) in providers_and_results}
 
     def find(self, looking_for, scope_walker=None, short_circuit=True):
         """Returns the accessible providers."""
@@ -208,7 +206,7 @@ class _ProviderLocator(object):
                           find_potentials=False)
 
 
-class _Provider(object):
+class _Provider:
     """A named symbol provider that produces a output at the given index."""
 
     def __init__(self, name, index):
@@ -270,7 +268,7 @@ def _item_from_first_of(providers, looki
         " extraction" % (looking_for, providers))
 
 
-class Storage(object):
+class Storage:
     """Interface between engines and logbook and its backend (if any).
 
     This class provides a simple interface to save atoms of a given flow and
@@ -326,8 +324,8 @@ class Storage(object):
                 fail_cache[states.REVERT] = ad.revert_failure
             self._failures[ad.name] = fail_cache
 
-        self._atom_name_to_uuid = dict((ad.name, ad.uuid)
-                                       for ad in self._flowdetail)
+        self._atom_name_to_uuid = {ad.name: ad.uuid
+                                   for ad in self._flowdetail}
         try:
             source, _clone = self._atomdetail_by_name(
                 self.injector_name, expected_type=models.TaskDetail)
@@ -336,7 +334,7 @@ class Storage(object):
         else:
             names_iter = source.results.keys()
             self._set_result_mapping(source.name,
-                                     dict((name, name) for name in names_iter))
+                                     {name: name for name in names_iter})
 
     def _with_connection(self, functor, *args, **kwargs):
         # Run the given functor with a backend connection as its first
@@ -911,7 +909,7 @@ class Storage(object):
             provider_name, names = save_persistent()
 
         self._set_result_mapping(provider_name,
-                                 dict((name, name) for name in names))
+                                 {name: name for name in names})
 
     def _fetch_providers(self, looking_for, providers=None):
         """Return pair of (default providers, atom providers)."""
diff -pruN 5.12.0-2/taskflow/task.py 6.0.2-0ubuntu1/taskflow/task.py
--- 5.12.0-2/taskflow/task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/task.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright 2015 Hewlett-Packard Development Company, L.P.
 #    Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
@@ -61,10 +59,10 @@ class Task(atom.Atom, metaclass=abc.ABCM
                  ignore_list=None, revert_rebind=None, revert_requires=None):
         if name is None:
             name = reflection.get_class_name(self)
-        super(Task, self).__init__(name, provides=provides, requires=requires,
-                                   auto_extract=auto_extract, rebind=rebind,
-                                   inject=inject, revert_rebind=revert_rebind,
-                                   revert_requires=revert_requires)
+        super().__init__(name, provides=provides, requires=requires,
+                         auto_extract=auto_extract, rebind=rebind,
+                         inject=inject, revert_rebind=revert_rebind,
+                         revert_requires=revert_requires)
         self._notifier = notifier.RestrictedNotifier(self.TASK_EVENTS)
 
     @property
@@ -131,8 +129,7 @@ class FunctorTask(Task):
                                  " be callable")
         if name is None:
             name = reflection.get_callable_name(execute)
-        super(FunctorTask, self).__init__(name, provides=provides,
-                                          inject=inject)
+        super().__init__(name, provides=provides, inject=inject)
         self._execute = execute
         self._revert = revert
         if version is not None:
@@ -190,12 +187,12 @@ class ReduceFunctorTask(Task):
 
         if name is None:
             name = reflection.get_callable_name(functor)
-        super(ReduceFunctorTask, self).__init__(name=name,
-                                                provides=provides,
-                                                inject=inject,
-                                                requires=requires,
-                                                rebind=rebind,
-                                                auto_extract=auto_extract)
+        super().__init__(name=name,
+                         provides=provides,
+                         inject=inject,
+                         requires=requires,
+                         rebind=rebind,
+                         auto_extract=auto_extract)
 
         self._functor = functor
 
@@ -235,10 +232,10 @@ class MapFunctorTask(Task):
 
         if name is None:
             name = reflection.get_callable_name(functor)
-        super(MapFunctorTask, self).__init__(name=name, provides=provides,
-                                             inject=inject, requires=requires,
-                                             rebind=rebind,
-                                             auto_extract=auto_extract)
+        super().__init__(name=name, provides=provides,
+                         inject=inject, requires=requires,
+                         rebind=rebind,
+                         auto_extract=auto_extract)
 
         self._functor = functor
 
diff -pruN 5.12.0-2/taskflow/test.py 6.0.2-0ubuntu1/taskflow/test.py
--- 5.12.0-2/taskflow/test.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/test.py	2025-08-25 12:49:32.000000000 +0000
@@ -27,7 +27,7 @@ from taskflow.tests import utils
 from taskflow.utils import misc
 
 
-class GreaterThanEqual(object):
+class GreaterThanEqual:
     """Matches if the item is geq than the matchers reference object."""
 
     def __init__(self, source):
@@ -36,10 +36,10 @@ class GreaterThanEqual(object):
     def match(self, other):
         if other >= self.source:
             return None
-        return matchers.Mismatch("%s was not >= %s" % (other, self.source))
+        return matchers.Mismatch("{} was not >= {}".format(other, self.source))
 
 
-class FailureRegexpMatcher(object):
+class FailureRegexpMatcher:
     """Matches if the failure was caused by the given exception and message.
 
     This will match if a given failure contains and exception of the given
@@ -60,7 +60,7 @@ class FailureRegexpMatcher(object):
                                  (failure, self.exc_class))
 
 
-class ItemsEqual(object):
+class ItemsEqual:
     """Matches the items in two sequences.
 
     This matcher will validate that the provided sequence has the same elements
@@ -166,7 +166,7 @@ class TestCase(base.BaseTestCase):
 class MockTestCase(TestCase):
 
     def setUp(self):
-        super(MockTestCase, self).setUp()
+        super().setUp()
         self.master_mock = mock.Mock(name='master_mock')
 
     def patch(self, target, autospec=True, **kwargs):
diff -pruN 5.12.0-2/taskflow/tests/test_examples.py 6.0.2-0ubuntu1/taskflow/tests/test_examples.py
--- 5.12.0-2/taskflow/tests/test_examples.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/test_examples.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_builder.py 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_builder.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_builder.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_builder.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -298,7 +296,7 @@ class BuildersTest(test.TestCase):
             flow, initial_state=st.RUNNING)
         transitions = list(machine_runner.run_iter(builder.START))
 
-        occurrences = dict((t, transitions.count(t)) for t in transitions)
+        occurrences = {t: transitions.count(t) for t in transitions}
         self.assertEqual(10, occurrences.get((st.SCHEDULING, st.WAITING)))
         self.assertEqual(10, occurrences.get((st.WAITING, st.ANALYZING)))
         self.assertEqual(9, occurrences.get((st.ANALYZING, st.SCHEDULING)))
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_compile.py 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_compile.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_compile.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_compile.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -110,7 +108,7 @@ class PatternCompileTest(test.TestCase):
             ('c', 'test[$]'),
             ('d', 'test[$]'),
         ])
-        self.assertEqual(set(['test']), set(g.no_predecessors_iter()))
+        self.assertEqual({'test'}, set(g.no_predecessors_iter()))
 
     def test_linear_nested(self):
         a, b, c, d = test_utils.make_many(4)
@@ -283,7 +281,7 @@ class PatternCompileTest(test.TestCase):
         self.assertEqual(4, len(g))
         self.assertCountEqual(g.edges(data=True), [
             ('test', 'a', {'invariant': True}),
-            ('a', 'b', {'reasons': set(['x'])}),
+            ('a', 'b', {'reasons': {'x'}}),
             ('b', 'test[$]', {'invariant': True}),
         ])
         self.assertCountEqual(['test'], g.no_predecessors_iter())
@@ -302,7 +300,7 @@ class PatternCompileTest(test.TestCase):
         self.assertCountEqual(g.edges(data=True), [
             ('test', 'a', {'invariant': True}),
             ('test2', 'b', {'invariant': True}),
-            ('a', 'test2', {'reasons': set(['x'])}),
+            ('a', 'test2', {'reasons': {'x'}}),
             ('b', 'c', {'invariant': True}),
             ('c', 'test2[$]', {'invariant': True}),
             ('test2[$]', 'test[$]', {'invariant': True}),
@@ -325,7 +323,7 @@ class PatternCompileTest(test.TestCase):
             ('a', 'test[$]', {'invariant': True}),
 
             # The 'x' requirement is produced out of test2...
-            ('test2[$]', 'a', {'reasons': set(['x'])}),
+            ('test2[$]', 'a', {'reasons': {'x'}}),
 
             ('test2', 'b', {'invariant': True}),
             ('b', 'c', {'invariant': True}),
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_creation.py 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_creation.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_creation.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_creation.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,11 +24,6 @@ from taskflow.tests import utils
 from taskflow.utils import eventlet_utils as eu
 from taskflow.utils import persistence_utils as pu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 class ParallelCreationTest(test.TestCase):
     @staticmethod
@@ -48,26 +41,12 @@ class ParallelCreationTest(test.TestCase
             self.assertIsInstance(eng._task_executor,
                                   executor.ParallelThreadTaskExecutor)
 
-    @testtools.skipIf(pe is None, 'process_executor is not available')
-    def test_process_string_creation(self):
-        for s in ['process', 'processes']:
-            eng = self._create_engine(executor=s)
-            self.assertIsInstance(eng._task_executor,
-                                  pe.ParallelProcessTaskExecutor)
-
     def test_thread_executor_creation(self):
         with futurist.ThreadPoolExecutor(1) as e:
             eng = self._create_engine(executor=e)
             self.assertIsInstance(eng._task_executor,
                                   executor.ParallelThreadTaskExecutor)
 
-    @testtools.skipIf(pe is None, 'process_executor is not available')
-    def test_process_executor_creation(self):
-        with futurist.ProcessPoolExecutor(1) as e:
-            eng = self._create_engine(executor=e)
-            self.assertIsInstance(eng._task_executor,
-                                  pe.ParallelProcessTaskExecutor)
-
     @testtools.skipIf(not eu.EVENTLET_AVAILABLE, 'eventlet is not available')
     def test_green_executor_creation(self):
         with futurist.GreenThreadPoolExecutor(1) as e:
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_process_executor.py 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_process_executor.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_process_executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_process_executor.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-import errno
-import socket
-import threading
-
-import testtools
-
-from taskflow import task
-from taskflow import test
-from taskflow.test import mock
-from taskflow.tests import utils as test_utils
-
-try:
-    import asyncore
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    asyncore = None
-    pe = None
-
-
-@testtools.skipIf(asyncore is None, 'process_executor is not available')
-class ProcessExecutorHelpersTest(test.TestCase):
-    def test_reader(self):
-        capture_buf = []
-
-        def do_capture(identity, message_capture_func):
-            capture_buf.append(message_capture_func())
-
-        r = pe.Reader(b"secret", do_capture)
-        for data in pe._encode_message(b"secret", ['hi'], b'me'):
-            self.assertEqual(len(data), r.bytes_needed)
-            r.feed(data)
-
-        self.assertEqual(1, len(capture_buf))
-        self.assertEqual(['hi'], capture_buf[0])
-
-    def test_bad_hmac_reader(self):
-        r = pe.Reader(b"secret-2", lambda ident, capture_func: capture_func())
-        in_data = b"".join(pe._encode_message(b"secret", ['hi'], b'me'))
-        self.assertRaises(pe.BadHmacValueError, r.feed, in_data)
-
-    @mock.patch("socket.socket")
-    def test_no_connect_channel(self, mock_socket_factory):
-        mock_sock = mock.MagicMock()
-        mock_socket_factory.return_value = mock_sock
-        mock_sock.connect.side_effect = socket.error(errno.ECONNREFUSED,
-                                                     'broken')
-        c = pe.Channel(2222, b"me", b"secret")
-        self.assertRaises(socket.error, c.send, "hi")
-        self.assertTrue(c.dead)
-        self.assertTrue(mock_sock.close.called)
-
-    def test_send_and_dispatch(self):
-        details_capture = []
-
-        t = test_utils.DummyTask("rcver")
-        t.notifier.register(
-            task.EVENT_UPDATE_PROGRESS,
-            lambda _event_type, details: details_capture.append(details))
-
-        d = pe.Dispatcher({}, b'secret', b'server-josh')
-        d.setup()
-        d.targets[b'child-josh'] = t
-
-        s = threading.Thread(target=asyncore.loop, kwargs={'map': d.map})
-        s.start()
-        self.addCleanup(s.join)
-
-        c = pe.Channel(d.port, b'child-josh', b'secret')
-        self.addCleanup(c.close)
-
-        send_what = [
-            {'progress': 0.1},
-            {'progress': 0.2},
-            {'progress': 0.3},
-            {'progress': 0.4},
-            {'progress': 0.5},
-            {'progress': 0.6},
-            {'progress': 0.7},
-            {'progress': 0.8},
-            {'progress': 0.9},
-        ]
-        e_s = pe.EventSender(c)
-        for details in send_what:
-            e_s(task.EVENT_UPDATE_PROGRESS, details)
-
-        # This forces the thread to shutdown (since the asyncore loop
-        # will exit when no more sockets exist to process...)
-        d.close()
-
-        self.assertEqual(len(send_what), len(details_capture))
-        self.assertEqual(send_what, details_capture)
diff -pruN 5.12.0-2/taskflow/tests/unit/action_engine/test_scoping.py 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_scoping.py
--- 5.12.0-2/taskflow/tests/unit/action_engine/test_scoping.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/action_engine/test_scoping.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -272,7 +270,7 @@ class MixedPatternScopingTest(test.TestC
         # This may be different after/if the following is resolved:
         #
         # https://github.com/networkx/networkx/issues/1181 (and a few others)
-        self.assertEqual(set(['customer', 'customer2']),
+        self.assertEqual({'customer', 'customer2'},
                          set(_get_scopes(c, washer)[0]))
         self.assertEqual([], _get_scopes(c, customer2))
         self.assertEqual([], _get_scopes(c, customer))
diff -pruN 5.12.0-2/taskflow/tests/unit/jobs/base.py 6.0.2-0ubuntu1/taskflow/tests/unit/jobs/base.py
--- 5.12.0-2/taskflow/tests/unit/jobs/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/jobs/base.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -37,7 +35,7 @@ def connect_close(*args):
             a.close()
 
 
-class BoardTestMixin(object):
+class BoardTestMixin:
 
     @contextlib.contextmanager
     def flush(self, client):
diff -pruN 5.12.0-2/taskflow/tests/unit/jobs/test_entrypoint.py 6.0.2-0ubuntu1/taskflow/tests/unit/jobs/test_entrypoint.py
--- 5.12.0-2/taskflow/tests/unit/jobs/test_entrypoint.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/jobs/test_entrypoint.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/jobs/test_redis_job.py 6.0.2-0ubuntu1/taskflow/tests/unit/jobs/test_redis_job.py
--- 5.12.0-2/taskflow/tests/unit/jobs/test_redis_job.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/jobs/test_redis_job.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -101,7 +99,7 @@ class RedisJobboardTest(test.TestCase, b
             self.assertEqual(0, len(possible_jobs))
 
     def setUp(self):
-        super(RedisJobboardTest, self).setUp()
+        super().setUp()
         self.client, self.board = self.create_board()
 
     def test__make_client(self):
diff -pruN 5.12.0-2/taskflow/tests/unit/jobs/test_zk_job.py 6.0.2-0ubuntu1/taskflow/tests/unit/jobs/test_zk_job.py
--- 5.12.0-2/taskflow/tests/unit/jobs/test_zk_job.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/jobs/test_zk_job.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -137,7 +135,7 @@ class ZookeeperJobboardTest(test.TestCas
         return (client, board)
 
     def setUp(self):
-        super(ZookeeperJobboardTest, self).setUp()
+        super().setUp()
         self.client, self.board = self.create_board()
 
 
@@ -152,7 +150,7 @@ class ZakeJobboardTest(test.TestCase, Zo
         return (client, board)
 
     def setUp(self):
-        super(ZakeJobboardTest, self).setUp()
+        super().setUp()
         self.client, self.board = self.create_board()
         self.bad_paths = [self.board.path, self.board.trash_path]
         self.bad_paths.extend(zake_utils.partition_path(self.board.path))
diff -pruN 5.12.0-2/taskflow/tests/unit/patterns/test_graph_flow.py 6.0.2-0ubuntu1/taskflow/tests/unit/patterns/test_graph_flow.py
--- 5.12.0-2/taskflow/tests/unit/patterns/test_graph_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/patterns/test_graph_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -76,8 +74,8 @@ class GraphFlowTest(test.TestCase):
         self.assertEqual(1, len(f))
         self.assertEqual([task], list(f))
         self.assertEqual([], list(f.iter_links()))
-        self.assertEqual(set(['a', 'b']), f.requires)
-        self.assertEqual(set(['c', 'd']), f.provides)
+        self.assertEqual({'a', 'b'}, f.requires)
+        self.assertEqual({'c', 'd'}, f.provides)
 
     def test_graph_flow_two_independent_tasks(self):
         task1 = _task(name='task1')
@@ -95,11 +93,11 @@ class GraphFlowTest(test.TestCase):
 
         self.assertEqual(2, len(f))
         self.assertCountEqual(f, [task1, task2])
-        self.assertEqual([(task1, task2, {'reasons': set(['a'])})],
+        self.assertEqual([(task1, task2, {'reasons': {'a'}})],
                          list(f.iter_links()))
 
         self.assertEqual(set(), f.requires)
-        self.assertEqual(set(['a']), f.provides)
+        self.assertEqual({'a'}, f.provides)
 
     def test_graph_flow_two_dependent_tasks_two_different_calls(self):
         task1 = _task(name='task1', provides=['a'])
@@ -108,7 +106,7 @@ class GraphFlowTest(test.TestCase):
 
         self.assertEqual(2, len(f))
         self.assertCountEqual(f, [task1, task2])
-        self.assertEqual([(task1, task2, {'reasons': set(['a'])})],
+        self.assertEqual([(task1, task2, {'reasons': {'a'}})],
                          list(f.iter_links()))
 
     def test_graph_flow_two_task_same_provide(self):
@@ -116,14 +114,14 @@ class GraphFlowTest(test.TestCase):
         task2 = _task(name='task2', provides=['a', 'c'])
         f = gf.Flow('test')
         f.add(task2, task1)
-        self.assertEqual(set(['a', 'b', 'c']), f.provides)
+        self.assertEqual({'a', 'b', 'c'}, f.provides)
 
     def test_graph_flow_ambiguous_provides(self):
         task1 = _task(name='task1', provides=['a', 'b'])
         task2 = _task(name='task2', provides=['a'])
         f = gf.Flow('test')
         f.add(task1, task2)
-        self.assertEqual(set(['a', 'b']), f.provides)
+        self.assertEqual({'a', 'b'}, f.provides)
         task3 = _task(name='task3', requires=['a'])
         self.assertRaises(exc.AmbiguousDependency, f.add, task3)
 
@@ -132,7 +130,7 @@ class GraphFlowTest(test.TestCase):
         task2 = _task(name='task2', requires=['a', 'b'])
         f = gf.Flow('test')
         f.add(task1, task2, resolve_requires=False)
-        self.assertEqual(set(['a', 'b']), f.requires)
+        self.assertEqual({'a', 'b'}, f.requires)
 
     def test_graph_flow_no_resolve_existing(self):
         task1 = _task(name='task1', requires=['a', 'b'])
@@ -140,7 +138,7 @@ class GraphFlowTest(test.TestCase):
         f = gf.Flow('test')
         f.add(task1)
         f.add(task2, resolve_existing=False)
-        self.assertEqual(set(['a', 'b']), f.requires)
+        self.assertEqual({'a', 'b'}, f.requires)
 
     def test_graph_flow_resolve_existing(self):
         task1 = _task(name='task1', requires=['a', 'b'])
@@ -148,7 +146,7 @@ class GraphFlowTest(test.TestCase):
         f = gf.Flow('test')
         f.add(task1)
         f.add(task2, resolve_existing=True)
-        self.assertEqual(set([]), f.requires)
+        self.assertEqual(set(), f.requires)
 
     def test_graph_flow_with_retry(self):
         ret = retry.AlwaysRevert(requires=['a'], provides=['b'])
@@ -156,11 +154,11 @@ class GraphFlowTest(test.TestCase):
         self.assertIs(f.retry, ret)
         self.assertEqual('test_retry', ret.name)
 
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['b']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'b'}, f.provides)
 
     def test_graph_flow_ordering(self):
-        task1 = _task('task1', provides=set(['a', 'b']))
+        task1 = _task('task1', provides={'a', 'b'})
         task2 = _task('task2', provides=['c'], requires=['a', 'b'])
         task3 = _task('task3', provides=[], requires=['c'])
         f = gf.Flow('test').add(task1, task2, task3)
@@ -168,8 +166,8 @@ class GraphFlowTest(test.TestCase):
         self.assertEqual(3, len(f))
 
         self.assertCountEqual(list(f.iter_links()), [
-            (task1, task2, {'reasons': set(['a', 'b'])}),
-            (task2, task3, {'reasons': set(['c'])})
+            (task1, task2, {'reasons': {'a', 'b'}}),
+            (task2, task3, {'reasons': {'c'}})
         ])
 
     def test_graph_flow_links(self):
@@ -190,7 +188,7 @@ class GraphFlowTest(test.TestCase):
         self.assertIs(linked, f)
         expected_meta = {
             'manual': True,
-            'reasons': set(['a'])
+            'reasons': {'a'}
         }
         self.assertCountEqual(list(f.iter_links()), [
             (task1, task2, expected_meta)
@@ -236,7 +234,7 @@ class GraphFlowTest(test.TestCase):
         task3 = _task('task3', provides=['c'])
         f1 = gf.Flow('nested')
         f1.add(task3)
-        tasks = set([task1, task2, f1])
+        tasks = {task1, task2, f1}
         f = gf.Flow('test').add(task1, task2, f1)
         for (n, data) in f.iter_nodes():
             self.assertIn(n, tasks)
@@ -248,7 +246,7 @@ class GraphFlowTest(test.TestCase):
         task3 = _task('task3')
         f1 = gf.Flow('nested')
         f1.add(task3)
-        tasks = set([task1, task2, f1])
+        tasks = {task1, task2, f1}
         f = gf.Flow('test').add(task1, task2, f1)
         for (u, v, data) in f.iter_links():
             self.assertIn(u, tasks)
diff -pruN 5.12.0-2/taskflow/tests/unit/patterns/test_linear_flow.py 6.0.2-0ubuntu1/taskflow/tests/unit/patterns/test_linear_flow.py
--- 5.12.0-2/taskflow/tests/unit/patterns/test_linear_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/patterns/test_linear_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -65,8 +63,8 @@ class LinearFlowTest(test.TestCase):
         self.assertEqual(1, len(f))
         self.assertEqual([task], list(f))
         self.assertEqual([], list(f.iter_links()))
-        self.assertEqual(set(['a', 'b']), f.requires)
-        self.assertEqual(set(['c', 'd']), f.provides)
+        self.assertEqual({'a', 'b'}, f.requires)
+        self.assertEqual({'c', 'd'}, f.provides)
 
     def test_linear_flow_two_independent_tasks(self):
         task1 = _task(name='task1')
@@ -89,7 +87,7 @@ class LinearFlowTest(test.TestCase):
                          list(f.iter_links()))
 
         self.assertEqual(set(), f.requires)
-        self.assertEqual(set(['a']), f.provides)
+        self.assertEqual({'a'}, f.provides)
 
     def test_linear_flow_two_dependent_tasks_two_different_calls(self):
         task1 = _task(name='task1', provides=['a'])
@@ -120,15 +118,15 @@ class LinearFlowTest(test.TestCase):
         self.assertIs(f.retry, ret)
         self.assertEqual('test_retry', ret.name)
 
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['b']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'b'}, f.provides)
 
     def test_iter_nodes(self):
         task1 = _task(name='task1')
         task2 = _task(name='task2')
         task3 = _task(name='task3')
         f = lf.Flow('test').add(task1, task2, task3)
-        tasks = set([task1, task2, task3])
+        tasks = {task1, task2, task3}
         for (node, data) in f.iter_nodes():
             self.assertIn(node, tasks)
             self.assertDictEqual({}, data)
@@ -138,7 +136,7 @@ class LinearFlowTest(test.TestCase):
         task2 = _task(name='task2')
         task3 = _task(name='task3')
         f = lf.Flow('test').add(task1, task2, task3)
-        tasks = set([task1, task2, task3])
+        tasks = {task1, task2, task3}
         for (u, v, data) in f.iter_links():
             self.assertIn(u, tasks)
             self.assertIn(v, tasks)
diff -pruN 5.12.0-2/taskflow/tests/unit/patterns/test_unordered_flow.py 6.0.2-0ubuntu1/taskflow/tests/unit/patterns/test_unordered_flow.py
--- 5.12.0-2/taskflow/tests/unit/patterns/test_unordered_flow.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/patterns/test_unordered_flow.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -65,8 +63,8 @@ class UnorderedFlowTest(test.TestCase):
         self.assertEqual(1, len(f))
         self.assertEqual([task], list(f))
         self.assertEqual([], list(f.iter_links()))
-        self.assertEqual(set(['a', 'b']), f.requires)
-        self.assertEqual(set(['c', 'd']), f.provides)
+        self.assertEqual({'a', 'b'}, f.requires)
+        self.assertEqual({'c', 'd'}, f.provides)
 
     def test_unordered_flow_two_tasks(self):
         task1 = _task(name='task1')
@@ -74,7 +72,7 @@ class UnorderedFlowTest(test.TestCase):
         f = uf.Flow('test').add(task1, task2)
 
         self.assertEqual(2, len(f))
-        self.assertEqual(set([task1, task2]), set(f))
+        self.assertEqual({task1, task2}, set(f))
         self.assertEqual([], list(f.iter_links()))
 
     def test_unordered_flow_two_tasks_two_different_calls(self):
@@ -83,16 +81,16 @@ class UnorderedFlowTest(test.TestCase):
         f = uf.Flow('test').add(task1)
         f.add(task2)
         self.assertEqual(2, len(f))
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['a']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'a'}, f.provides)
 
     def test_unordered_flow_two_tasks_reverse_order(self):
         task1 = _task(name='task1', provides=['a'])
         task2 = _task(name='task2', requires=['a'])
         f = uf.Flow('test').add(task2).add(task1)
         self.assertEqual(2, len(f))
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['a']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'a'}, f.provides)
 
     def test_unordered_flow_two_task_same_provide(self):
         task1 = _task(name='task1', provides=['a', 'b'])
@@ -107,8 +105,8 @@ class UnorderedFlowTest(test.TestCase):
         self.assertIs(f.retry, ret)
         self.assertEqual('test_retry', ret.name)
 
-        self.assertEqual(set(['a']), f.requires)
-        self.assertEqual(set(['b']), f.provides)
+        self.assertEqual({'a'}, f.requires)
+        self.assertEqual({'b'}, f.provides)
 
     def test_unordered_flow_with_retry_fully_satisfies(self):
         ret = retry.AlwaysRevert(provides=['b', 'a'])
@@ -116,13 +114,13 @@ class UnorderedFlowTest(test.TestCase):
         f.add(_task(name='task1', requires=['a']))
         self.assertIs(f.retry, ret)
         self.assertEqual('test_retry', ret.name)
-        self.assertEqual(set([]), f.requires)
-        self.assertEqual(set(['b', 'a']), f.provides)
+        self.assertEqual(set(), f.requires)
+        self.assertEqual({'b', 'a'}, f.provides)
 
     def test_iter_nodes(self):
         task1 = _task(name='task1', provides=['a', 'b'])
         task2 = _task(name='task2', provides=['a', 'c'])
-        tasks = set([task1, task2])
+        tasks = {task1, task2}
         f = uf.Flow('test')
         f.add(task2, task1)
         for (node, data) in f.iter_nodes():
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/base.py 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/base.py
--- 5.12.0-2/taskflow/tests/unit/persistence/base.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/base.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -24,7 +22,7 @@ from taskflow import states
 from taskflow.types import failure
 
 
-class PersistenceTestMixin(object):
+class PersistenceTestMixin:
     def _get_connection(self):
         raise NotImplementedError('_get_connection() implementation required')
 
@@ -73,7 +71,7 @@ class PersistenceTestMixin(object):
         lb_ids = {}
         for i in range(0, 10):
             lb_id = uuidutils.generate_uuid()
-            lb_name = 'lb-%s-%s' % (i, lb_id)
+            lb_name = 'lb-{}-{}'.format(i, lb_id)
             lb = models.LogBook(name=lb_name, uuid=lb_id)
             lb_ids[lb_id] = True
 
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/test_dir_persistence.py 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/test_dir_persistence.py
--- 5.12.0-2/taskflow/tests/unit/persistence/test_dir_persistence.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/test_dir_persistence.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -45,7 +43,7 @@ class DirPersistenceTest(testscenarios.T
         return self.backend.get_connection()
 
     def setUp(self):
-        super(DirPersistenceTest, self).setUp()
+        super().setUp()
         self.path = tempfile.mkdtemp()
         self.backend = impl_dir.DirBackend({
             'path': self.path,
@@ -55,7 +53,7 @@ class DirPersistenceTest(testscenarios.T
             conn.upgrade()
 
     def tearDown(self):
-        super(DirPersistenceTest, self).tearDown()
+        super().tearDown()
         if self.path and os.path.isdir(self.path):
             shutil.rmtree(self.path)
         self.path = None
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/test_memory_persistence.py 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/test_memory_persistence.py
--- 5.12.0-2/taskflow/tests/unit/persistence/test_memory_persistence.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/test_memory_persistence.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,7 +23,7 @@ from taskflow.tests.unit.persistence imp
 
 class MemoryPersistenceTest(test.TestCase, base.PersistenceTestMixin):
     def setUp(self):
-        super(MemoryPersistenceTest, self).setUp()
+        super().setUp()
         self._backend = impl_memory.MemoryBackend({})
 
     def _get_connection(self):
@@ -35,7 +33,7 @@ class MemoryPersistenceTest(test.TestCas
         conn = self._get_connection()
         conn.clear_all()
         self._backend = None
-        super(MemoryPersistenceTest, self).tearDown()
+        super().tearDown()
 
     def test_memory_backend_entry_point(self):
         conf = {'connection': 'memory:'}
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/test_sql_persistence.py 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/test_sql_persistence.py
--- 5.12.0-2/taskflow/tests/unit/persistence/test_sql_persistence.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/test_sql_persistence.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -61,7 +59,7 @@ def _get_connect_string(backend, user, p
         raise Exception("Unrecognized backend: '%s'" % backend)
     if not database:
         database = ''
-    return "%s://%s:%s@localhost/%s" % (backend, user, passwd, database)
+    return "{}://{}:{}@localhost/{}".format(backend, user, passwd, database)
 
 
 def _mysql_exists():
@@ -108,7 +106,7 @@ class SqlitePersistenceTest(test.TestCas
         return impl_sqlalchemy.SQLAlchemyBackend(conf).get_connection()
 
     def setUp(self):
-        super(SqlitePersistenceTest, self).setUp()
+        super().setUp()
         self.db_location = tempfile.mktemp(suffix='.db')
         self.db_uri = "sqlite:///%s" % (self.db_location)
         # Ensure upgraded to the right schema
@@ -116,7 +114,7 @@ class SqlitePersistenceTest(test.TestCas
             conn.upgrade()
 
     def tearDown(self):
-        super(SqlitePersistenceTest, self).tearDown()
+        super().tearDown()
         if self.db_location and os.path.isfile(self.db_location):
             os.unlink(self.db_location)
             self.db_location = None
@@ -146,7 +144,7 @@ class BackendPersistenceTestMixin(base.P
         """Cleans up by removing the database once the tests are done."""
 
     def setUp(self):
-        super(BackendPersistenceTestMixin, self).setUp()
+        super().setUp()
         self.backend = None
         try:
             self.db_uri = self._init_db()
@@ -175,7 +173,7 @@ class MysqlPersistenceTest(BackendPersis
             db_uri = _get_connect_string('mysql', USER, PASSWD)
             engine = sa.create_engine(db_uri)
             with contextlib.closing(engine.connect()) as conn:
-                conn.execute("CREATE DATABASE %s" % DATABASE)
+                conn.execute(sa.text("CREATE DATABASE %s" % DATABASE))
         except Exception as e:
             raise Exception('Failed to initialize MySQL db: %s' % (e))
         finally:
@@ -192,7 +190,7 @@ class MysqlPersistenceTest(BackendPersis
         try:
             engine = sa.create_engine(self.db_uri)
             with contextlib.closing(engine.connect()) as conn:
-                conn.execute("DROP DATABASE IF EXISTS %s" % DATABASE)
+                conn.execute(sa.text("DROP DATABASE IF EXISTS %s" % DATABASE))
         except Exception as e:
             raise Exception('Failed to remove temporary database: %s' % (e))
         finally:
@@ -217,7 +215,7 @@ class PostgresPersistenceTest(BackendPer
             engine = sa.create_engine(db_uri)
             with contextlib.closing(engine.connect()) as conn:
                 conn.connection.set_isolation_level(0)
-                conn.execute("CREATE DATABASE %s" % DATABASE)
+                conn.execute(sa.text("CREATE DATABASE %s" % DATABASE))
                 conn.connection.set_isolation_level(1)
         except Exception as e:
             raise Exception('Failed to initialize PostgreSQL db: %s' % (e))
@@ -241,7 +239,7 @@ class PostgresPersistenceTest(BackendPer
             engine = sa.create_engine(db_uri)
             with contextlib.closing(engine.connect()) as conn:
                 conn.connection.set_isolation_level(0)
-                conn.execute("DROP DATABASE IF EXISTS %s" % DATABASE)
+                conn.execute(sa.text("DROP DATABASE IF EXISTS %s" % DATABASE))
                 conn.connection.set_isolation_level(1)
         except Exception as e:
             raise Exception('Failed to remove temporary database: %s' % (e))
diff -pruN 5.12.0-2/taskflow/tests/unit/persistence/test_zk_persistence.py 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/test_zk_persistence.py
--- 5.12.0-2/taskflow/tests/unit/persistence/test_zk_persistence.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/persistence/test_zk_persistence.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 AT&T Labs All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -56,7 +54,7 @@ class ZkPersistenceTest(test.TestCase, b
         return self.backend.get_connection()
 
     def setUp(self):
-        super(ZkPersistenceTest, self).setUp()
+        super().setUp()
         conf = test_utils.ZK_TEST_CONFIG.copy()
         # Create a unique path just for this test (so that we don't overwrite
         # what other tests are doing).
@@ -84,7 +82,7 @@ class ZakePersistenceTest(test.TestCase,
         return self._backend.get_connection()
 
     def setUp(self):
-        super(ZakePersistenceTest, self).setUp()
+        super().setUp()
         conf = {
             "path": "/taskflow",
         }
diff -pruN 5.12.0-2/taskflow/tests/unit/test_arguments_passing.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_arguments_passing.py
--- 5.12.0-2/taskflow/tests/unit/test_arguments_passing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_arguments_passing.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -23,11 +21,6 @@ from taskflow import test
 from taskflow.tests import utils
 from taskflow.utils import eventlet_utils as eu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 class ArgumentsPassingTest(utils.EngineTestBase):
 
@@ -55,9 +48,9 @@ class ArgumentsPassingTest(utils.EngineT
         }, engine.storage.fetch_all())
 
     def test_save_dict(self):
-        flow = utils.TaskMultiDict(provides=set(['badger',
-                                                 'mushroom',
-                                                 'snake']))
+        flow = utils.TaskMultiDict(provides={'badger',
+                                             'mushroom',
+                                             'snake'})
         engine = self._make_engine(flow)
         engine.run()
         self.assertEqual({
@@ -224,18 +217,3 @@ class ParallelEngineWithEventletTest(Arg
                                      backend=self.backend,
                                      engine='parallel',
                                      executor=executor)
-
-
-@testtools.skipIf(pe is None, 'process_executor is not available')
-class ParallelEngineWithProcessTest(ArgumentsPassingTest, test.TestCase):
-    _EXECUTOR_WORKERS = 2
-
-    def _make_engine(self, flow, flow_detail=None, executor=None):
-        if executor is None:
-            executor = 'processes'
-        return taskflow.engines.load(flow,
-                                     flow_detail=flow_detail,
-                                     backend=self.backend,
-                                     engine='parallel',
-                                     executor=executor,
-                                     max_workers=self._EXECUTOR_WORKERS)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_check_transition.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_check_transition.py
--- 5.12.0-2/taskflow/tests/unit/test_check_transition.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_check_transition.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -50,7 +48,7 @@ class TransitionTest(test.TestCase):
 class CheckFlowTransitionTest(TransitionTest):
 
     def setUp(self):
-        super(CheckFlowTransitionTest, self).setUp()
+        super().setUp()
         self.check_transition = states.check_flow_transition
         self.transition_exc_regexp = '^Flow transition.*not allowed'
 
@@ -73,7 +71,7 @@ class CheckFlowTransitionTest(Transition
 class CheckTaskTransitionTest(TransitionTest):
 
     def setUp(self):
-        super(CheckTaskTransitionTest, self).setUp()
+        super().setUp()
         self.check_transition = states.check_task_transition
         self.transition_exc_regexp = '^Task transition.*not allowed'
 
@@ -122,7 +120,7 @@ class CheckTaskTransitionTest(Transition
 class CheckRetryTransitionTest(CheckTaskTransitionTest):
 
     def setUp(self):
-        super(CheckRetryTransitionTest, self).setUp()
+        super().setUp()
         self.check_transition = states.check_retry_transition
         self.transition_exc_regexp = '^Retry transition.*not allowed'
 
diff -pruN 5.12.0-2/taskflow/tests/unit/test_conductors.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_conductors.py
--- 5.12.0-2/taskflow/tests/unit/test_conductors.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_conductors.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_deciders.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_deciders.py
--- 5.12.0-2/taskflow/tests/unit/test_deciders.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_deciders.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -55,4 +53,4 @@ class TestDeciders(test.TestCase):
     def test_bad_pick_widest(self):
         self.assertRaises(ValueError, deciders.pick_widest, [])
         self.assertRaises(ValueError, deciders.pick_widest, ["a"])
-        self.assertRaises(ValueError, deciders.pick_widest, set(['b']))
+        self.assertRaises(ValueError, deciders.pick_widest, {'b'})
diff -pruN 5.12.0-2/taskflow/tests/unit/test_engine_helpers.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_engine_helpers.py
--- 5.12.0-2/taskflow/tests/unit/test_engine_helpers.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_engine_helpers.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_engines.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_engines.py
--- 5.12.0-2/taskflow/tests/unit/test_engines.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_engines.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -41,11 +39,6 @@ from taskflow.utils import eventlet_util
 from taskflow.utils import persistence_utils as p_utils
 from taskflow.utils import threading_utils as tu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 # Expected engine transitions when empty workflows are ran...
 _EMPTY_TRANSITIONS = [
@@ -54,7 +47,7 @@ _EMPTY_TRANSITIONS = [
 ]
 
 
-class EngineTaskNotificationsTest(object):
+class EngineTaskNotificationsTest:
     def test_run_capture_task_notifications(self):
         captured = collections.defaultdict(list)
 
@@ -89,7 +82,7 @@ class EngineTaskNotificationsTest(object
             self.assertEqual(expected, captured[name])
 
 
-class EngineTaskTest(object):
+class EngineTaskTest:
 
     def test_run_task_as_flow(self):
         flow = utils.ProgressingTask(name='task1')
@@ -583,8 +576,8 @@ class EngineParallelFlowTest(utils.Engin
         engine = self._make_engine(flow)
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
-        expected = set(['task2.t SUCCESS(5)', 'task2.t RUNNING',
-                        'task1.t RUNNING', 'task1.t SUCCESS(5)'])
+        expected = {'task2.t SUCCESS(5)', 'task2.t RUNNING',
+                    'task1.t RUNNING', 'task1.t SUCCESS(5)'}
         self.assertEqual(expected, set(capturer.values))
 
     def test_parallel_revert(self):
@@ -858,8 +851,8 @@ class EngineGraphFlowTest(utils.EngineTe
         engine = self._make_engine(flow)
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
-        expected = set(['task2.t SUCCESS(5)', 'task2.t RUNNING',
-                        'task1.t RUNNING', 'task1.t SUCCESS(5)'])
+        expected = {'task2.t SUCCESS(5)', 'task2.t RUNNING',
+                    'task1.t RUNNING', 'task1.t SUCCESS(5)'}
         self.assertEqual(expected, set(capturer.values))
         self.assertEqual(2, len(flow))
 
@@ -1223,7 +1216,7 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING',
             'task1.t SUCCESS(5)',
 
@@ -1232,7 +1225,7 @@ class EngineGraphConditionalFlowTest(uti
 
             'task3.t RUNNING',
             'task3.t SUCCESS(5)',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
 
     def test_graph_flow_conditional_ignore_reset(self):
@@ -1251,7 +1244,7 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING',
             'task1.t SUCCESS(5)',
 
@@ -1259,7 +1252,7 @@ class EngineGraphConditionalFlowTest(uti
             'task2.t SUCCESS(5)',
 
             'task3.t IGNORE',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
         self.assertEqual(states.IGNORE,
                          engine.storage.get_atom_state('task3'))
@@ -1271,7 +1264,7 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING',
             'task1.t SUCCESS(5)',
 
@@ -1280,7 +1273,7 @@ class EngineGraphConditionalFlowTest(uti
 
             'task3.t RUNNING',
             'task3.t SUCCESS(5)',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
 
     def test_graph_flow_diamond_ignored(self):
@@ -1301,7 +1294,7 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING',
             'task1.t SUCCESS(5)',
 
@@ -1312,7 +1305,7 @@ class EngineGraphConditionalFlowTest(uti
             'task3.t SUCCESS(5)',
 
             'task4.t IGNORE',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
         self.assertEqual(states.IGNORE,
                          engine.storage.get_atom_state('task4'))
@@ -1350,12 +1343,12 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING', 'task1.t SUCCESS(2)',
             'task3.t IGNORE', 'task3_3.t IGNORE',
             'task2.t RUNNING', 'task2.t SUCCESS(5)',
             'task2_2.t RUNNING', 'task2_2.t SUCCESS(5)',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
 
         engine = self._make_engine(flow)
@@ -1363,12 +1356,12 @@ class EngineGraphConditionalFlowTest(uti
         with utils.CaptureListener(engine, capture_flow=False) as capturer:
             engine.run()
 
-        expected = set([
+        expected = {
             'task1.t RUNNING', 'task1.t SUCCESS(1)',
             'task2.t IGNORE', 'task2_2.t IGNORE',
             'task3.t RUNNING', 'task3.t SUCCESS(5)',
             'task3_3.t RUNNING', 'task3_3.t SUCCESS(5)',
-        ])
+        }
         self.assertEqual(expected, set(capturer.values))
 
 
@@ -1499,82 +1492,6 @@ class ParallelEngineWithEventletTest(Eng
                                      store=store, **kwargs)
 
 
-@testtools.skipIf(pe is None, 'process_executor is not available')
-class ParallelEngineWithProcessTest(EngineTaskTest,
-                                    EngineMultipleResultsTest,
-                                    EngineLinearFlowTest,
-                                    EngineParallelFlowTest,
-                                    EngineLinearAndUnorderedExceptionsTest,
-                                    EngineOptionalRequirementsTest,
-                                    EngineGraphFlowTest,
-                                    EngineResetTests,
-                                    EngineMissingDepsTest,
-                                    EngineGraphConditionalFlowTest,
-                                    EngineDeciderDepthTest,
-                                    EngineTaskNotificationsTest,
-                                    test.TestCase):
-    _EXECUTOR_WORKERS = 2
-
-    def test_correct_load(self):
-        engine = self._make_engine(utils.TaskNoRequiresNoReturns)
-        self.assertIsInstance(engine, eng.ParallelActionEngine)
-
-    def _make_engine(self, flow,
-                     flow_detail=None, executor=None, store=None,
-                     **kwargs):
-        if executor is None:
-            executor = 'processes'
-        return taskflow.engines.load(flow, flow_detail=flow_detail,
-                                     backend=self.backend,
-                                     engine='parallel',
-                                     executor=executor,
-                                     store=store,
-                                     max_workers=self._EXECUTOR_WORKERS,
-                                     **kwargs)
-
-    def test_update_progress_notifications_proxied(self):
-        captured = collections.defaultdict(list)
-
-        def notify_me(event_type, details):
-            captured[event_type].append(details)
-
-        a = utils.MultiProgressingTask('a')
-        a.notifier.register(a.notifier.ANY, notify_me)
-        progress_chunks = list(x / 10.0 for x in range(1, 10))
-        e = self._make_engine(a, store={'progress_chunks': progress_chunks})
-        e.run()
-
-        self.assertEqual(11, len(captured[task.EVENT_UPDATE_PROGRESS]))
-
-    def test_custom_notifications_proxied(self):
-        captured = collections.defaultdict(list)
-
-        def notify_me(event_type, details):
-            captured[event_type].append(details)
-
-        a = utils.EmittingTask('a')
-        a.notifier.register(a.notifier.ANY, notify_me)
-        e = self._make_engine(a)
-        e.run()
-
-        self.assertEqual(1, len(captured['hi']))
-        self.assertEqual(2, len(captured[task.EVENT_UPDATE_PROGRESS]))
-
-    def test_just_custom_notifications_proxied(self):
-        captured = collections.defaultdict(list)
-
-        def notify_me(event_type, details):
-            captured[event_type].append(details)
-
-        a = utils.EmittingTask('a')
-        a.notifier.register('hi', notify_me)
-        e = self._make_engine(a)
-        e.run()
-
-        self.assertEqual(1, len(captured['hi']))
-        self.assertEqual(0, len(captured[task.EVENT_UPDATE_PROGRESS]))
-
-
 class WorkerBasedEngineTest(EngineTaskTest,
                             EngineMultipleResultsTest,
                             EngineLinearFlowTest,
@@ -1589,7 +1506,7 @@ class WorkerBasedEngineTest(EngineTaskTe
                             EngineTaskNotificationsTest,
                             test.TestCase):
     def setUp(self):
-        super(WorkerBasedEngineTest, self).setUp()
+        super().setUp()
         shared_conf = {
             'exchange': 'test',
             'transport': 'memory',
diff -pruN 5.12.0-2/taskflow/tests/unit/test_exceptions.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_exceptions.py
--- 5.12.0-2/taskflow/tests/unit/test_exceptions.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_exceptions.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -44,7 +42,7 @@ class TestExceptions(test.TestCase):
     def test_raise_with(self):
         capture = None
         try:
-            raise IOError('broken')
+            raise OSError('broken')
         except Exception:
             try:
                 exc.raise_with_cause(exc.TaskFlowException, 'broken')
@@ -73,8 +71,8 @@ class TestExceptions(test.TestCase):
         try:
             try:
                 try:
-                    raise IOError("Didn't work")
-                except IOError:
+                    raise OSError("Didn't work")
+                except OSError:
                     exc.raise_with_cause(exc.TaskFlowException,
                                          "It didn't go so well")
             except exc.TaskFlowException:
@@ -109,7 +107,7 @@ class TestExceptions(test.TestCase):
     def test_raise_with_cause(self):
         capture = None
         try:
-            raise IOError('broken')
+            raise OSError('broken')
         except Exception:
             try:
                 exc.raise_with_cause(exc.TaskFlowException, 'broken')
diff -pruN 5.12.0-2/taskflow/tests/unit/test_failure.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_failure.py
--- 5.12.0-2/taskflow/tests/unit/test_failure.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_failure.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -37,7 +35,7 @@ def _make_exc_info(msg):
         return sys.exc_info()
 
 
-class GeneralFailureObjTestsMixin(object):
+class GeneralFailureObjTestsMixin:
 
     def test_captures_message(self):
         self.assertEqual('Woot!', self.fail_obj.exception_str)
@@ -72,7 +70,7 @@ class GeneralFailureObjTestsMixin(object
 class CaptureFailureTestCase(test.TestCase, GeneralFailureObjTestsMixin):
 
     def setUp(self):
-        super(CaptureFailureTestCase, self).setUp()
+        super().setUp()
         self.fail_obj = _captured_failure('Woot!')
 
     def test_captures_value(self):
@@ -91,7 +89,7 @@ class CaptureFailureTestCase(test.TestCa
 class ReCreatedFailureTestCase(test.TestCase, GeneralFailureObjTestsMixin):
 
     def setUp(self):
-        super(ReCreatedFailureTestCase, self).setUp()
+        super().setUp()
         fail_obj = _captured_failure('Woot!')
         self.fail_obj = failure.Failure(exception_str=fail_obj.exception_str,
                                         traceback_str=fail_obj.traceback_str,
@@ -124,7 +122,7 @@ class ReCreatedFailureTestCase(test.Test
 class FromExceptionTestCase(test.TestCase, GeneralFailureObjTestsMixin):
 
     def setUp(self):
-        super(FromExceptionTestCase, self).setUp()
+        super().setUp()
         self.fail_obj = failure.Failure.from_exception(RuntimeError('Woot!'))
 
     def test_pformat_no_traceback(self):
@@ -333,24 +331,24 @@ class NonAsciiExceptionsTestCase(test.Te
         excp = ValueError(bad_string)
         fail = failure.Failure.from_exception(excp)
         self.assertEqual(str(excp), fail.exception_str)
-        expected = u'Failure: ValueError: \xc8'
+        expected = 'Failure: ValueError: \xc8'
         self.assertEqual(expected, str(fail))
 
     def test_exception_non_ascii_unicode(self):
-        hi_ru = u'привет'
+        hi_ru = 'привет'
         fail = failure.Failure.from_exception(ValueError(hi_ru))
         self.assertEqual(hi_ru, fail.exception_str)
         self.assertIsInstance(fail.exception_str, str)
-        self.assertEqual(u'Failure: ValueError: %s' % hi_ru,
+        self.assertEqual('Failure: ValueError: %s' % hi_ru,
                          str(fail))
 
     def test_wrapped_failure_non_ascii_unicode(self):
-        hi_cn = u'嗨'
+        hi_cn = '嗨'
         fail = ValueError(hi_cn)
         self.assertEqual(hi_cn, str(fail))
         fail = failure.Failure.from_exception(fail)
         wrapped_fail = exceptions.WrappedFailure([fail])
-        expected_result = (u"WrappedFailure: "
+        expected_result = ("WrappedFailure: "
                            "[Failure: ValueError: %s]" % (hi_cn))
         self.assertEqual(expected_result, str(wrapped_fail))
 
@@ -361,7 +359,7 @@ class NonAsciiExceptionsTestCase(test.Te
         self.assertEqual(fail, copied)
 
     def test_failure_equality_non_ascii_unicode(self):
-        hi_ru = u'привет'
+        hi_ru = 'привет'
         fail = failure.Failure.from_exception(ValueError(hi_ru))
         copied = fail.copy()
         self.assertEqual(fail, copied)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_flow_dependencies.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_flow_dependencies.py
--- 5.12.0-2/taskflow/tests/unit/test_flow_dependencies.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_flow_dependencies.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -32,28 +30,28 @@ class FlowDependenciesTest(test.TestCase
 
     def test_task_requires_default_values(self):
         flow = utils.TaskMultiArg()
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides, )
 
     def test_task_requires_rebinded_mapped(self):
         flow = utils.TaskMultiArg(rebind={'x': 'a', 'y': 'b', 'z': 'c'})
-        self.assertEqual(set(['a', 'b', 'c']), flow.requires)
+        self.assertEqual({'a', 'b', 'c'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_task_requires_additional_values(self):
         flow = utils.TaskMultiArg(requires=['a', 'b'])
-        self.assertEqual(set(['a', 'b', 'x', 'y', 'z']), flow.requires)
+        self.assertEqual({'a', 'b', 'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_task_provides_values(self):
         flow = utils.TaskMultiReturn(provides=['a', 'b', 'c'])
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['a', 'b', 'c']), flow.provides)
+        self.assertEqual({'a', 'b', 'c'}, flow.provides)
 
     def test_task_provides_and_requires_values(self):
         flow = utils.TaskMultiArgMultiReturn(provides=['a', 'b', 'c'])
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
-        self.assertEqual(set(['a', 'b', 'c']), flow.provides)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
+        self.assertEqual({'a', 'b', 'c'}, flow.provides)
 
     def test_linear_flow_without_dependencies(self):
         flow = lf.Flow('lf').add(
@@ -66,14 +64,14 @@ class FlowDependenciesTest(test.TestCase
         flow = lf.Flow('lf').add(
             utils.TaskOneArg('task1'),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_linear_flow_requires_rebind_values(self):
         flow = lf.Flow('lf').add(
             utils.TaskOneArg('task1', rebind=['q']),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z', 'q']), flow.requires)
+        self.assertEqual({'x', 'y', 'z', 'q'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_linear_flow_provides_values(self):
@@ -81,14 +79,14 @@ class FlowDependenciesTest(test.TestCase
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskMultiReturn('task2', provides=['a', 'b', 'c']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'a', 'b', 'c']), flow.provides)
+        self.assertEqual({'x', 'a', 'b', 'c'}, flow.provides)
 
     def test_linear_flow_provides_required_values(self):
         flow = lf.Flow('lf').add(
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskOneArg('task2'))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_linear_flow_multi_provides_and_requires_values(self):
         flow = lf.Flow('lf').add(
@@ -97,8 +95,8 @@ class FlowDependenciesTest(test.TestCase
                                           provides=['x', 'y', 'q']),
             utils.TaskMultiArgMultiReturn('task2',
                                           provides=['i', 'j', 'k']))
-        self.assertEqual(set(['a', 'b', 'c', 'z']), flow.requires)
-        self.assertEqual(set(['x', 'y', 'q', 'i', 'j', 'k']), flow.provides)
+        self.assertEqual({'a', 'b', 'c', 'z'}, flow.requires)
+        self.assertEqual({'x', 'y', 'q', 'i', 'j', 'k'}, flow.provides)
 
     def test_unordered_flow_without_dependencies(self):
         flow = uf.Flow('uf').add(
@@ -111,14 +109,14 @@ class FlowDependenciesTest(test.TestCase
         flow = uf.Flow('uf').add(
             utils.TaskOneArg('task1'),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_unordered_flow_requires_rebind_values(self):
         flow = uf.Flow('uf').add(
             utils.TaskOneArg('task1', rebind=['q']),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z', 'q']), flow.requires)
+        self.assertEqual({'x', 'y', 'z', 'q'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_unordered_flow_provides_values(self):
@@ -126,7 +124,7 @@ class FlowDependenciesTest(test.TestCase
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskMultiReturn('task2', provides=['a', 'b', 'c']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'a', 'b', 'c']), flow.provides)
+        self.assertEqual({'x', 'a', 'b', 'c'}, flow.provides)
 
     def test_unordered_flow_provides_required_values(self):
         flow = uf.Flow('uf')
@@ -134,23 +132,23 @@ class FlowDependenciesTest(test.TestCase
                  utils.TaskOneArg('task2'))
         flow.add(utils.TaskOneReturn('task1', provides='x'),
                  utils.TaskOneArg('task2'))
-        self.assertEqual(set(['x']), flow.provides)
-        self.assertEqual(set(['x']), flow.requires)
+        self.assertEqual({'x'}, flow.provides)
+        self.assertEqual({'x'}, flow.requires)
 
     def test_unordered_flow_requires_provided_value_other_call(self):
         flow = uf.Flow('uf')
         flow.add(utils.TaskOneReturn('task1', provides='x'))
         flow.add(utils.TaskOneArg('task2'))
-        self.assertEqual(set(['x']), flow.provides)
-        self.assertEqual(set(['x']), flow.requires)
+        self.assertEqual({'x'}, flow.provides)
+        self.assertEqual({'x'}, flow.requires)
 
     def test_unordered_flow_provides_required_value_other_call(self):
         flow = uf.Flow('uf')
         flow.add(utils.TaskOneArg('task2'))
         flow.add(utils.TaskOneReturn('task1', provides='x'))
         self.assertEqual(2, len(flow))
-        self.assertEqual(set(['x']), flow.provides)
-        self.assertEqual(set(['x']), flow.requires)
+        self.assertEqual({'x'}, flow.provides)
+        self.assertEqual({'x'}, flow.requires)
 
     def test_unordered_flow_multi_provides_and_requires_values(self):
         flow = uf.Flow('uf').add(
@@ -159,19 +157,19 @@ class FlowDependenciesTest(test.TestCase
                                           provides=['d', 'e', 'f']),
             utils.TaskMultiArgMultiReturn('task2',
                                           provides=['i', 'j', 'k']))
-        self.assertEqual(set(['a', 'b', 'c', 'x', 'y', 'z']), flow.requires)
-        self.assertEqual(set(['d', 'e', 'f', 'i', 'j', 'k']), flow.provides)
+        self.assertEqual({'a', 'b', 'c', 'x', 'y', 'z'}, flow.requires)
+        self.assertEqual({'d', 'e', 'f', 'i', 'j', 'k'}, flow.provides)
 
     def test_unordered_flow_provides_same_values(self):
         flow = uf.Flow('uf').add(utils.TaskOneReturn(provides='x'))
         flow.add(utils.TaskOneReturn(provides='x'))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_unordered_flow_provides_same_values_one_add(self):
         flow = uf.Flow('uf')
         flow.add(utils.TaskOneReturn(provides='x'),
                  utils.TaskOneReturn(provides='x'))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_nested_flows_requirements(self):
         flow = uf.Flow('uf').add(
@@ -184,21 +182,21 @@ class FlowDependenciesTest(test.TestCase
                                           rebind=['b'], provides=['z']),
                 utils.TaskOneArgOneReturn('task4', rebind=['c'],
                                           provides=['q'])))
-        self.assertEqual(set(['a', 'b', 'c']), flow.requires)
-        self.assertEqual(set(['x', 'y', 'z', 'q']), flow.provides)
+        self.assertEqual({'a', 'b', 'c'}, flow.requires)
+        self.assertEqual({'x', 'y', 'z', 'q'}, flow.provides)
 
     def test_graph_flow_requires_values(self):
         flow = gf.Flow('gf').add(
             utils.TaskOneArg('task1'),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z']), flow.requires)
+        self.assertEqual({'x', 'y', 'z'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_graph_flow_requires_rebind_values(self):
         flow = gf.Flow('gf').add(
             utils.TaskOneArg('task1', rebind=['q']),
             utils.TaskMultiArg('task2'))
-        self.assertEqual(set(['x', 'y', 'z', 'q']), flow.requires)
+        self.assertEqual({'x', 'y', 'z', 'q'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_graph_flow_provides_values(self):
@@ -206,20 +204,20 @@ class FlowDependenciesTest(test.TestCase
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskMultiReturn('task2', provides=['a', 'b', 'c']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'a', 'b', 'c']), flow.provides)
+        self.assertEqual({'x', 'a', 'b', 'c'}, flow.provides)
 
     def test_graph_flow_provides_required_values(self):
         flow = gf.Flow('gf').add(
             utils.TaskOneReturn('task1', provides='x'),
             utils.TaskOneArg('task2'))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_graph_flow_provides_provided_value_other_call(self):
         flow = gf.Flow('gf')
         flow.add(utils.TaskOneReturn('task1', provides='x'))
         flow.add(utils.TaskOneReturn('task2', provides='x'))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_graph_flow_multi_provides_and_requires_values(self):
         flow = gf.Flow('gf').add(
@@ -228,8 +226,8 @@ class FlowDependenciesTest(test.TestCase
                                           provides=['d', 'e', 'f']),
             utils.TaskMultiArgMultiReturn('task2',
                                           provides=['i', 'j', 'k']))
-        self.assertEqual(set(['a', 'b', 'c', 'x', 'y', 'z']), flow.requires)
-        self.assertEqual(set(['d', 'e', 'f', 'i', 'j', 'k']), flow.provides)
+        self.assertEqual({'a', 'b', 'c', 'x', 'y', 'z'}, flow.requires)
+        self.assertEqual({'d', 'e', 'f', 'i', 'j', 'k'}, flow.provides)
 
     def test_graph_cyclic_dependency(self):
         flow = gf.Flow('g-3-cyclic')
@@ -255,27 +253,27 @@ class FlowDependenciesTest(test.TestCase
 
     def test_retry_in_linear_flow_with_requirements(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt', requires=['x', 'y']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
+        self.assertEqual({'x', 'y'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_retry_in_linear_flow_with_provides(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt', provides=['x', 'y']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_retry_in_linear_flow_requires_and_provides(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt',
                                                 requires=['x', 'y'],
                                                 provides=['a', 'b']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
-        self.assertEqual(set(['a', 'b']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.requires)
+        self.assertEqual({'a', 'b'}, flow.provides)
 
     def test_retry_requires_and_provides_same_value(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt',
                                                 requires=['x', 'y'],
                                                 provides=['x', 'y']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.requires)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_retry_in_unordered_flow_no_requirements_no_provides(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt'))
@@ -284,20 +282,20 @@ class FlowDependenciesTest(test.TestCase
 
     def test_retry_in_unordered_flow_with_requirements(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', requires=['x', 'y']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
+        self.assertEqual({'x', 'y'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_retry_in_unordered_flow_with_provides(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', provides=['x', 'y']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_retry_in_unordered_flow_requires_and_provides(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt',
                                                 requires=['x', 'y'],
                                                 provides=['a', 'b']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
-        self.assertEqual(set(['a', 'b']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.requires)
+        self.assertEqual({'a', 'b'}, flow.provides)
 
     def test_retry_in_graph_flow_no_requirements_no_provides(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt'))
@@ -306,20 +304,20 @@ class FlowDependenciesTest(test.TestCase
 
     def test_retry_in_graph_flow_with_requirements(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt', requires=['x', 'y']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
+        self.assertEqual({'x', 'y'}, flow.requires)
         self.assertEqual(set(), flow.provides)
 
     def test_retry_in_graph_flow_with_provides(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt', provides=['x', 'y']))
         self.assertEqual(set(), flow.requires)
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_retry_in_graph_flow_requires_and_provides(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt',
                                                 requires=['x', 'y'],
                                                 provides=['a', 'b']))
-        self.assertEqual(set(['x', 'y']), flow.requires)
-        self.assertEqual(set(['a', 'b']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.requires)
+        self.assertEqual({'a', 'b'}, flow.provides)
 
     def test_linear_flow_retry_and_task(self):
         flow = lf.Flow('lf', retry.AlwaysRevert('rt',
@@ -328,8 +326,8 @@ class FlowDependenciesTest(test.TestCase
         flow.add(utils.TaskMultiArgOneReturn(rebind=['a', 'x', 'c'],
                                              provides=['z']))
 
-        self.assertEqual(set(['x', 'y', 'c']), flow.requires)
-        self.assertEqual(set(['a', 'b', 'z']), flow.provides)
+        self.assertEqual({'x', 'y', 'c'}, flow.requires)
+        self.assertEqual({'a', 'b', 'z'}, flow.provides)
 
     def test_unordered_flow_retry_and_task(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt',
@@ -338,25 +336,25 @@ class FlowDependenciesTest(test.TestCase
         flow.add(utils.TaskMultiArgOneReturn(rebind=['a', 'x', 'c'],
                                              provides=['z']))
 
-        self.assertEqual(set(['x', 'y', 'c']), flow.requires)
-        self.assertEqual(set(['a', 'b', 'z']), flow.provides)
+        self.assertEqual({'x', 'y', 'c'}, flow.requires)
+        self.assertEqual({'a', 'b', 'z'}, flow.provides)
 
     def test_unordered_flow_retry_and_task_same_requires_provides(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', requires=['x']))
         flow.add(utils.TaskOneReturn(provides=['x']))
-        self.assertEqual(set(['x']), flow.requires)
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.requires)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_unordered_flow_retry_and_task_provide_same_value(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', provides=['x']))
         flow.add(utils.TaskOneReturn('t1', provides=['x']))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_unordered_flow_retry_two_tasks_provide_same_value(self):
         flow = uf.Flow('uf', retry.AlwaysRevert('rt', provides=['y']))
         flow.add(utils.TaskOneReturn('t1', provides=['x']),
                  utils.TaskOneReturn('t2', provides=['x']))
-        self.assertEqual(set(['x', 'y']), flow.provides)
+        self.assertEqual({'x', 'y'}, flow.provides)
 
     def test_graph_flow_retry_and_task(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt',
@@ -365,19 +363,19 @@ class FlowDependenciesTest(test.TestCase
         flow.add(utils.TaskMultiArgOneReturn(rebind=['a', 'x', 'c'],
                                              provides=['z']))
 
-        self.assertEqual(set(['x', 'y', 'c']), flow.requires)
-        self.assertEqual(set(['a', 'b', 'z']), flow.provides)
+        self.assertEqual({'x', 'y', 'c'}, flow.requires)
+        self.assertEqual({'a', 'b', 'z'}, flow.provides)
 
     def test_graph_flow_retry_and_task_dependency_provide_require(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt', requires=['x']))
         flow.add(utils.TaskOneReturn(provides=['x']))
-        self.assertEqual(set(['x']), flow.provides)
-        self.assertEqual(set(['x']), flow.requires)
+        self.assertEqual({'x'}, flow.provides)
+        self.assertEqual({'x'}, flow.requires)
 
     def test_graph_flow_retry_and_task_provide_same_value(self):
         flow = gf.Flow('gf', retry.AlwaysRevert('rt', provides=['x']))
         flow.add(utils.TaskOneReturn('t1', provides=['x']))
-        self.assertEqual(set(['x']), flow.provides)
+        self.assertEqual({'x'}, flow.provides)
 
     def test_builtin_retry_args(self):
 
@@ -389,4 +387,4 @@ class FlowDependenciesTest(test.TestCase
                 pass
 
         flow = lf.Flow('lf', retry=FullArgsRetry(requires='a'))
-        self.assertEqual(set(['a']), flow.requires)
+        self.assertEqual({'a'}, flow.requires)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_formatters.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_formatters.py
--- 5.12.0-2/taskflow/tests/unit/test_formatters.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_formatters.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_functor_task.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_functor_task.py
--- 5.12.0-2/taskflow/tests/unit/test_functor_task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_functor_task.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -24,7 +22,7 @@ def add(a, b):
     return a + b
 
 
-class BunchOfFunctions(object):
+class BunchOfFunctions:
 
     def __init__(self, values):
         self.values = values
diff -pruN 5.12.0-2/taskflow/tests/unit/test_listeners.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_listeners.py
--- 5.12.0-2/taskflow/tests/unit/test_listeners.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_listeners.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -52,7 +50,7 @@ _LOG_LEVELS = frozenset([
 
 class SleepyTask(task.Task):
     def __init__(self, name, sleep_for=0.0):
-        super(SleepyTask, self).__init__(name=name)
+        super().__init__(name=name)
         self._sleep_for = float(sleep_for)
 
     def execute(self):
@@ -62,7 +60,7 @@ class SleepyTask(task.Task):
             time.sleep(self._sleep_for)
 
 
-class EngineMakerMixin(object):
+class EngineMakerMixin:
     def _make_engine(self, flow, flow_detail=None, backend=None):
         e = taskflow.engines.load(flow,
                                   flow_detail=flow_detail,
@@ -80,7 +78,7 @@ class TestClaimListener(test.TestCase, E
         return f
 
     def setUp(self):
-        super(TestClaimListener, self).setUp()
+        super().setUp()
         self.client = fake_client.FakeClient()
         self.addCleanup(self.client.stop)
         self.board = jobs.fetch('test', 'zookeeper', client=self.client)
@@ -315,7 +313,7 @@ class TestLoggingListeners(test.TestCase
         with logging_listeners.LoggingListener(e, log=log):
             e.run()
         self.assertGreater(0, handler.counts[logging.DEBUG])
-        for levelno in _LOG_LEVELS - set([logging.DEBUG]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG}:
             self.assertEqual(0, handler.counts[levelno])
         self.assertEqual([], handler.exc_infos)
 
@@ -329,7 +327,7 @@ class TestLoggingListeners(test.TestCase
         with listener:
             e.run()
         self.assertGreater(0, handler.counts[logging.INFO])
-        for levelno in _LOG_LEVELS - set([logging.INFO]):
+        for levelno in _LOG_LEVELS - {logging.INFO}:
             self.assertEqual(0, handler.counts[levelno])
         self.assertEqual([], handler.exc_infos)
 
@@ -341,7 +339,7 @@ class TestLoggingListeners(test.TestCase
         with logging_listeners.LoggingListener(e, log=log):
             self.assertRaises(RuntimeError, e.run)
         self.assertGreater(0, handler.counts[logging.DEBUG])
-        for levelno in _LOG_LEVELS - set([logging.DEBUG]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG}:
             self.assertEqual(0, handler.counts[levelno])
         self.assertEqual(1, len(handler.exc_infos))
 
@@ -353,7 +351,7 @@ class TestLoggingListeners(test.TestCase
         with logging_listeners.DynamicLoggingListener(e, log=log):
             e.run()
         self.assertGreater(0, handler.counts[logging.DEBUG])
-        for levelno in _LOG_LEVELS - set([logging.DEBUG]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG}:
             self.assertEqual(0, handler.counts[levelno])
         self.assertEqual([], handler.exc_infos)
 
@@ -367,7 +365,7 @@ class TestLoggingListeners(test.TestCase
         self.assertGreater(0, handler.counts[logging.WARNING])
         self.assertGreater(0, handler.counts[logging.DEBUG])
         self.assertEqual(1, len(handler.exc_infos))
-        for levelno in _LOG_LEVELS - set([logging.DEBUG, logging.WARNING]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG, logging.WARNING}:
             self.assertEqual(0, handler.counts[levelno])
 
     def test_dynamic_failure_customized_level(self):
@@ -382,5 +380,5 @@ class TestLoggingListeners(test.TestCase
         self.assertGreater(0, handler.counts[logging.ERROR])
         self.assertGreater(0, handler.counts[logging.DEBUG])
         self.assertEqual(1, len(handler.exc_infos))
-        for levelno in _LOG_LEVELS - set([logging.DEBUG, logging.ERROR]):
+        for levelno in _LOG_LEVELS - {logging.DEBUG, logging.ERROR}:
             self.assertEqual(0, handler.counts[levelno])
diff -pruN 5.12.0-2/taskflow/tests/unit/test_mapfunctor_task.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_mapfunctor_task.py
--- 5.12.0-2/taskflow/tests/unit/test_mapfunctor_task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_mapfunctor_task.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright 2015 Hewlett-Packard Development Company, L.P.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ square = lambda x: x * x
 class MapFunctorTaskTest(test.TestCase):
 
     def setUp(self):
-        super(MapFunctorTaskTest, self).setUp()
+        super().setUp()
 
         self.flow_store = {
             'a': 1,
diff -pruN 5.12.0-2/taskflow/tests/unit/test_notifier.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_notifier.py
--- 5.12.0-2/taskflow/tests/unit/test_notifier.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_notifier.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -57,7 +55,7 @@ class NotifierTest(test.TestCase):
         def call_me(state, details):
             pass
 
-        class A(object):
+        class A:
             def call_me_too(self, state, details):
                 pass
 
diff -pruN 5.12.0-2/taskflow/tests/unit/test_progress.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_progress.py
--- 5.12.0-2/taskflow/tests/unit/test_progress.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_progress.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -26,7 +24,7 @@ from taskflow.utils import persistence_u
 
 class ProgressTask(task.Task):
     def __init__(self, name, segments):
-        super(ProgressTask, self).__init__(name=name)
+        super().__init__(name=name)
         self._segments = segments
 
     def execute(self):
@@ -57,7 +55,7 @@ class TestProgress(test.TestCase):
         return e
 
     def tearDown(self):
-        super(TestProgress, self).tearDown()
+        super().tearDown()
         with contextlib.closing(impl_memory.MemoryBackend({})) as be:
             with contextlib.closing(be.get_connection()) as conn:
                 conn.clear_all()
diff -pruN 5.12.0-2/taskflow/tests/unit/test_reducefunctor_task.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_reducefunctor_task.py
--- 5.12.0-2/taskflow/tests/unit/test_reducefunctor_task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_reducefunctor_task.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright 2015 Hewlett-Packard Development Company, L.P.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ multiply = lambda x, y: x * y
 class ReduceFunctorTaskTest(test.TestCase):
 
     def setUp(self):
-        super(ReduceFunctorTaskTest, self).setUp()
+        super().setUp()
 
         self.flow_store = {
             'a': 1,
diff -pruN 5.12.0-2/taskflow/tests/unit/test_retries.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_retries.py
--- 5.12.0-2/taskflow/tests/unit/test_retries.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_retries.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,11 +28,6 @@ from taskflow.tests import utils
 from taskflow.types import failure
 from taskflow.utils import eventlet_utils as eu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 class FailingRetry(retry.Retry):
 
@@ -740,7 +733,7 @@ class RetryTest(utils.EngineTestBase):
         self.assertEqual(expected, capturer.values)
 
     def test_for_each_with_set(self):
-        collection = set([3, 2, 5])
+        collection = {3, 2, 5}
         retry1 = retry.ForEach(collection, 'r1', provides='x')
         flow = lf.Flow('flow-1', retry1).add(utils.FailingTaskWithOneArg('t1'))
         engine = self._make_engine(flow)
@@ -1368,20 +1361,3 @@ class ParallelEngineWithEventletTest(Ret
                                      engine='parallel',
                                      executor=executor,
                                      defer_reverts=defer_reverts)
-
-
-@testtools.skipIf(pe is None, 'process_executor is not available')
-class ParallelEngineWithProcessTest(RetryTest, test.TestCase):
-    _EXECUTOR_WORKERS = 2
-
-    def _make_engine(self, flow, defer_reverts=None, flow_detail=None,
-                     executor=None):
-        if executor is None:
-            executor = 'processes'
-        return taskflow.engines.load(flow,
-                                     flow_detail=flow_detail,
-                                     engine='parallel',
-                                     backend=self.backend,
-                                     executor=executor,
-                                     max_workers=self._EXECUTOR_WORKERS,
-                                     defer_reverts=defer_reverts)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_states.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_states.py
--- 5.12.0-2/taskflow/tests/unit/test_states.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_states.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_storage.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_storage.py
--- 5.12.0-2/taskflow/tests/unit/test_storage.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_storage.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,9 +28,9 @@ from taskflow.types import failure
 from taskflow.utils import persistence_utils as p_utils
 
 
-class StorageTestMixin(object):
+class StorageTestMixin:
     def setUp(self):
-        super(StorageTestMixin, self).setUp()
+        super().setUp()
         self.backend = None
         self.thread_count = 50
 
@@ -40,7 +38,7 @@ class StorageTestMixin(object):
         with contextlib.closing(self.backend) as be:
             with contextlib.closing(be.get_connection()) as conn:
                 conn.clear_all()
-        super(StorageTestMixin, self).tearDown()
+        super().tearDown()
 
     @staticmethod
     def _run_many_threads(threads):
@@ -357,14 +355,14 @@ class StorageTestMixin(object):
         s.inject({'foo': 'bar', 'spam': 'eggs'})
         self.assertEqual({'viking': 'eggs'},
                          s.fetch_mapped_args({'viking': 'spam'},
-                                             optional_args=set(['viking'])))
+                                             optional_args={'viking'}))
 
     def test_fetch_optional_args_not_found(self):
         s = self._get_storage()
         s.inject({'foo': 'bar', 'spam': 'eggs'})
         self.assertEqual({},
                          s.fetch_mapped_args({'viking': 'helmet'},
-                                             optional_args=set(['viking'])))
+                                             optional_args={'viking'}))
 
     def test_set_and_get_task_state(self):
         s = self._get_storage()
@@ -437,7 +435,7 @@ class StorageTestMixin(object):
 
     def test_result_is_checked(self):
         s = self._get_storage()
-        s.ensure_atom(test_utils.NoopTask('my task', provides=set(['result'])))
+        s.ensure_atom(test_utils.NoopTask('my task', provides={'result'}))
         s.save('my task', {})
         self.assertRaisesRegex(exceptions.NotFound,
                                '^Unable to find result', s.fetch, 'result')
@@ -539,7 +537,7 @@ class StorageTestMixin(object):
         s = self._get_storage()
         s.ensure_atom(t)
         missing = s.fetch_unsatisfied_args(t.name, t.rebind)
-        self.assertEqual(set(['x']), missing)
+        self.assertEqual({'x'}, missing)
         s.inject_atom_args(t.name, {'x': 2}, transient=False)
         missing = s.fetch_unsatisfied_args(t.name, t.rebind)
         self.assertEqual(set(), missing)
@@ -551,7 +549,7 @@ class StorageTestMixin(object):
         s = self._get_storage()
         s.ensure_atom(t)
         missing = s.fetch_unsatisfied_args(t.name, t.rebind)
-        self.assertEqual(set(['x']), missing)
+        self.assertEqual({'x'}, missing)
         s.inject_atom_args(t.name, {'x': 2}, transient=False)
         s.inject_atom_args(t.name, {'x': 3}, transient=True)
         missing = s.fetch_unsatisfied_args(t.name, t.rebind)
@@ -589,13 +587,13 @@ class StorageTestMixin(object):
 
 class StorageMemoryTest(StorageTestMixin, test.TestCase):
     def setUp(self):
-        super(StorageMemoryTest, self).setUp()
+        super().setUp()
         self.backend = backends.fetch({'connection': 'memory://'})
 
 
 class StorageSQLTest(StorageTestMixin, test.TestCase):
     def setUp(self):
-        super(StorageSQLTest, self).setUp()
+        super().setUp()
         self.backend = backends.fetch({'connection': 'sqlite://'})
         with contextlib.closing(self.backend.get_connection()) as conn:
             conn.upgrade()
diff -pruN 5.12.0-2/taskflow/tests/unit/test_suspend.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_suspend.py
--- 5.12.0-2/taskflow/tests/unit/test_suspend.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_suspend.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,23 +23,18 @@ from taskflow import test
 from taskflow.tests import utils
 from taskflow.utils import eventlet_utils as eu
 
-try:
-    from taskflow.engines.action_engine import process_executor as pe
-except ImportError:
-    pe = None
-
 
 class SuspendingListener(utils.CaptureListener):
 
     def __init__(self, engine,
                  task_name, task_state, capture_flow=False):
-        super(SuspendingListener, self).__init__(
+        super().__init__(
             engine,
             capture_flow=capture_flow)
         self._revert_match = (task_name, task_state)
 
     def _task_receiver(self, state, details):
-        super(SuspendingListener, self)._task_receiver(state, details)
+        super()._task_receiver(state, details)
         if (details['task_name'], state) == self._revert_match:
             self._engine.suspend()
 
@@ -227,17 +220,3 @@ class ParallelEngineWithEventletTest(Sus
         return taskflow.engines.load(flow, flow_detail=flow_detail,
                                      backend=self.backend, engine='parallel',
                                      executor=executor)
-
-
-@testtools.skipIf(pe is None, 'process_executor is not available')
-class ParallelEngineWithProcessTest(SuspendTest, test.TestCase):
-    _EXECUTOR_WORKERS = 2
-
-    def _make_engine(self, flow, flow_detail=None, executor=None):
-        if executor is None:
-            executor = 'processes'
-        return taskflow.engines.load(flow, flow_detail=flow_detail,
-                                     engine='parallel',
-                                     backend=self.backend,
-                                     executor=executor,
-                                     max_workers=self._EXECUTOR_WORKERS)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_task.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_task.py
--- 5.12.0-2/taskflow/tests/unit/test_task.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_task.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright 2015 Hewlett-Packard Development Company, L.P.
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
@@ -81,7 +79,7 @@ class TaskTest(test.TestCase):
 
     def test_generated_name(self):
         my_task = MyTask()
-        self.assertEqual('%s.%s' % (__name__, 'MyTask'),
+        self.assertEqual('{}.{}'.format(__name__, 'MyTask'),
                          my_task.name)
 
     def test_task_str(self):
@@ -121,7 +119,7 @@ class TaskTest(test.TestCase):
         }
         self.assertEqual(expected,
                          my_task.rebind)
-        self.assertEqual(set(['spam', 'eggs', 'context']),
+        self.assertEqual({'spam', 'eggs', 'context'},
                          my_task.requires)
 
     def test_requires_amended(self):
@@ -150,12 +148,12 @@ class TaskTest(test.TestCase):
 
     def test_requires_ignores_optional(self):
         my_task = DefaultArgTask()
-        self.assertEqual(set(['spam']), my_task.requires)
-        self.assertEqual(set(['eggs']), my_task.optional)
+        self.assertEqual({'spam'}, my_task.requires)
+        self.assertEqual({'eggs'}, my_task.optional)
 
     def test_requires_allows_optional(self):
         my_task = DefaultArgTask(requires=('spam', 'eggs'))
-        self.assertEqual(set(['spam', 'eggs']), my_task.requires)
+        self.assertEqual({'spam', 'eggs'}, my_task.requires)
         self.assertEqual(set(), my_task.optional)
 
     def test_rebind_includes_optional(self):
@@ -174,7 +172,7 @@ class TaskTest(test.TestCase):
             'context': 'c'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'c']),
+        self.assertEqual({'a', 'b', 'c'},
                          my_task.requires)
 
     def test_rebind_partial(self):
@@ -185,7 +183,7 @@ class TaskTest(test.TestCase):
             'context': 'context'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'context']),
+        self.assertEqual({'a', 'b', 'context'},
                          my_task.requires)
 
     def test_rebind_unknown(self):
@@ -208,7 +206,7 @@ class TaskTest(test.TestCase):
             'eggs': 'c'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'c']),
+        self.assertEqual({'a', 'b', 'c'},
                          my_task.requires)
 
     def test_rebind_list_partial(self):
@@ -219,7 +217,7 @@ class TaskTest(test.TestCase):
             'eggs': 'eggs'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'eggs']),
+        self.assertEqual({'a', 'b', 'eggs'},
                          my_task.requires)
 
     def test_rebind_list_more(self):
@@ -234,7 +232,7 @@ class TaskTest(test.TestCase):
             'c': 'c'
         }
         self.assertEqual(expected, my_task.rebind)
-        self.assertEqual(set(['a', 'b', 'c']),
+        self.assertEqual({'a', 'b', 'c'},
                          my_task.requires)
 
     def test_rebind_list_bad_value(self):
@@ -243,12 +241,12 @@ class TaskTest(test.TestCase):
 
     def test_default_provides(self):
         my_task = DefaultProvidesTask()
-        self.assertEqual(set(['def']), my_task.provides)
+        self.assertEqual({'def'}, my_task.provides)
         self.assertEqual({'def': None}, my_task.save_as)
 
     def test_default_provides_can_be_overridden(self):
         my_task = DefaultProvidesTask(provides=('spam', 'eggs'))
-        self.assertEqual(set(['spam', 'eggs']), my_task.provides)
+        self.assertEqual({'spam', 'eggs'}, my_task.provides)
         self.assertEqual({'spam': 0, 'eggs': 1}, my_task.save_as)
 
     def test_update_progress_within_bounds(self):
@@ -366,7 +364,7 @@ class TaskTest(test.TestCase):
         my_task = SeparateRevertTask(rebind=('a',), revert_rebind=('b',))
         self.assertEqual({'execute_arg': 'a'}, my_task.rebind)
         self.assertEqual({'revert_arg': 'b'}, my_task.revert_rebind)
-        self.assertEqual(set(['a', 'b']),
+        self.assertEqual({'a', 'b'},
                          my_task.requires)
 
         my_task = SeparateRevertTask(requires='execute_arg',
@@ -374,13 +372,13 @@ class TaskTest(test.TestCase):
 
         self.assertEqual({'execute_arg': 'execute_arg'}, my_task.rebind)
         self.assertEqual({'revert_arg': 'revert_arg'}, my_task.revert_rebind)
-        self.assertEqual(set(['execute_arg', 'revert_arg']),
+        self.assertEqual({'execute_arg', 'revert_arg'},
                          my_task.requires)
 
     def test_separate_revert_optional_args(self):
         my_task = SeparateRevertOptionalTask()
-        self.assertEqual(set(['execute_arg']), my_task.optional)
-        self.assertEqual(set(['revert_arg']), my_task.revert_optional)
+        self.assertEqual({'execute_arg'}, my_task.optional)
+        self.assertEqual({'revert_arg'}, my_task.revert_optional)
 
     def test_revert_kwargs(self):
         my_task = RevertKwargsTask()
@@ -389,7 +387,7 @@ class TaskTest(test.TestCase):
         self.assertEqual(expected_rebind, my_task.rebind)
         expected_rebind = {'execute_arg1': 'execute_arg1'}
         self.assertEqual(expected_rebind, my_task.revert_rebind)
-        self.assertEqual(set(['execute_arg1', 'execute_arg2']),
+        self.assertEqual({'execute_arg1', 'execute_arg2'},
                          my_task.requires)
 
 
diff -pruN 5.12.0-2/taskflow/tests/unit/test_types.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_types.py
--- 5.12.0-2/taskflow/tests/unit/test_types.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_types.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -66,9 +64,9 @@ class GraphTest(test.TestCase):
         g.add_node("b")
         g.add_node("c")
         g.add_edge("b", "c")
-        self.assertEqual(set(['a', 'b']),
+        self.assertEqual({'a', 'b'},
                          set(g.no_predecessors_iter()))
-        self.assertEqual(set(['a', 'c']),
+        self.assertEqual({'a', 'c'},
                          set(g.no_successors_iter()))
 
     def test_directed(self):
@@ -534,8 +532,8 @@ CEO
     def test_dfs_itr(self):
         root = self._make_species()
         things = list([n.item for n in root.dfs_iter(include_self=True)])
-        self.assertEqual(set(['animal', 'reptile', 'mammal', 'horse',
-                              'primate', 'monkey', 'human']), set(things))
+        self.assertEqual({'animal', 'reptile', 'mammal', 'horse',
+                          'primate', 'monkey', 'human'}, set(things))
 
     def test_dfs_itr_left_to_right(self):
         root = self._make_species()
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,7 +28,7 @@ from taskflow.utils import threading_uti
 class CachedPropertyTest(test.TestCase):
     def test_attribute_caching(self):
 
-        class A(object):
+        class A:
             def __init__(self):
                 self.call_counter = 0
 
@@ -46,7 +44,7 @@ class CachedPropertyTest(test.TestCase):
 
     def test_custom_property(self):
 
-        class A(object):
+        class A:
             @misc.cachedproperty('_c')
             def b(self):
                 return 'b'
@@ -60,7 +58,7 @@ class CachedPropertyTest(test.TestCase):
         def try_del(a):
             del a.b
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 return 'b'
@@ -75,7 +73,7 @@ class CachedPropertyTest(test.TestCase):
         def try_set(a):
             a.b = 'c'
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 return 'b'
@@ -87,7 +85,7 @@ class CachedPropertyTest(test.TestCase):
 
     def test_documented_property(self):
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 """I like bees."""
@@ -97,7 +95,7 @@ class CachedPropertyTest(test.TestCase):
 
     def test_undocumented_property(self):
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 return 'b'
@@ -107,7 +105,7 @@ class CachedPropertyTest(test.TestCase):
     def test_threaded_access_property(self):
         called = collections.deque()
 
-        class A(object):
+        class A:
             @misc.cachedproperty
             def b(self):
                 called.append(1)
@@ -311,7 +309,7 @@ class TestClamping(test.TestCase):
 class TestIterable(test.TestCase):
     def test_string_types(self):
         self.assertFalse(misc.is_iterable('string'))
-        self.assertFalse(misc.is_iterable(u'string'))
+        self.assertFalse(misc.is_iterable('string'))
 
     def test_list(self):
         self.assertTrue(misc.is_iterable(list()))
@@ -347,7 +345,7 @@ class TestSafeCopyDictRaises(testscenari
     scenarios = [
         ('list', {'original': [1, 2], 'exception': TypeError}),
         ('tuple', {'original': (1, 2), 'exception': TypeError}),
-        ('set', {'original': set([1, 2]), 'exception': TypeError}),
+        ('set', {'original': {1, 2}, 'exception': TypeError}),
     ]
 
     def test_exceptions(self):
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_async_utils.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_async_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_async_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_async_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_binary.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_binary.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_binary.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_binary.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -38,15 +36,15 @@ class BinaryEncodeTest(test.TestCase):
         self._check(data, data)
 
     def test_simple_text(self):
-        self._check(u'hello', _bytes('hello'))
+        self._check('hello', _bytes('hello'))
 
     def test_unicode_text(self):
-        self._check(u'привет', _bytes('привет'))
+        self._check('привет', _bytes('привет'))
 
     def test_unicode_other_encoding(self):
-        result = misc.binary_encode(u'mañana', 'latin-1')
+        result = misc.binary_encode('mañana', 'latin-1')
         self.assertIsInstance(result, bytes)
-        self.assertEqual(u'mañana'.encode('latin-1'), result)
+        self.assertEqual('mañana'.encode('latin-1'), result)
 
 
 class BinaryDecodeTest(test.TestCase):
@@ -57,24 +55,24 @@ class BinaryDecodeTest(test.TestCase):
         self.assertEqual(expected_result, result)
 
     def test_simple_text(self):
-        data = u'hello'
+        data = 'hello'
         self._check(data, data)
 
     def test_unicode_text(self):
-        data = u'привет'
+        data = 'привет'
         self._check(data, data)
 
     def test_simple_binary(self):
-        self._check(_bytes('hello'), u'hello')
+        self._check(_bytes('hello'), 'hello')
 
     def test_unicode_binary(self):
-        self._check(_bytes('привет'), u'привет')
+        self._check(_bytes('привет'), 'привет')
 
     def test_unicode_other_encoding(self):
-        data = u'mañana'.encode('latin-1')
+        data = 'mañana'.encode('latin-1')
         result = misc.binary_decode(data, 'latin-1')
         self.assertIsInstance(result, str)
-        self.assertEqual(u'mañana', result)
+        self.assertEqual('mañana', result)
 
 
 class DecodeJsonTest(test.TestCase):
@@ -85,11 +83,11 @@ class DecodeJsonTest(test.TestCase):
 
     def test_it_works_with_unicode(self):
         data = _bytes('{"foo": "фуу"}')
-        self.assertEqual({"foo": u'фуу'}, misc.decode_json(data))
+        self.assertEqual({"foo": 'фуу'}, misc.decode_json(data))
 
     def test_handles_invalid_unicode(self):
         self.assertRaises(ValueError, misc.decode_json,
-                          '{"\xf1": 1}'.encode('latin-1'))
+                          b'{"\xf1": 1}')
 
     def test_handles_bad_json(self):
         self.assertRaises(ValueError, misc.decode_json,
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_iter_utils.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_iter_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_iter_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_iter_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -140,7 +138,7 @@ class IterUtilsTest(test.TestCase):
         self.assertRaises(ValueError, iter_utils.while_is_not, 2, 'a')
 
     def test_while_is_not(self):
-        class Dummy(object):
+        class Dummy:
             def __init__(self, char):
                 self.char = char
         dummy_list = [Dummy(a)
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_kazoo_utils.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_kazoo_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_kazoo_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_kazoo_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) Red Hat
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/test_utils_threading_utils.py 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_threading_utils.py
--- 5.12.0-2/taskflow/tests/unit/test_utils_threading_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/test_utils_threading_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -53,7 +51,7 @@ class TestThreadBundle(test.TestCase):
     thread_count = 5
 
     def setUp(self):
-        super(TestThreadBundle, self).setUp()
+        super().setUp()
         self.bundle = tu.ThreadBundle()
         self.death = threading.Event()
         self.addCleanup(self.bundle.stop)
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_creation.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_creation.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_creation.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_creation.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_dispatcher.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_dispatcher.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_dispatcher.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_dispatcher.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_endpoint.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_endpoint.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_endpoint.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_endpoint.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,7 +23,7 @@ from taskflow.tests import utils
 class Task(task.Task):
 
     def __init__(self, a, *args, **kwargs):
-        super(Task, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     def execute(self, *args, **kwargs):
         pass
@@ -34,7 +32,7 @@ class Task(task.Task):
 class TestEndpoint(test.TestCase):
 
     def setUp(self):
-        super(TestEndpoint, self).setUp()
+        super().setUp()
         self.task_cls = utils.TaskOneReturn
         self.task_uuid = 'task-uuid'
         self.task_args = {'context': 'context'}
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_executor.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_executor.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_executor.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_executor.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,7 +27,7 @@ from taskflow.types import failure
 class TestWorkerTaskExecutor(test.MockTestCase):
 
     def setUp(self):
-        super(TestWorkerTaskExecutor, self).setUp()
+        super().setUp()
         self.task = test_utils.DummyTask()
         self.task_uuid = 'task-uuid'
         self.task_args = {'a': 'a'}
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_message_pump.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_message_pump.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_message_pump.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_message_pump.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_pipeline.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_pipeline.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_pipeline.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_pipeline.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_protocol.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_protocol.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_protocol.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_protocol.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,7 +23,7 @@ from taskflow.tests import utils
 from taskflow.types import failure
 
 
-class Unserializable(object):
+class Unserializable:
     pass
 
 
@@ -98,7 +96,7 @@ class TestProtocolValidation(test.TestCa
 class TestProtocol(test.TestCase):
 
     def setUp(self):
-        super(TestProtocol, self).setUp()
+        super().setUp()
         self.task = utils.DummyTask()
         self.task_uuid = 'task-uuid'
         self.task_action = 'execute'
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_proxy.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_proxy.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_proxy.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_proxy.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -25,7 +23,7 @@ from taskflow.utils import threading_uti
 class TestProxy(test.MockTestCase):
 
     def setUp(self):
-        super(TestProxy, self).setUp()
+        super().setUp()
         self.topic = 'test-topic'
         self.broker_url = 'test-url'
         self.exchange = 'test-exchange'
@@ -72,7 +70,7 @@ class TestProxy(test.MockTestCase):
         self.resetMasterMock()
 
     def _queue_name(self, topic):
-        return "%s_%s" % (self.exchange, topic)
+        return "{}_{}".format(self.exchange, topic)
 
     def proxy_start_calls(self, calls, exc_type=mock.ANY):
         return [
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_server.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_server.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_server.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_server.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -27,7 +25,7 @@ from taskflow.types import failure
 class TestServer(test.MockTestCase):
 
     def setUp(self):
-        super(TestServer, self).setUp()
+        super().setUp()
         self.server_topic = 'server-topic'
         self.server_exchange = 'server-exchange'
         self.broker_url = 'test-url'
@@ -140,8 +138,8 @@ class TestServer(test.MockTestCase):
         self.assertEqual(
             (self.task.name, self.task.name, 'revert',
              dict(arguments=self.task_args,
-                  failures=dict((i, utils.FailureMatcher(f))
-                                for i, f in failures.items()))),
+                  failures={i: utils.FailureMatcher(f)
+                            for i, f in failures.items()})),
             (task_cls, task_name, action, task_args))
 
     @mock.patch("taskflow.engines.worker_based.server.LOG.critical")
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_types.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_types.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_types.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_types.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/tests/unit/worker_based/test_worker.py 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_worker.py
--- 5.12.0-2/taskflow/tests/unit/worker_based/test_worker.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/unit/worker_based/test_worker.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -27,7 +25,7 @@ from taskflow.tests import utils
 class TestWorker(test.MockTestCase):
 
     def setUp(self):
-        super(TestWorker, self).setUp()
+        super().setUp()
         self.task_cls = utils.DummyTask
         self.task_name = reflection.get_class_name(self.task_cls)
         self.broker_url = 'test-url'
diff -pruN 5.12.0-2/taskflow/tests/utils.py 6.0.2-0ubuntu1/taskflow/tests/utils.py
--- 5.12.0-2/taskflow/tests/utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/tests/utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -151,7 +149,7 @@ class GiveBackRevert(task.Task):
             return result + 1
 
 
-class FakeTask(object):
+class FakeTask:
 
     def execute(self, **kwargs):
         pass
@@ -169,16 +167,14 @@ RUNTIME_ERROR_CLASSES = ['RuntimeError',
 
 class ProvidesRequiresTask(task.Task):
     def __init__(self, name, provides, requires, return_tuple=True):
-        super(ProvidesRequiresTask, self).__init__(name=name,
-                                                   provides=provides,
-                                                   requires=requires)
+        super().__init__(name=name, provides=provides, requires=requires)
         self.return_tuple = isinstance(provides, (tuple, list))
 
     def execute(self, *args, **kwargs):
         if self.return_tuple:
             return tuple(range(len(self.provides)))
         else:
-            return dict((k, k) for k in self.provides)
+            return {k: k for k in self.provides}
 
 
 # Used to format the captured values into strings (which are easier to
@@ -197,7 +193,7 @@ class CaptureListener(capturing.CaptureL
         name_postfix, name_key = LOOKUP_NAME_POSTFIX[kind]
         name = details[name_key] + name_postfix
         if 'result' in details:
-            name += ' %s(%s)' % (state, details['result'])
+            name += ' {}({})'.format(state, details['result'])
         else:
             name += " %s" % state
         return name
@@ -387,9 +383,9 @@ class SleepTask(task.Task):
         time.sleep(duration)
 
 
-class EngineTestBase(object):
+class EngineTestBase:
     def setUp(self):
-        super(EngineTestBase, self).setUp()
+        super().setUp()
         self.backend = impl_memory.MemoryBackend(conf={})
 
     def tearDown(self):
@@ -397,7 +393,7 @@ class EngineTestBase(object):
         with contextlib.closing(self.backend) as be:
             with contextlib.closing(be.get_connection()) as conn:
                 conn.clear_all()
-        super(EngineTestBase, self).tearDown()
+        super().tearDown()
 
     def _make_engine(self, flow, **kwargs):
         raise exceptions.NotImplementedError("_make_engine() must be"
@@ -405,7 +401,7 @@ class EngineTestBase(object):
                                              " desired")
 
 
-class FailureMatcher(object):
+class FailureMatcher:
     """Needed for failure objects comparison."""
 
     def __init__(self, failure):
@@ -433,7 +429,7 @@ class OneReturnRetry(retry.AlwaysRevert)
 class ConditionalTask(ProgressingTask):
 
     def execute(self, x, y):
-        super(ConditionalTask, self).execute()
+        super().execute()
         if x != y:
             raise RuntimeError('Woot!')
 
@@ -441,7 +437,7 @@ class ConditionalTask(ProgressingTask):
 class WaitForOneFromTask(ProgressingTask):
 
     def __init__(self, name, wait_for, wait_states, **kwargs):
-        super(WaitForOneFromTask, self).__init__(name, **kwargs)
+        super().__init__(name, **kwargs)
         if isinstance(wait_for, str):
             self.wait_for = [wait_for]
         else:
@@ -458,7 +454,7 @@ class WaitForOneFromTask(ProgressingTask
                                'for %s to change state to %s'
                                % (WAIT_TIMEOUT, self.wait_for,
                                   self.wait_states))
-        return super(WaitForOneFromTask, self).execute()
+        return super().execute()
 
     def callback(self, state, details):
         name = details.get('task_name', None)
diff -pruN 5.12.0-2/taskflow/types/entity.py 6.0.2-0ubuntu1/taskflow/types/entity.py
--- 5.12.0-2/taskflow/types/entity.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/types/entity.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Rackspace Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -15,7 +13,7 @@
 #    under the License.
 
 
-class Entity(object):
+class Entity:
     """Entity object that identifies some resource/item/other.
 
     :ivar kind: **immutable** type/kind that identifies this
diff -pruN 5.12.0-2/taskflow/types/failure.py 6.0.2-0ubuntu1/taskflow/types/failure.py
--- 5.12.0-2/taskflow/types/failure.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/types/failure.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -433,8 +431,8 @@ class Failure():
         if not self._exc_type_names:
             buf.write('Failure: %s' % (self._exception_str))
         else:
-            buf.write('Failure: %s: %s' % (self._exc_type_names[0],
-                                           self._exception_str))
+            buf.write('Failure: {}: {}'.format(self._exc_type_names[0],
+                                               self._exception_str))
         if traceback:
             if self._traceback_str is not None:
                 traceback_str = self._traceback_str.rstrip()
@@ -452,8 +450,7 @@ class Failure():
 
     def __iter__(self):
         """Iterate over exception type names."""
-        for et in self._exc_type_names:
-            yield et
+        yield from self._exc_type_names
 
     def __getstate__(self):
         dct = self.to_dict()
diff -pruN 5.12.0-2/taskflow/types/graph.py 6.0.2-0ubuntu1/taskflow/types/graph.py
--- 5.12.0-2/taskflow/types/graph.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/types/graph.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -31,15 +29,15 @@ def _common_format(g, edge_notation):
     lines.append("Nodes: %s" % g.number_of_nodes())
     for n, n_data in g.nodes(data=True):
         if n_data:
-            lines.append("  - %s (%s)" % (n, n_data))
+            lines.append("  - {} ({})".format(n, n_data))
         else:
             lines.append("  - %s" % n)
     lines.append("Edges: %s" % g.number_of_edges())
     for (u, v, e_data) in g.edges(data=True):
         if e_data:
-            lines.append("  %s %s %s (%s)" % (u, edge_notation, v, e_data))
+            lines.append("  {} {} {} ({})".format(u, edge_notation, v, e_data))
         else:
-            lines.append("  %s %s %s" % (u, edge_notation, v))
+            lines.append("  {} {} {}".format(u, edge_notation, v))
     return lines
 
 
@@ -47,8 +45,7 @@ class Graph(nx.Graph):
     """A graph subclass with useful utility functions."""
 
     def __init__(self, incoming_graph_data=None, name=''):
-        super(Graph, self).__init__(incoming_graph_data=incoming_graph_data,
-                                    name=name)
+        super().__init__(incoming_graph_data=incoming_graph_data, name=name)
         self.frozen = False
 
     def freeze(self):
@@ -68,14 +65,14 @@ class Graph(nx.Graph):
     def add_edge(self, u, v, attr_dict=None, **attr):
         """Add an edge between u and v."""
         if attr_dict is not None:
-            return super(Graph, self).add_edge(u, v, **attr_dict)
-        return super(Graph, self).add_edge(u, v, **attr)
+            return super().add_edge(u, v, **attr_dict)
+        return super().add_edge(u, v, **attr)
 
     def add_node(self, n, attr_dict=None, **attr):
         """Add a single node n and update node attributes."""
         if attr_dict is not None:
-            return super(Graph, self).add_node(n, **attr_dict)
-        return super(Graph, self).add_node(n, **attr)
+            return super().add_node(n, **attr_dict)
+        return super().add_node(n, **attr)
 
     def fresh_copy(self):
         """Return a fresh copy graph with the same data structure.
@@ -91,8 +88,7 @@ class DiGraph(nx.DiGraph):
     """A directed graph subclass with useful utility functions."""
 
     def __init__(self, incoming_graph_data=None, name=''):
-        super(DiGraph, self).__init__(incoming_graph_data=incoming_graph_data,
-                                      name=name)
+        super().__init__(incoming_graph_data=incoming_graph_data, name=name)
         self.frozen = False
 
     def freeze(self):
@@ -165,7 +161,7 @@ class DiGraph(nx.DiGraph):
         NOTE(harlowja): predecessor cycles (if they exist) will not be iterated
         over more than once (this prevents infinite iteration).
         """
-        visited = set([n])
+        visited = {n}
         queue = collections.deque(self.predecessors(n))
         while queue:
             pred = queue.popleft()
@@ -179,14 +175,14 @@ class DiGraph(nx.DiGraph):
     def add_edge(self, u, v, attr_dict=None, **attr):
         """Add an edge between u and v."""
         if attr_dict is not None:
-            return super(DiGraph, self).add_edge(u, v, **attr_dict)
-        return super(DiGraph, self).add_edge(u, v, **attr)
+            return super().add_edge(u, v, **attr_dict)
+        return super().add_edge(u, v, **attr)
 
     def add_node(self, n, attr_dict=None, **attr):
         """Add a single node n and update node attributes."""
         if attr_dict is not None:
-            return super(DiGraph, self).add_node(n, **attr_dict)
-        return super(DiGraph, self).add_node(n, **attr)
+            return super().add_node(n, **attr_dict)
+        return super().add_node(n, **attr)
 
     def fresh_copy(self):
         """Return a fresh copy graph with the same data structure.
diff -pruN 5.12.0-2/taskflow/types/latch.py 6.0.2-0ubuntu1/taskflow/types/latch.py
--- 5.12.0-2/taskflow/types/latch.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/types/latch.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +17,7 @@ import threading
 from oslo_utils import timeutils
 
 
-class Latch(object):
+class Latch:
     """A class that ensures N-arrivals occur before unblocking.
 
     TODO(harlowja): replace with http://bugs.python.org/issue8777 when we no
diff -pruN 5.12.0-2/taskflow/types/notifier.py 6.0.2-0ubuntu1/taskflow/types/notifier.py
--- 5.12.0-2/taskflow/types/notifier.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/types/notifier.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -24,7 +22,7 @@ from oslo_utils import reflection
 LOG = logging.getLogger(__name__)
 
 
-class Listener(object):
+class Listener:
     """Immutable helper that represents a notification listener/target."""
 
     def __init__(self, callback, args=None, kwargs=None, details_filter=None):
@@ -89,7 +87,7 @@ class Listener(object):
         self._callback(event_type, *self._args, **kwargs)
 
     def __repr__(self):
-        repr_msg = "%s object at 0x%x calling into '%r'" % (
+        repr_msg = "{} object at 0x{:x} calling into '{!r}'".format(
             reflection.get_class_name(self, fully_qualified=False),
             id(self), self._callback)
         if self._details_filter is not None:
@@ -126,7 +124,7 @@ class Listener(object):
         return not self.__eq__(other)
 
 
-class Notifier(object):
+class Notifier:
     """A notification (`pub/sub`_ *like*) helper class.
 
     It is intended to be used to subscribe to notifications of events
@@ -151,7 +149,7 @@ class Notifier(object):
     ANY = '*'
 
     #: Events which can *not* be used to trigger notifications
-    _DISALLOWED_NOTIFICATION_EVENTS = set([ANY])
+    _DISALLOWED_NOTIFICATION_EVENTS = {ANY}
 
     def __init__(self):
         self._topics = collections.defaultdict(list)
@@ -321,7 +319,7 @@ class RestrictedNotifier(Notifier):
     """
 
     def __init__(self, watchable_events, allow_any=True):
-        super(RestrictedNotifier, self).__init__()
+        super().__init__()
         self._watchable_events = frozenset(watchable_events)
         self._allow_any = allow_any
 
@@ -332,8 +330,7 @@ class RestrictedNotifier(Notifier):
         meta-type is not a specific event but is a capture-all that does not
         imply the same meaning as specific event types.
         """
-        for event_type in self._watchable_events:
-            yield event_type
+        yield from self._watchable_events
 
     def can_be_registered(self, event_type):
         """Checks if the event can be registered/subscribed to.
diff -pruN 5.12.0-2/taskflow/types/sets.py 6.0.2-0ubuntu1/taskflow/types/sets.py
--- 5.12.0-2/taskflow/types/sets.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/types/sets.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -57,8 +55,7 @@ class OrderedSet(abc.Set, abc.Hashable):
         return len(self._data)
 
     def __iter__(self):
-        for value in self._data.keys():
-            yield value
+        yield from self._data.keys()
 
     def __setstate__(self, items):
         self.__init__(iterable=iter(items))
@@ -67,7 +64,7 @@ class OrderedSet(abc.Set, abc.Hashable):
         return tuple(self)
 
     def __repr__(self):
-        return "%s(%s)" % (type(self).__name__, list(self))
+        return "{}({})".format(type(self).__name__, list(self))
 
     def copy(self):
         """Return a shallow copy of a set."""
diff -pruN 5.12.0-2/taskflow/types/timing.py 6.0.2-0ubuntu1/taskflow/types/timing.py
--- 5.12.0-2/taskflow/types/timing.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/types/timing.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -17,7 +15,7 @@
 import threading
 
 
-class Timeout(object):
+class Timeout:
     """An object which represents a timeout.
 
     This object has the ability to be interrupted before the actual timeout
diff -pruN 5.12.0-2/taskflow/types/tree.py 6.0.2-0ubuntu1/taskflow/types/tree.py
--- 5.12.0-2/taskflow/types/tree.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/types/tree.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -28,10 +26,10 @@ class FrozenNode(Exception):
     """Exception raised when a frozen node is modified."""
 
     def __init__(self):
-        super(FrozenNode, self).__init__("Frozen node(s) can't be modified")
+        super().__init__("Frozen node(s) can't be modified")
 
 
-class _DFSIter(object):
+class _DFSIter:
     """Depth first iterator (non-recursive) over the child nodes."""
 
     def __init__(self, root, include_self=False, right_to_left=True):
@@ -60,7 +58,7 @@ class _DFSIter(object):
                 stack.extend(iter(node))
 
 
-class _BFSIter(object):
+class _BFSIter:
     """Breadth first iterator (non-recursive) over the child nodes."""
 
     def __init__(self, root, include_self=False, right_to_left=False):
@@ -89,7 +87,7 @@ class _BFSIter(object):
                 q.extend(node.reverse_iter())
 
 
-class Node(object):
+class Node:
     """A n-ary node class that can be used to create tree structures."""
 
     #: Default string prefix used in :py:meth:`.pformat`.
@@ -358,13 +356,11 @@ class Node(object):
 
     def __iter__(self):
         """Iterates over the direct children of this node (right->left)."""
-        for c in self._children:
-            yield c
+        yield from self._children
 
     def reverse_iter(self):
         """Iterates over the direct children of this node (left->right)."""
-        for c in reversed(self._children):
-            yield c
+        yield from reversed(self._children)
 
     def index(self, item):
         """Finds the child index of a given item, searches in added order."""
diff -pruN 5.12.0-2/taskflow/utils/async_utils.py 6.0.2-0ubuntu1/taskflow/utils/async_utils.py
--- 5.12.0-2/taskflow/utils/async_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/async_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/banner.py 6.0.2-0ubuntu1/taskflow/utils/banner.py
--- 5.12.0-2/taskflow/utils/banner.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/banner.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2016 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -74,12 +72,13 @@ def make_banner(what, chapters):
             section_names = sorted(chapter_contents.keys())
             for j, section_name in enumerate(section_names):
                 if j + 1 < len(section_names):
-                    buf.write_nl("  %s => %s"
-                                 % (section_name,
-                                    chapter_contents[section_name]))
+                    buf.write_nl("  {} => {}".format(
+                        section_name,
+                        chapter_contents[section_name]))
                 else:
-                    buf.write("  %s => %s" % (section_name,
-                                              chapter_contents[section_name]))
+                    buf.write("  {} => {}".format(
+                        section_name,
+                        chapter_contents[section_name]))
         elif isinstance(chapter_contents, (list, tuple, set)):
             if isinstance(chapter_contents, set):
                 sections = sorted(chapter_contents)
@@ -87,9 +86,9 @@ def make_banner(what, chapters):
                 sections = chapter_contents
             for j, section in enumerate(sections):
                 if j + 1 < len(sections):
-                    buf.write_nl("  %s. %s" % (j + 1, section))
+                    buf.write_nl("  {}. {}".format(j + 1, section))
                 else:
-                    buf.write("  %s. %s" % (j + 1, section))
+                    buf.write("  {}. {}".format(j + 1, section))
         else:
             raise TypeError("Unsupported chapter contents"
                             " type: one of dict, list, tuple, set expected"
diff -pruN 5.12.0-2/taskflow/utils/eventlet_utils.py 6.0.2-0ubuntu1/taskflow/utils/eventlet_utils.py
--- 5.12.0-2/taskflow/utils/eventlet_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/eventlet_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/iter_utils.py 6.0.2-0ubuntu1/taskflow/utils/iter_utils.py
--- 5.12.0-2/taskflow/utils/iter_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/iter_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/kazoo_utils.py 6.0.2-0ubuntu1/taskflow/utils/kazoo_utils.py
--- 5.12.0-2/taskflow/utils/kazoo_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/kazoo_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -41,7 +39,7 @@ def _parse_hosts(hosts):
     if isinstance(hosts, (dict)):
         host_ports = []
         for (k, v) in hosts.items():
-            host_ports.append("%s:%s" % (k, v))
+            host_ports.append("{}:{}".format(k, v))
         hosts = host_ports
     if isinstance(hosts, (list, set, tuple)):
         return ",".join([str(h) for h in hosts])
@@ -65,7 +63,7 @@ def prettify_failures(failures, limit=-1
             pass
         pretty_op += "(%s)" % (", ".join(selected_attrs))
         pretty_cause = reflection.get_class_name(r, fully_qualified=False)
-        prettier.append("%s@%s" % (pretty_cause, pretty_op))
+        prettier.append("{}@{}".format(pretty_cause, pretty_op))
     if limit <= 0 or len(prettier) <= limit:
         return ", ".join(prettier)
     else:
@@ -78,7 +76,7 @@ class KazooTransactionException(k_exc.Ka
     """Exception raised when a checked commit fails."""
 
     def __init__(self, message, failures):
-        super(KazooTransactionException, self).__init__(message)
+        super().__init__(message)
         self._failures = tuple(failures)
 
     @property
@@ -134,8 +132,8 @@ def check_compatible(client, min_version
     """
     server_version = None
     if min_version:
-        server_version = tuple((int(a) for a in client.server_version()))
-        min_version = tuple((int(a) for a in min_version))
+        server_version = tuple(int(a) for a in client.server_version())
+        min_version = tuple(int(a) for a in min_version)
         if server_version < min_version:
             pretty_server_version = ".".join([str(a) for a in server_version])
             min_version = ".".join([str(a) for a in min_version])
@@ -145,8 +143,8 @@ def check_compatible(client, min_version
                                                          min_version))
     if max_version:
         if server_version is None:
-            server_version = tuple((int(a) for a in client.server_version()))
-        max_version = tuple((int(a) for a in max_version))
+            server_version = tuple(int(a) for a in client.server_version())
+        max_version = tuple(int(a) for a in max_version)
         if server_version > max_version:
             pretty_server_version = ".".join([str(a) for a in server_version])
             max_version = ".".join([str(a) for a in max_version])
diff -pruN 5.12.0-2/taskflow/utils/kombu_utils.py 6.0.2-0ubuntu1/taskflow/utils/kombu_utils.py
--- 5.12.0-2/taskflow/utils/kombu_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/kombu_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -22,7 +20,7 @@ _MSG_PROPERTIES = tuple([
 ])
 
 
-class DelayedPretty(object):
+class DelayedPretty:
     """Wraps a message and delays prettifying it until requested.
 
     TODO(harlowja): remove this when https://github.com/celery/kombu/pull/454/
@@ -70,7 +68,7 @@ def _prettify_message(message):
                 properties[segments[-1]] = value
     if message.body is not None:
         properties['body_length'] = len(message.body)
-    return "%(delivery_tag)s: %(properties)s" % {
-        'delivery_tag': message.delivery_tag,
-        'properties': properties,
-    }
+    return "{delivery_tag}: {properties}".format(
+        delivery_tag=message.delivery_tag,
+        properties=properties,
+    )
diff -pruN 5.12.0-2/taskflow/utils/misc.py 6.0.2-0ubuntu1/taskflow/utils/misc.py
--- 5.12.0-2/taskflow/utils/misc.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/misc.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #    Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
 #
@@ -55,7 +53,7 @@ class StrEnum(str, enum.Enum):
             if not isinstance(a, str):
                 raise TypeError("Enumeration '%s' (%s) is not"
                                 " a string" % (a, type(a).__name__))
-        return super(StrEnum, cls).__new__(cls, *args, **kwargs)
+        return super().__new__(cls, *args, **kwargs)
 
 
 class StringIO(io.StringIO):
@@ -82,7 +80,7 @@ def get_hostname(unknown_hostname=UNKNOW
             return unknown_hostname
         else:
             return hostname
-    except socket.error:
+    except OSError:
         return unknown_hostname
 
 
@@ -189,7 +187,7 @@ def find_subclasses(locations, base_cls,
             except ValueError:
                 module = importutils.import_module(item)
             else:
-                obj = importutils.import_class('%s.%s' % (pkg, cls))
+                obj = importutils.import_class('{}.{}'.format(pkg, cls))
                 if not reflection.is_subclass(obj, base_cls):
                     raise TypeError("Object '%s' (%s) is not a '%s' subclass"
                                     % (item, type(item), base_cls))
@@ -343,7 +341,7 @@ def decode_json(raw_data, root_types=(di
         return _check_decoded_type(data, root_types=root_types)
 
 
-class cachedproperty(object):
+class cachedproperty:
     """A *thread-safe* descriptor property that is only evaluated once.
 
     This caching descriptor can be placed on instance methods to translate
diff -pruN 5.12.0-2/taskflow/utils/persistence_utils.py 6.0.2-0ubuntu1/taskflow/utils/persistence_utils.py
--- 5.12.0-2/taskflow/utils/persistence_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/persistence_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/redis_utils.py 6.0.2-0ubuntu1/taskflow/utils/redis_utils.py
--- 5.12.0-2/taskflow/utils/redis_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/redis_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -41,7 +39,7 @@ class RedisClient(redis.Redis):
     """
 
     def __init__(self, *args, **kwargs):
-        super(RedisClient, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.closed = False
 
     def close(self):
@@ -71,7 +69,7 @@ class UnknownExpire(enum.IntEnum):
 DOES_NOT_EXPIRE = UnknownExpire.DOES_NOT_EXPIRE
 KEY_NOT_FOUND = UnknownExpire.KEY_NOT_FOUND
 
-_UNKNOWN_EXPIRE_MAPPING = dict((e.value, e) for e in list(UnknownExpire))
+_UNKNOWN_EXPIRE_MAPPING = {e.value: e for e in list(UnknownExpire)}
 
 
 def get_expiry(client, key, prior_version=None):
diff -pruN 5.12.0-2/taskflow/utils/schema_utils.py 6.0.2-0ubuntu1/taskflow/utils/schema_utils.py
--- 5.12.0-2/taskflow/utils/schema_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/schema_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
diff -pruN 5.12.0-2/taskflow/utils/threading_utils.py 6.0.2-0ubuntu1/taskflow/utils/threading_utils.py
--- 5.12.0-2/taskflow/utils/threading_utils.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/utils/threading_utils.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -73,7 +71,7 @@ def no_op(*args, **kwargs):
     """Function that does nothing."""
 
 
-class ThreadBundle(object):
+class ThreadBundle:
     """A group/bundle of threads that start/stop together."""
 
     def __init__(self):
diff -pruN 5.12.0-2/taskflow/version.py 6.0.2-0ubuntu1/taskflow/version.py
--- 5.12.0-2/taskflow/version.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow/version.py	2025-08-25 12:49:32.000000000 +0000
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #    Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -28,4 +26,4 @@ def version_string_with_package():
     if TASK_PACKAGE is None:
         return version_string()
     else:
-        return "%s-%s" % (version_string(), TASK_PACKAGE)
+        return "{}-{}".format(version_string(), TASK_PACKAGE)
diff -pruN 5.12.0-2/taskflow.egg-info/PKG-INFO 6.0.2-0ubuntu1/taskflow.egg-info/PKG-INFO
--- 5.12.0-2/taskflow.egg-info/PKG-INFO	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow.egg-info/PKG-INFO	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1,148 @@
+Metadata-Version: 2.1
+Name: taskflow
+Version: 6.0.2
+Summary: Taskflow structured state management library.
+Home-page: https://docs.openstack.org/taskflow/latest/
+Author: OpenStack
+Author-email: openstack-discuss@lists.openstack.org
+Keywords: reliable,tasks,execution,parallel,dataflow,workflows,distributed
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: OpenStack
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: System :: Distributed Computing
+Requires-Python: >=3.9
+License-File: LICENSE
+Requires-Dist: pbr>=2.0.0
+Requires-Dist: debtcollector>=1.2.0
+Requires-Dist: futurist>=1.2.0
+Requires-Dist: fasteners>=0.17.3
+Requires-Dist: networkx>=2.1.0
+Requires-Dist: stevedore>=1.20.0
+Requires-Dist: jsonschema>=3.2.0
+Requires-Dist: automaton>=1.9.0
+Requires-Dist: oslo.utils>=3.33.0
+Requires-Dist: oslo.serialization>=2.18.0
+Requires-Dist: tenacity>=6.0.0
+Requires-Dist: cachetools>=2.0.0
+Requires-Dist: pydot>=1.2.4
+Provides-Extra: zookeeper
+Requires-Dist: kazoo>=2.6.0; extra == "zookeeper"
+Provides-Extra: redis
+Requires-Dist: redis>=4.0.0; extra == "redis"
+Provides-Extra: etcd
+Requires-Dist: etcd3gw>=2.0.0; extra == "etcd"
+Provides-Extra: workers
+Requires-Dist: kombu>=4.3.0; extra == "workers"
+Provides-Extra: eventlet
+Requires-Dist: eventlet>=0.18.2; extra == "eventlet"
+Provides-Extra: database
+Requires-Dist: SQLAlchemy>=1.0.10; extra == "database"
+Requires-Dist: alembic>=0.8.10; extra == "database"
+Requires-Dist: SQLAlchemy-Utils>=0.30.11; extra == "database"
+Requires-Dist: PyMySQL>=0.7.6; extra == "database"
+Requires-Dist: psycopg2>=2.8.0; extra == "database"
+Provides-Extra: test
+Requires-Dist: kazoo>=2.6.0; extra == "test"
+Requires-Dist: redis>=4.0.0; extra == "test"
+Requires-Dist: etcd3gw>=2.0.0; extra == "test"
+Requires-Dist: kombu>=4.3.0; extra == "test"
+Requires-Dist: eventlet>=0.18.2; extra == "test"
+Requires-Dist: SQLAlchemy>=1.0.10; extra == "test"
+Requires-Dist: alembic>=0.8.10; extra == "test"
+Requires-Dist: SQLAlchemy-Utils>=0.30.11; extra == "test"
+Requires-Dist: PyMySQL>=0.7.6; extra == "test"
+Requires-Dist: psycopg2>=2.8.0; extra == "test"
+Requires-Dist: zake>=0.1.6; extra == "test"
+Requires-Dist: pydotplus>=2.0.2; extra == "test"
+Requires-Dist: oslotest>=3.2.0; extra == "test"
+Requires-Dist: testtools>=2.2.0; extra == "test"
+Requires-Dist: testscenarios>=0.4; extra == "test"
+Requires-Dist: stestr>=2.0.0; extra == "test"
+Requires-Dist: pifpaf>=0.10.0; extra == "test"
+
+========================
+Team and repository tags
+========================
+
+.. image:: https://governance.openstack.org/tc/badges/taskflow.svg
+    :target: https://governance.openstack.org/tc/reference/tags/index.html
+
+.. Change things from this point on
+
+TaskFlow
+========
+
+.. image:: https://img.shields.io/pypi/v/taskflow.svg
+    :target: https://pypi.org/project/taskflow/
+    :alt: Latest Version
+
+A library to do [jobs, tasks, flows] in a highly available, easy to understand
+and declarative manner (and more!) to be used with OpenStack and other
+projects.
+
+* Free software: Apache license
+* Documentation: https://docs.openstack.org/taskflow/latest/
+* Source: https://opendev.org/openstack/taskflow
+* Bugs: https://bugs.launchpad.net/taskflow/
+* Release notes: https://docs.openstack.org/releasenotes/taskflow/
+
+Join us
+-------
+
+- https://launchpad.net/taskflow
+
+Testing and requirements
+------------------------
+
+Requirements
+~~~~~~~~~~~~
+
+Because this project has many optional (pluggable) parts like persistence
+backends and engines, we decided to split our requirements into two
+parts: - things that are absolutely required (you can't use the project
+without them) are put into ``requirements.txt``. The requirements
+that are required by some optional part of this project (you can use the
+project without them) are put into our ``test-requirements.txt`` file (so
+that we can still test the optional functionality works as expected). If
+you want to use the feature in question (`eventlet`_ or the worker based engine
+that uses `kombu`_ or the `sqlalchemy`_ persistence backend or jobboards which
+have an implementation built using `kazoo`_ ...), you should add
+that requirement(s) to your project or environment.
+
+Tox.ini
+~~~~~~~
+
+Our ``tox.ini`` file describes several test environments that allow to test
+TaskFlow with different python versions and sets of requirements installed.
+Please refer to the `tox`_ documentation to understand how to make these test
+environments work for you.
+
+Developer documentation
+-----------------------
+
+We also have sphinx documentation in ``docs/source``.
+
+*To build it, run:*
+
+::
+
+    $ python setup.py build_sphinx
+
+.. _kazoo: https://kazoo.readthedocs.io/en/latest/
+.. _sqlalchemy: https://www.sqlalchemy.org/
+.. _kombu: https://kombu.readthedocs.io/en/latest/
+.. _eventlet: http://eventlet.net/
+.. _tox: https://tox.testrun.org/
+
diff -pruN 5.12.0-2/taskflow.egg-info/SOURCES.txt 6.0.2-0ubuntu1/taskflow.egg-info/SOURCES.txt
--- 5.12.0-2/taskflow.egg-info/SOURCES.txt	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow.egg-info/SOURCES.txt	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1,356 @@
+.coveragerc
+.mailmap
+.pre-commit-config.yaml
+.pylintrc
+.stestr.conf
+.zuul.yaml
+AUTHORS
+CONTRIBUTING.rst
+ChangeLog
+LICENSE
+README.rst
+bindep.txt
+pyproject.toml
+requirements.txt
+run_tests.sh
+setup-etcd-env.sh
+setup.cfg
+setup.py
+test-requirements.txt
+tox.ini
+doc/requirements.txt
+doc/diagrams/area_of_influence.graffle.tgz
+doc/diagrams/core.graffle.tgz
+doc/diagrams/jobboard.graffle.tgz
+doc/diagrams/tasks.graffle.tgz
+doc/diagrams/worker-engine.graffle.tgz
+doc/source/conf.py
+doc/source/index.rst
+doc/source/templates/layout.html
+doc/source/user/arguments_and_results.rst
+doc/source/user/atoms.rst
+doc/source/user/conductors.rst
+doc/source/user/engines.rst
+doc/source/user/examples.rst
+doc/source/user/exceptions.rst
+doc/source/user/history.rst
+doc/source/user/index.rst
+doc/source/user/inputs_and_outputs.rst
+doc/source/user/jobs.rst
+doc/source/user/notifications.rst
+doc/source/user/patterns.rst
+doc/source/user/persistence.rst
+doc/source/user/resumption.rst
+doc/source/user/shelf.rst
+doc/source/user/states.rst
+doc/source/user/types.rst
+doc/source/user/utils.rst
+doc/source/user/workers.rst
+doc/source/user/img/area_of_influence.svg
+doc/source/user/img/conductor.png
+doc/source/user/img/conductor_cycle.png
+doc/source/user/img/distributed_flow_rpc.png
+doc/source/user/img/engine_states.svg
+doc/source/user/img/flow_states.svg
+doc/source/user/img/job_states.svg
+doc/source/user/img/jobboard.png
+doc/source/user/img/mandelbrot.png
+doc/source/user/img/retry_states.svg
+doc/source/user/img/task_states.svg
+doc/source/user/img/tasks.png
+doc/source/user/img/wbe_request_states.svg
+doc/source/user/img/worker-engine.svg
+playbooks/tests/functional/Debian.yaml
+playbooks/tests/functional/RedHat.yaml
+playbooks/tests/functional/pre.yml
+releasenotes/notes/.placeholder
+releasenotes/notes/add-sentinel-redis-support-9fd16e2a5dd5c0c9.yaml
+releasenotes/notes/bug-2056656-871b67ddbc8cfc92.yaml
+releasenotes/notes/deprecate-eventlet-df4a34a7d56acc47.yaml
+releasenotes/notes/disable-process_executor-python-312-d1074c816bc8303e.yaml
+releasenotes/notes/drop-python-2-7-73d3113c69d724d6.yaml
+releasenotes/notes/etcd-jobboard-backend-8a9fea2238fb0f12.yaml
+releasenotes/notes/fix-endless-loop-on-storage-error-dd4467f0bbc66abf.yaml
+releasenotes/notes/fix-endless-loop-on-storage-failures-b98b30f0c34d25e1.yaml
+releasenotes/notes/fix-revert-all-revert-a0310cd7beaa7409.yaml
+releasenotes/notes/fix-storage-failure-handling-5c115d92daa0eb82.yaml
+releasenotes/notes/fix-zookeeper-option-parsing-f9d37fbc39af47f4.yaml
+releasenotes/notes/mask-keys-74b9bb5c420d8091.yaml
+releasenotes/notes/redis-username-df0eb33869db09a2.yaml
+releasenotes/notes/remove-process_executor-f59d40a5dd287cd7.yaml
+releasenotes/notes/remove-py38-15af791146f479e1.yaml
+releasenotes/notes/remove-strict-redis-f2a5a924b314de41.yaml
+releasenotes/notes/sentinel-fallbacks-6fe2ab0d68959cdf.yaml
+releasenotes/notes/sentinel-ssl-399c56ed7067d282.yaml
+releasenotes/notes/sentinel-use-redis-creds-63f58b12ad46a2b5.yaml
+releasenotes/notes/zookeeper-ssl-support-b9abf24a39096b62.yaml
+releasenotes/source/2023.1.rst
+releasenotes/source/2023.2.rst
+releasenotes/source/2024.1.rst
+releasenotes/source/2024.2.rst
+releasenotes/source/2025.1.rst
+releasenotes/source/conf.py
+releasenotes/source/index.rst
+releasenotes/source/ocata.rst
+releasenotes/source/pike.rst
+releasenotes/source/queens.rst
+releasenotes/source/rocky.rst
+releasenotes/source/stein.rst
+releasenotes/source/train.rst
+releasenotes/source/unreleased.rst
+releasenotes/source/ussuri.rst
+releasenotes/source/victoria.rst
+releasenotes/source/_static/.placeholder
+releasenotes/source/_templates/.placeholder
+taskflow/__init__.py
+taskflow/atom.py
+taskflow/deciders.py
+taskflow/exceptions.py
+taskflow/flow.py
+taskflow/formatters.py
+taskflow/logging.py
+taskflow/retry.py
+taskflow/states.py
+taskflow/storage.py
+taskflow/task.py
+taskflow/test.py
+taskflow/version.py
+taskflow.egg-info/PKG-INFO
+taskflow.egg-info/SOURCES.txt
+taskflow.egg-info/dependency_links.txt
+taskflow.egg-info/entry_points.txt
+taskflow.egg-info/not-zip-safe
+taskflow.egg-info/pbr.json
+taskflow.egg-info/requires.txt
+taskflow.egg-info/top_level.txt
+taskflow/conductors/__init__.py
+taskflow/conductors/base.py
+taskflow/conductors/backends/__init__.py
+taskflow/conductors/backends/impl_blocking.py
+taskflow/conductors/backends/impl_executor.py
+taskflow/conductors/backends/impl_nonblocking.py
+taskflow/contrib/__init__.py
+taskflow/engines/__init__.py
+taskflow/engines/base.py
+taskflow/engines/helpers.py
+taskflow/engines/action_engine/__init__.py
+taskflow/engines/action_engine/builder.py
+taskflow/engines/action_engine/compiler.py
+taskflow/engines/action_engine/completer.py
+taskflow/engines/action_engine/deciders.py
+taskflow/engines/action_engine/engine.py
+taskflow/engines/action_engine/executor.py
+taskflow/engines/action_engine/runtime.py
+taskflow/engines/action_engine/scheduler.py
+taskflow/engines/action_engine/scopes.py
+taskflow/engines/action_engine/selector.py
+taskflow/engines/action_engine/traversal.py
+taskflow/engines/action_engine/actions/__init__.py
+taskflow/engines/action_engine/actions/base.py
+taskflow/engines/action_engine/actions/retry.py
+taskflow/engines/action_engine/actions/task.py
+taskflow/engines/worker_based/__init__.py
+taskflow/engines/worker_based/dispatcher.py
+taskflow/engines/worker_based/endpoint.py
+taskflow/engines/worker_based/engine.py
+taskflow/engines/worker_based/executor.py
+taskflow/engines/worker_based/protocol.py
+taskflow/engines/worker_based/proxy.py
+taskflow/engines/worker_based/server.py
+taskflow/engines/worker_based/types.py
+taskflow/engines/worker_based/worker.py
+taskflow/examples/99_bottles.py
+taskflow/examples/alphabet_soup.py
+taskflow/examples/build_a_car.py
+taskflow/examples/buildsystem.py
+taskflow/examples/calculate_in_parallel.py
+taskflow/examples/calculate_linear.py
+taskflow/examples/create_parallel_volume.py
+taskflow/examples/delayed_return.py
+taskflow/examples/distance_calculator.py
+taskflow/examples/dump_memory_backend.py
+taskflow/examples/echo_listener.py
+taskflow/examples/example_utils.py
+taskflow/examples/fake_billing.py
+taskflow/examples/graph_flow.py
+taskflow/examples/hello_world.py
+taskflow/examples/jobboard_produce_consume_colors.py
+taskflow/examples/parallel_table_multiply.py
+taskflow/examples/persistence_example.py
+taskflow/examples/pseudo_scoping.out.txt
+taskflow/examples/pseudo_scoping.py
+taskflow/examples/resume_from_backend.out.txt
+taskflow/examples/resume_from_backend.py
+taskflow/examples/resume_many_flows.out.txt
+taskflow/examples/resume_many_flows.py
+taskflow/examples/resume_vm_boot.py
+taskflow/examples/resume_volume_create.py
+taskflow/examples/retry_flow.out.txt
+taskflow/examples/retry_flow.py
+taskflow/examples/reverting_linear.out.txt
+taskflow/examples/reverting_linear.py
+taskflow/examples/run_by_iter.out.txt
+taskflow/examples/run_by_iter.py
+taskflow/examples/run_by_iter_enumerate.out.txt
+taskflow/examples/run_by_iter_enumerate.py
+taskflow/examples/share_engine_thread.py
+taskflow/examples/simple_linear.out.txt
+taskflow/examples/simple_linear.py
+taskflow/examples/simple_linear_listening.out.txt
+taskflow/examples/simple_linear_listening.py
+taskflow/examples/simple_linear_pass.out.txt
+taskflow/examples/simple_linear_pass.py
+taskflow/examples/simple_map_reduce.py
+taskflow/examples/switch_graph_flow.py
+taskflow/examples/timing_listener.py
+taskflow/examples/tox_conductor.py
+taskflow/examples/wbe_event_sender.py
+taskflow/examples/wbe_mandelbrot.out.txt
+taskflow/examples/wbe_mandelbrot.py
+taskflow/examples/wbe_simple_linear.out.txt
+taskflow/examples/wbe_simple_linear.py
+taskflow/examples/wrapped_exception.py
+taskflow/examples/resume_many_flows/my_flows.py
+taskflow/examples/resume_many_flows/resume_all.py
+taskflow/examples/resume_many_flows/run_flow.py
+taskflow/jobs/__init__.py
+taskflow/jobs/base.py
+taskflow/jobs/backends/__init__.py
+taskflow/jobs/backends/impl_etcd.py
+taskflow/jobs/backends/impl_redis.py
+taskflow/jobs/backends/impl_zookeeper.py
+taskflow/listeners/__init__.py
+taskflow/listeners/base.py
+taskflow/listeners/capturing.py
+taskflow/listeners/claims.py
+taskflow/listeners/logging.py
+taskflow/listeners/printing.py
+taskflow/listeners/timing.py
+taskflow/patterns/__init__.py
+taskflow/patterns/graph_flow.py
+taskflow/patterns/linear_flow.py
+taskflow/patterns/unordered_flow.py
+taskflow/persistence/__init__.py
+taskflow/persistence/base.py
+taskflow/persistence/models.py
+taskflow/persistence/path_based.py
+taskflow/persistence/backends/__init__.py
+taskflow/persistence/backends/impl_dir.py
+taskflow/persistence/backends/impl_memory.py
+taskflow/persistence/backends/impl_sqlalchemy.py
+taskflow/persistence/backends/impl_zookeeper.py
+taskflow/persistence/backends/sqlalchemy/__init__.py
+taskflow/persistence/backends/sqlalchemy/migration.py
+taskflow/persistence/backends/sqlalchemy/tables.py
+taskflow/persistence/backends/sqlalchemy/alembic/README
+taskflow/persistence/backends/sqlalchemy/alembic/alembic.ini
+taskflow/persistence/backends/sqlalchemy/alembic/env.py
+taskflow/persistence/backends/sqlalchemy/alembic/script.py.mako
+taskflow/persistence/backends/sqlalchemy/alembic/versions/00af93df9d77_add_unique_into_all_indexes.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/0bc3e1a3c135_set_result_meduimtext_type.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/14b227d79a87_add_intention_column.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/1c783c0c2875_replace_exception_an.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/1cea328f0f65_initial_logbook_deta.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/2ad4984f2864_switch_postgres_to_json_native.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/3162c0f3f8e4_add_revert_results_and_revert_failure_.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/40fc8c914bd2_fix_atomdetails_failure_size.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/589dccdf2b6e_rename_taskdetails_to_atomdetails.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/6df9422fcb43_fix_flowdetails_meta_size.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/84d6e888850_add_task_detail_type.py
+taskflow/persistence/backends/sqlalchemy/alembic/versions/README
+taskflow/tests/__init__.py
+taskflow/tests/fixtures.py
+taskflow/tests/test_examples.py
+taskflow/tests/utils.py
+taskflow/tests/unit/__init__.py
+taskflow/tests/unit/test_arguments_passing.py
+taskflow/tests/unit/test_check_transition.py
+taskflow/tests/unit/test_conductors.py
+taskflow/tests/unit/test_deciders.py
+taskflow/tests/unit/test_engine_helpers.py
+taskflow/tests/unit/test_engines.py
+taskflow/tests/unit/test_exceptions.py
+taskflow/tests/unit/test_failure.py
+taskflow/tests/unit/test_flow_dependencies.py
+taskflow/tests/unit/test_formatters.py
+taskflow/tests/unit/test_functor_task.py
+taskflow/tests/unit/test_listeners.py
+taskflow/tests/unit/test_mapfunctor_task.py
+taskflow/tests/unit/test_notifier.py
+taskflow/tests/unit/test_progress.py
+taskflow/tests/unit/test_reducefunctor_task.py
+taskflow/tests/unit/test_retries.py
+taskflow/tests/unit/test_states.py
+taskflow/tests/unit/test_storage.py
+taskflow/tests/unit/test_suspend.py
+taskflow/tests/unit/test_task.py
+taskflow/tests/unit/test_types.py
+taskflow/tests/unit/test_utils.py
+taskflow/tests/unit/test_utils_async_utils.py
+taskflow/tests/unit/test_utils_binary.py
+taskflow/tests/unit/test_utils_iter_utils.py
+taskflow/tests/unit/test_utils_kazoo_utils.py
+taskflow/tests/unit/test_utils_threading_utils.py
+taskflow/tests/unit/action_engine/__init__.py
+taskflow/tests/unit/action_engine/test_builder.py
+taskflow/tests/unit/action_engine/test_compile.py
+taskflow/tests/unit/action_engine/test_creation.py
+taskflow/tests/unit/action_engine/test_scoping.py
+taskflow/tests/unit/jobs/__init__.py
+taskflow/tests/unit/jobs/base.py
+taskflow/tests/unit/jobs/test_entrypoint.py
+taskflow/tests/unit/jobs/test_etcd_job.py
+taskflow/tests/unit/jobs/test_redis_job.py
+taskflow/tests/unit/jobs/test_zk_job.py
+taskflow/tests/unit/patterns/__init__.py
+taskflow/tests/unit/patterns/test_graph_flow.py
+taskflow/tests/unit/patterns/test_linear_flow.py
+taskflow/tests/unit/patterns/test_unordered_flow.py
+taskflow/tests/unit/persistence/__init__.py
+taskflow/tests/unit/persistence/base.py
+taskflow/tests/unit/persistence/test_dir_persistence.py
+taskflow/tests/unit/persistence/test_memory_persistence.py
+taskflow/tests/unit/persistence/test_sql_persistence.py
+taskflow/tests/unit/persistence/test_zk_persistence.py
+taskflow/tests/unit/worker_based/__init__.py
+taskflow/tests/unit/worker_based/test_creation.py
+taskflow/tests/unit/worker_based/test_dispatcher.py
+taskflow/tests/unit/worker_based/test_endpoint.py
+taskflow/tests/unit/worker_based/test_executor.py
+taskflow/tests/unit/worker_based/test_message_pump.py
+taskflow/tests/unit/worker_based/test_pipeline.py
+taskflow/tests/unit/worker_based/test_protocol.py
+taskflow/tests/unit/worker_based/test_proxy.py
+taskflow/tests/unit/worker_based/test_server.py
+taskflow/tests/unit/worker_based/test_types.py
+taskflow/tests/unit/worker_based/test_worker.py
+taskflow/types/__init__.py
+taskflow/types/entity.py
+taskflow/types/failure.py
+taskflow/types/graph.py
+taskflow/types/latch.py
+taskflow/types/notifier.py
+taskflow/types/sets.py
+taskflow/types/timing.py
+taskflow/types/tree.py
+taskflow/utils/__init__.py
+taskflow/utils/async_utils.py
+taskflow/utils/banner.py
+taskflow/utils/eventlet_utils.py
+taskflow/utils/iter_utils.py
+taskflow/utils/kazoo_utils.py
+taskflow/utils/kombu_utils.py
+taskflow/utils/misc.py
+taskflow/utils/persistence_utils.py
+taskflow/utils/redis_utils.py
+taskflow/utils/schema_utils.py
+taskflow/utils/threading_utils.py
+tools/clear_zk.sh
+tools/env_builder.sh
+tools/pretty_tox.sh
+tools/schema_generator.py
+tools/speed_test.py
+tools/state_graph.py
+tools/subunit_trace.py
+tools/test-setup.sh
+tools/update_states.sh
\ No newline at end of file
diff -pruN 5.12.0-2/taskflow.egg-info/dependency_links.txt 6.0.2-0ubuntu1/taskflow.egg-info/dependency_links.txt
--- 5.12.0-2/taskflow.egg-info/dependency_links.txt	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow.egg-info/dependency_links.txt	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1 @@
+
diff -pruN 5.12.0-2/taskflow.egg-info/entry_points.txt 6.0.2-0ubuntu1/taskflow.egg-info/entry_points.txt
--- 5.12.0-2/taskflow.egg-info/entry_points.txt	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow.egg-info/entry_points.txt	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1,24 @@
+[taskflow.conductors]
+blocking = taskflow.conductors.backends.impl_blocking:BlockingConductor
+nonblocking = taskflow.conductors.backends.impl_nonblocking:NonBlockingConductor
+
+[taskflow.engines]
+default = taskflow.engines.action_engine.engine:SerialActionEngine
+parallel = taskflow.engines.action_engine.engine:ParallelActionEngine
+serial = taskflow.engines.action_engine.engine:SerialActionEngine
+worker-based = taskflow.engines.worker_based.engine:WorkerBasedActionEngine
+workers = taskflow.engines.worker_based.engine:WorkerBasedActionEngine
+
+[taskflow.jobboards]
+etcd = taskflow.jobs.backends.impl_etcd:EtcdJobBoard
+redis = taskflow.jobs.backends.impl_redis:RedisJobBoard
+zookeeper = taskflow.jobs.backends.impl_zookeeper:ZookeeperJobBoard
+
+[taskflow.persistence]
+dir = taskflow.persistence.backends.impl_dir:DirBackend
+file = taskflow.persistence.backends.impl_dir:DirBackend
+memory = taskflow.persistence.backends.impl_memory:MemoryBackend
+mysql = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
+postgresql = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
+sqlite = taskflow.persistence.backends.impl_sqlalchemy:SQLAlchemyBackend
+zookeeper = taskflow.persistence.backends.impl_zookeeper:ZkBackend
diff -pruN 5.12.0-2/taskflow.egg-info/not-zip-safe 6.0.2-0ubuntu1/taskflow.egg-info/not-zip-safe
--- 5.12.0-2/taskflow.egg-info/not-zip-safe	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow.egg-info/not-zip-safe	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1 @@
+
diff -pruN 5.12.0-2/taskflow.egg-info/pbr.json 6.0.2-0ubuntu1/taskflow.egg-info/pbr.json
--- 5.12.0-2/taskflow.egg-info/pbr.json	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow.egg-info/pbr.json	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1 @@
+{"git_version": "bad7ab98", "is_release": true}
\ No newline at end of file
diff -pruN 5.12.0-2/taskflow.egg-info/requires.txt 6.0.2-0ubuntu1/taskflow.egg-info/requires.txt
--- 5.12.0-2/taskflow.egg-info/requires.txt	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow.egg-info/requires.txt	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1,54 @@
+pbr>=2.0.0
+debtcollector>=1.2.0
+futurist>=1.2.0
+fasteners>=0.17.3
+networkx>=2.1.0
+stevedore>=1.20.0
+jsonschema>=3.2.0
+automaton>=1.9.0
+oslo.utils>=3.33.0
+oslo.serialization>=2.18.0
+tenacity>=6.0.0
+cachetools>=2.0.0
+pydot>=1.2.4
+
+[database]
+SQLAlchemy>=1.0.10
+alembic>=0.8.10
+SQLAlchemy-Utils>=0.30.11
+PyMySQL>=0.7.6
+psycopg2>=2.8.0
+
+[etcd]
+etcd3gw>=2.0.0
+
+[eventlet]
+eventlet>=0.18.2
+
+[redis]
+redis>=4.0.0
+
+[test]
+kazoo>=2.6.0
+redis>=4.0.0
+etcd3gw>=2.0.0
+kombu>=4.3.0
+eventlet>=0.18.2
+SQLAlchemy>=1.0.10
+alembic>=0.8.10
+SQLAlchemy-Utils>=0.30.11
+PyMySQL>=0.7.6
+psycopg2>=2.8.0
+zake>=0.1.6
+pydotplus>=2.0.2
+oslotest>=3.2.0
+testtools>=2.2.0
+testscenarios>=0.4
+stestr>=2.0.0
+pifpaf>=0.10.0
+
+[workers]
+kombu>=4.3.0
+
+[zookeeper]
+kazoo>=2.6.0
diff -pruN 5.12.0-2/taskflow.egg-info/top_level.txt 6.0.2-0ubuntu1/taskflow.egg-info/top_level.txt
--- 5.12.0-2/taskflow.egg-info/top_level.txt	1970-01-01 00:00:00.000000000 +0000
+++ 6.0.2-0ubuntu1/taskflow.egg-info/top_level.txt	2025-08-25 12:49:58.000000000 +0000
@@ -0,0 +1 @@
+taskflow
diff -pruN 5.12.0-2/tools/speed_test.py 6.0.2-0ubuntu1/tools/speed_test.py
--- 5.12.0-2/tools/speed_test.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/tools/speed_test.py	2025-08-25 12:49:32.000000000 +0000
@@ -36,7 +36,7 @@ def print_header(name):
         print(header_footer)
 
 
-class ProfileIt(object):
+class ProfileIt:
     stats_ordering = ('cumulative', 'calls',)
 
     def __init__(self, name, args):
@@ -65,7 +65,7 @@ class ProfileIt(object):
             print("")
 
 
-class TimeIt(object):
+class TimeIt:
     def __init__(self, name, args):
         self.watch = timeutils.StopWatch()
         self.name = name
diff -pruN 5.12.0-2/tools/state_graph.py 6.0.2-0ubuntu1/tools/state_graph.py
--- 5.12.0-2/tools/state_graph.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/tools/state_graph.py	2025-08-25 12:49:32.000000000 +0000
@@ -34,7 +34,7 @@ from taskflow import states
 
 # This is just needed to get at the machine object (we will not
 # actually be running it...).
-class DummyRuntime(object):
+class DummyRuntime:
     def __init__(self):
         self.analyzer = mock.MagicMock()
         self.completer = mock.MagicMock()
@@ -185,7 +185,7 @@ def main():
     print(g.to_string().strip())
 
     g.write(options.filename, format=options.format)
-    print("Created %s at '%s'" % (options.format, options.filename))
+    print("Created {} at '{}'".format(options.format, options.filename))
 
     # To make the svg more pretty use the following:
     # $ xsltproc ../diagram-tools/notugly.xsl ./states.svg > pretty-states.svg
diff -pruN 5.12.0-2/tools/subunit_trace.py 6.0.2-0ubuntu1/tools/subunit_trace.py
--- 5.12.0-2/tools/subunit_trace.py	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/tools/subunit_trace.py	2025-08-25 12:49:32.000000000 +0000
@@ -36,7 +36,7 @@ RESULTS = {}
 class Starts(testtools.StreamResult):
 
     def __init__(self, output):
-        super(Starts, self).__init__()
+        super().__init__()
         self._output = output
 
     def startTestRun(self):
@@ -46,7 +46,7 @@ class Starts(testtools.StreamResult):
     def status(self, test_id=None, test_status=None, test_tags=None,
                runnable=True, file_name=None, file_bytes=None, eof=False,
                mime_type=None, route_code=None, timestamp=None):
-        super(Starts, self).status(
+        super().status(
             test_id, test_status,
             test_tags=test_tags, runnable=runnable, file_name=file_name,
             file_bytes=file_bytes, eof=eof, mime_type=mime_type,
@@ -146,7 +146,7 @@ def print_attachments(stream, test, all_
             detail.content_type.type = 'text'
         if (all_channels or name in channels) and detail.as_text():
             title = "Captured %s:" % name
-            stream.write("\n%s\n%s\n" % (title, ('~' * len(title))))
+            stream.write("\n{}\n{}\n".format(title, ('~' * len(title))))
             # indent attachment lines 4 spaces to make them visually
             # offset
             for line in detail.as_text().split('\n'):
@@ -174,20 +174,20 @@ def show_outcome(stream, test, print_fai
 
     if status == 'fail':
         FAILS.append(test)
-        stream.write('{%s} %s [%s] ... FAILED\n' % (
+        stream.write('{{{}}} {} [{}] ... FAILED\n'.format(
             worker, name, duration))
         if not print_failures:
             print_attachments(stream, test, all_channels=True)
     elif not failonly:
         if status == 'success':
-            stream.write('{%s} %s [%s] ... ok\n' % (
+            stream.write('{{{}}} {} [{}] ... ok\n'.format(
                 worker, name, duration))
             print_attachments(stream, test)
         elif status == 'skip':
-            stream.write('{%s} %s ... SKIPPED: %s\n' % (
+            stream.write('{{{}}} {} ... SKIPPED: {}\n'.format(
                 worker, name, test['details']['reason'].as_text()))
         else:
-            stream.write('{%s} %s [%s] ... %s\n' % (
+            stream.write('{{{}}} {} [{}] ... {}\n'.format(
                 worker, name, duration, test['status']))
             if not print_failures:
                 print_attachments(stream, test, all_channels=True)
@@ -240,8 +240,8 @@ def worker_stats(worker):
 
 def print_summary(stream):
     stream.write("\n======\nTotals\n======\n")
-    stream.write("Run: %s in %s sec.\n" % (count_tests('status', '.*'),
-                                           run_time()))
+    stream.write("Run: {} in {} sec.\n".format(count_tests('status', '.*'),
+                                               run_time()))
     stream.write(" - Passed: %s\n" % count_tests('status', 'success'))
     stream.write(" - Skipped: %s\n" % count_tests('status', 'skip'))
     stream.write(" - Failed: %s\n" % count_tests('status', 'fail'))
diff -pruN 5.12.0-2/tox.ini 6.0.2-0ubuntu1/tox.ini
--- 5.12.0-2/tox.ini	2025-02-26 15:46:31.000000000 +0000
+++ 6.0.2-0ubuntu1/tox.ini	2025-08-25 12:49:32.000000000 +0000
@@ -27,6 +27,8 @@ commands =
 allowlist_externals =
   find
   ./setup-etcd-env.sh
+passenv =
+  ETCD_VERSION
 
 [testenv:update-states]
 deps =
