diff -pruN 5.1.0-1/bin/rpyc_registry.py 5.2.1-1/bin/rpyc_registry.py
--- 5.1.0-1/bin/rpyc_registry.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/bin/rpyc_registry.py	2022-07-31 00:27:15.000000000 +0000
@@ -31,10 +31,10 @@ class RegistryServer(cli.Application):
     allow_listing = cli.SwitchAttr(["-l", "--listing"], bool, default=False, help="Enable/disable listing on registry")
 
     def main(self):
-        if self.mode == "UDP":
+        if self.mode.upper() == "UDP":
             server = UDPRegistryServer(host='::' if self.ipv6 else '0.0.0.0', port=self.port,
                                        pruning_timeout=self.pruning_timeout, allow_listing=self.allow_listing)
-        elif self.mode == "TCP":
+        elif self.mode.upper() == "TCP":
             server = TCPRegistryServer(port=self.port, pruning_timeout=self.pruning_timeout,
                                        allow_listing=self.allow_listing)
         setup_logger(self.quiet, self.logfile)
diff -pruN 5.1.0-1/CHANGELOG.rst 5.2.1-1/CHANGELOG.rst
--- 5.1.0-1/CHANGELOG.rst	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/CHANGELOG.rst	2022-07-31 00:27:15.000000000 +0000
@@ -1,3 +1,35 @@
+5.2.1
+=====
+Date: 2022-07-30
+
+- `#494`_ Added support for using decorators to expose methods (see `#292`_)
+- `#499`_ Allow `BgServingThread` serve and sleep intervals to be customized
+- `#498`_ Avoid redefining `hasattr_static` on every `_check_attr` call
+- `#489`_ Updated SSL context usage to avoid deprecated aspects and changes
+- `#485`_ Add a configurable timeout on the zero deploy close method
+- `#484`_ Fixed `--mode` CLI argument for `rpyc_registry`
+- `#479`_ Fixed propagation of `AttributeErrors` raised by exposed descriptors
+- `#476`_ Allow filtering by host on list_services
+- `#493`_ and `#502`_  Improved documentation and fixed typos
+- `#492`_ Some work around race conditions but proper fix is rather involved (see `#491`_)
+
+.. _#502: https://github.com/tomerfiliba-org/rpyc/pull/502
+.. _#499: https://github.com/tomerfiliba-org/rpyc/pull/499
+.. _#498: https://github.com/tomerfiliba-org/rpyc/pull/498
+.. _#494: https://github.com/tomerfiliba-org/rpyc/pull/494
+.. _#489: https://github.com/tomerfiliba-org/rpyc/pull/489
+.. _#485: https://github.com/tomerfiliba-org/rpyc/pull/485
+.. _#484: https://github.com/tomerfiliba-org/rpyc/pull/484
+.. _#479: https://github.com/tomerfiliba-org/rpyc/pull/479
+.. _#476: https://github.com/tomerfiliba-org/rpyc/pull/476
+.. _#492: https://github.com/tomerfiliba-org/rpyc/pull/492
+.. _#493: https://github.com/tomerfiliba-org/rpyc/issues/493
+.. _#491: https://github.com/tomerfiliba-org/rpyc/issues/491
+.. _#307: https://github.com/tomerfiliba-org/rpyc/issues/307
+.. _#292: https://github.com/tomerfiliba-org/rpyc/issues/292
+
+* 5.2.0 was skipped due to PyPi not allowing file name reuse
+
 5.1.0
 =====
 Date: 2022-02-26
diff -pruN 5.1.0-1/debian/changelog 5.2.1-1/debian/changelog
--- 5.1.0-1/debian/changelog	2022-03-02 06:57:06.000000000 +0000
+++ 5.2.1-1/debian/changelog	2022-08-01 10:48:20.000000000 +0000
@@ -1,3 +1,12 @@
+rpyc (5.2.1-1) unstable; urgency=medium
+
+  * New upstream version 5.2.1
+  * Build with PEP 517 builder
+  * Disable tests which require network access
+  * Drop rpy_classic and rpyc_registry executables
+
+ -- Timo Röhling <roehling@debian.org>  Mon, 01 Aug 2022 12:48:20 +0200
+
 rpyc (5.1.0-1) unstable; urgency=medium
 
   * New upstream version 5.1.0
diff -pruN 5.1.0-1/debian/control 5.2.1-1/debian/control
--- 5.1.0-1/debian/control	2022-03-02 06:55:58.000000000 +0000
+++ 5.2.1-1/debian/control	2022-08-01 10:37:51.000000000 +0000
@@ -7,7 +7,9 @@ Build-Depends: debhelper-compat (= 13),
 	       dh-python,
                dh-sequence-python3,
 	       dh-sequence-sphinxdoc,
+	       pybuild-plugin-pyproject,
 	       python3-all (>= 3.6),
+	       python3-hatchling,
 	       python3-nose2 <!nocheck>,
 	       python3-plumbum,
 	       python3-setuptools,
diff -pruN 5.1.0-1/debian/patches/0001-Fix-unit-tests.patch 5.2.1-1/debian/patches/0001-Fix-unit-tests.patch
--- 5.1.0-1/debian/patches/0001-Fix-unit-tests.patch	2022-03-02 06:53:42.000000000 +0000
+++ 5.2.1-1/debian/patches/0001-Fix-unit-tests.patch	2022-08-01 10:37:51.000000000 +0000
@@ -9,10 +9,10 @@ Subject: Fix unit tests
  3 files changed, 3 insertions(+), 3 deletions(-)
 
 diff --git a/tests/test_registry.py b/tests/test_registry.py
-index 00580e5..5d20a8c 100644
+index 8a5ce86..74229bd 100644
 --- a/tests/test_registry.py
 +++ b/tests/test_registry.py
-@@ -72,7 +72,7 @@ class TestUdpRegistry(BaseRegistryTest, unittest.TestCase):
+@@ -89,7 +89,7 @@ class TestUdpRegistry(BaseRegistryTest, unittest.TestCase):
          return UDPRegistryServer(pruning_timeout=PRUNING_TIMEOUT, allow_listing=True)
  
      def _get_client(self):
diff -pruN 5.1.0-1/debian/patches/0003-Disable-tests-which-require-network-access.patch 5.2.1-1/debian/patches/0003-Disable-tests-which-require-network-access.patch
--- 5.1.0-1/debian/patches/0003-Disable-tests-which-require-network-access.patch	1970-01-01 00:00:00.000000000 +0000
+++ 5.2.1-1/debian/patches/0003-Disable-tests-which-require-network-access.patch	2022-08-01 10:38:03.000000000 +0000
@@ -0,0 +1,21 @@
+From: =?utf-8?q?Timo_R=C3=B6hling?= <roehling@debian.org>
+Date: Mon, 1 Aug 2022 12:37:50 +0200
+Subject: Disable tests which require network access
+
+---
+ tests/test_deploy.py | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/tests/test_deploy.py b/tests/test_deploy.py
+index 063e805..91753d5 100644
+--- a/tests/test_deploy.py
++++ b/tests/test_deploy.py
+@@ -15,6 +15,8 @@ except Exception:
+ 
+ 
+ class TestDeploy(unittest.TestCase):
++    __test__ = False
++
+     def test_deploy(self):
+         rem = SshMachine("localhost")
+         SshMachine.python = rem[sys.executable]
diff -pruN 5.1.0-1/debian/patches/series 5.2.1-1/debian/patches/series
--- 5.1.0-1/debian/patches/series	2022-03-02 06:53:42.000000000 +0000
+++ 5.2.1-1/debian/patches/series	2022-08-01 10:38:03.000000000 +0000
@@ -1,2 +1,3 @@
 0001-Fix-unit-tests.patch
 0002-Remove-Fork-me-on-Github-banner-for-privacy-violatio.patch
+0003-Disable-tests-which-require-network-access.patch
diff -pruN 5.1.0-1/debian/python3-rpyc.manpages 5.2.1-1/debian/python3-rpyc.manpages
--- 5.1.0-1/debian/python3-rpyc.manpages	2022-03-02 06:53:38.000000000 +0000
+++ 5.2.1-1/debian/python3-rpyc.manpages	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-debian/*.1
diff -pruN 5.1.0-1/debian/rpyc_classic.1 5.2.1-1/debian/rpyc_classic.1
--- 5.1.0-1/debian/rpyc_classic.1	2022-03-02 06:53:38.000000000 +0000
+++ 5.2.1-1/debian/rpyc_classic.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,100 +0,0 @@
-.TH RPYC_CLASSIC 1 "" ""
-.SH NAME
-rpyc_classic \- RPyC classic server
-.SH SYNOPSIS
-.SY rpyc_classic
-.RI [ options ]
-.YS
-.SH DESCRIPTION
-RPyC (pronounced as are-pie-see), or Remote Python Call, is a transparent
-Python library for symmetrical remote procedure calls, clustering and
-distributed-computing. RPyC makes use of object-proxying, a technique that
-employs Python’s dynamic nature, to overcome the physical boundaries between
-processes and computers, so that remote objects can be manipulated as if they
-were local.
-.PP
-.B rpyc_classic
-is the server component which enables remote access to the local system.
-The following options are available:
-.TP
-.BR \-\-mode " " \fIMODE\fR ", " \-m " " \fIMODE\fR
-Select mode of operation. One of
-.BR threaded ", " forking ", " stdio ", " oneshot .
-.TP
-.BR \-\-port " " \fIPORT\fR ", " \-p " " \fIPORT\fR
-The TCP port which the server listens to. The default is 18812 for
-unauthenticated instances and 18821 for SSL authenticated ones.
-.TP
-.BR \-\-host " " \fIINTERFACE\fR
-Bind to network interface
-.IR INTERFACE .
-The default is
-.BR localhost .
-.TP
-.B \-\-ipv6
-Enable IPv6.
-.TP
-.BR \-\-logfile " " \fIFILE\fR
-Write log to
-.IR FILE .
-If not specified, log output is written to
-.BR stderr .
-.TP
-.BR \-\-quiet ", " \-q
-Quiet mode. Only errors will be logged.
-.SS RPyC Registry
-A server instance can be registered with a running
-.BR rpyc_registry (1)
-for automated service discovery by clients:
-.TP
-.BR \-\-registry\-type " " \fITYPE\fR
-Connect to the registry via
-.B TCP
-or
-.B UDP
-(which is the default).
-.TP
-.BR \-\-registry-port " " \fIPORT\fR
-Connect to the registry on port
-.IR PORT .
-The default is 18811.
-.TP
-.BR \-\-registry\-host " " \fIHOST\fR
-Connect to the registry at host
-.IR HOST .
-This is a required option for TCP registries. If no host is specified for UDP,
-the server will attempt a broadcast via IP 255.255.255.255 to reach any
-listening registry in the local network.
-.SS SSL Authenticated Mode
-The server supports authentication and authorization via SSL certificates. 
-.BR WARNING:
-While this mode provides a modest amount of security, there are serious shortcomings
-such as a missing certificate revocation mechanism. Do not rely on this to expose
-vital infrastructure to the Internet.
-.PP
-You need to specify the following options to enable this mode:
-.TP
-.BR \-\-ssl\-cafile " " \fIFILE\fR
-Read the Certificate Authority (CA) certificate from
-.IR FILE .
-The CA is used to determine which client certificates are authorized
-to connect to the server; only client certificates which have been issued by the
-CA are accepted.
-.BR WARNING :
-If this option is omitted, the server will allow any client to connect, which
-is probably not what you want.
-.TP
-.BR \-\-ssl\-certfile " " \fIFILE\FR
-Read the SSL server certificate from
-.IR FILE .
-This certificate is presented to connecting clients to let them verify that the server
-is genuine.
-.TP
-.BR \-\-ssl\-keyfile " " \fIFILE\fR
-Read the private SSL server key for the server certificate from
-.IR FILE .
-.SH SEE ALSO
-.BR rpyc_registry (1)
-.SH AUTHOR
-This manual page was written for Debian by Timo R\[u00F6]hling and
-may be used without restriction.
diff -pruN 5.1.0-1/debian/rpyc_registry.1 5.2.1-1/debian/rpyc_registry.1
--- 5.1.0-1/debian/rpyc_registry.1	2022-03-02 06:53:38.000000000 +0000
+++ 5.2.1-1/debian/rpyc_registry.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,48 +0,0 @@
-.TH RPYC_CLASSIC 1 "" "" "Remote Python Call (RPyC)"
-.SH NAME
-rpyc_registry \- RPyC registry
-.SH SYNOPSIS
-.SY rpyc_registry
-.RI [ options ]
-.YS
-.SH DESCRIPTION
-RPyC (pronounced as are-pie-see), or Remote Python Call, is a transparent
-Python library for symmetrical remote procedure calls, clustering and
-distributed-computing. RPyC makes use of object-proxying, a technique that
-employs Python’s dynamic nature, to overcome the physical boundaries between
-processes and computers, so that remote objects can be manipulated as if they
-were local.
-.PP
-.B rpyc_registry
-provides a way for clients to discover running instances of
-.BR rpyc_classic (1)
-servers. The following options are available:
-.TP
-.BR \-\-mode " " \fIMODE\fR ", " \-m " " \fIMODE\fR
-Choose registry service type. Can be
-.B TCP
-or
-.B UDP
-(the default).
-.TP
-.BR \-\-port " " \fIPORT\fR ", " \-p " " \fIPORT\fR
-Choose the UDP/TCP listener port for the registry. The default
-is 18811.
-.TP
-.BR \-\-timeout " " \fISECS\fR ", " \-t " " \fISECS\fR
-Set a timeout after which unresponsive servers will be removed from the registry.
-The default is 240 seconds.
-.TP
-.BR \-\-logfile " " \fIFILE\fR
-Write log to
-.IR FILE .
-If not specified, log output is written to
-.BR stderr .
-.TP
-.BR \-\-quiet ", " \-q
-Quiet mode. Only errors will be logged.
-.SH SEE ALSO
-.BR rpyc_classic (1)
-.SH AUTHOR
-This manual page was written for Debian by Timo R\[u00F6]hling and
-may be used without restriction.
diff -pruN 5.1.0-1/debian/rules 5.2.1-1/debian/rules
--- 5.1.0-1/debian/rules	2022-03-02 06:53:38.000000000 +0000
+++ 5.2.1-1/debian/rules	2022-08-01 10:48:09.000000000 +0000
@@ -11,10 +11,3 @@ execute_after_dh_auto_build:
 ifeq (,$(filter nodoc,$(DEB_BUILD_OPTIONS)))
 	make -C docs html
 endif
-
-execute_after_dh_auto_install:
-	mv debian/python3-rpyc/usr/bin/rpyc_classic.py \
-	   debian/python3-rpyc/usr/bin/rpyc_classic
-	mv debian/python3-rpyc/usr/bin/rpyc_registry.py \
-	   debian/python3-rpyc/usr/bin/rpyc_registry
-
diff -pruN 5.1.0-1/debian/watch 5.2.1-1/debian/watch
--- 5.1.0-1/debian/watch	2022-03-02 06:53:38.000000000 +0000
+++ 5.2.1-1/debian/watch	2022-08-01 10:03:58.000000000 +0000
@@ -1,3 +1,3 @@
 version=4
-opts=filenamemangle=s/.+\/v?(\d\S+)\.tar\.gz/rpyc-$1\.tar\.gz/ \
- https://github.com/tomerfiliba-org/rpyc/releases/latest .*/v?(\d\S+)\.tar\.gz
+opts=filenamemangle=s%(?:.*?)?v?(\d[\d.]*@ARCHIVE_EXT@)%@PACKAGE@-$1% \
+ https://github.com/tomerfiliba-org/rpyc/releases/latest (?:.*?/)?v?@ANY_VERSION@@ARCHIVE_EXT@
diff -pruN 5.1.0-1/demos/async_client/client.py 5.2.1-1/demos/async_client/client.py
--- 5.1.0-1/demos/async_client/client.py	1970-01-01 00:00:00.000000000 +0000
+++ 5.2.1-1/demos/async_client/client.py	2022-07-31 00:27:15.000000000 +0000
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+"""Shows expected behavior for a client when the remote thread serving this client is busy/sleeping.
+
+Additional context: https://github.com/tomerfiliba-org/rpyc/issues/491#issuecomment-1131843406
+"""
+import logging
+import threading
+import time
+import rpyc
+
+
+logger = rpyc.setup_logger(namespace='client')
+rpyc.core.protocol.DEFAULT_CONFIG['logger'] = logger
+
+
+def async_example(connection, event):
+    _async_function = rpyc.async_(connection.root.function)  # create async proxy
+    # The server will call event.wait which will block this thread. To process
+    # the set message from the server we need a background thread. A background
+    # thread ensures that we have a thread that is not blocked.
+    #
+    # But wait! Since the communication is symmetric, the server side could
+    # be blocked if you are not careful. It needs responses from the client
+    #
+    # The perils of trying to thread a single connection...
+    # - the thread that receives the message from the server to wait is blocked
+    # - which thread is blocked is VERY hard to guarantee
+    #
+    # THIS IS NOT THE PREFERRED WAY FOR MUTABLE TYPES...
+    # - threading a connection might be okay to do for immutable types depending on context
+
+    bgsrv = rpyc.BgServingThread(connection)
+    ares = _async_function(event, block_server_thread=False)
+    value = ares.value
+    event.clear()
+    logger.info('Running buggy blocking example...')
+    ares = _async_function(event, block_server_thread=True)
+    value = ares.value
+    event.clear()
+    bgsrv.stop()
+
+
+def how_to_block_main_thread(connection, event):
+    """Example of how to block the main thread of a client"""
+    t0 = time.time()
+    logger.debug("Running example that blocks main thread of client...")
+    value = connection.root.function(event, call_set=True)
+    logger.debug(f"Value returned after {time.time()-t0}s: {value}")
+
+
+class Event:
+    def __init__(self):
+        self._evnt = threading.Event()
+
+    def __getattr__(self, name):
+        if name in ('wait', 'set', 'clear'):
+            logging.info(f'Event.__getattr__({name})')
+        return getattr(self._evnt, name)
+
+
+if __name__ == "__main__":
+    logger.info('Printed from main thread')
+    connection = rpyc.connect("localhost", 18812, config=dict(allow_all_attrs=True))
+    event = Event()
+    async_example(connection, event)
+    event.clear()
+    # how_to_block_main_thread(connection, event)
diff -pruN 5.1.0-1/demos/async_client/server.py 5.2.1-1/demos/async_client/server.py
--- 5.1.0-1/demos/async_client/server.py	1970-01-01 00:00:00.000000000 +0000
+++ 5.2.1-1/demos/async_client/server.py	2022-07-31 00:27:15.000000000 +0000
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+"""Emulates a service function that is blocked due to being busy/sleeping.
+
+Additional context: https://github.com/tomerfiliba-org/rpyc/issues/491#issuecomment-1131843406
+"""
+import logging
+import time
+import rpyc
+import threading
+
+
+logger = rpyc.setup_logger(namespace='server')
+rpyc.core.protocol.DEFAULT_CONFIG['logger'] = logger
+
+
+class Service(rpyc.Service):
+    def exposed_fetch_value(self):
+        return self._value
+
+    def exposed_function(self, client_event, block_server_thread=False):
+        if block_server_thread:
+            # For some reason
+            _wait = lambda : getattr(client_event, 'wait')()  # delays attr proxy behavior
+            _set = lambda : getattr(client_event, 'set')()  # delays attr proxy behavior
+        else:
+            _wait = rpyc.async_(client_event.wait)  # amortize proxy behavior
+            _set = rpyc.async_(client_event.set)  # amortize proxy behavior
+        _wait()
+        logger.debug('Client messaged to wait for now...')
+        for i in (1, 2):
+            logger.debug(f'Pretending to do task {i}')
+            time.sleep(0.2)
+        self._value = 6465616462656566  # ''.join([hex(ord(c))[2:] for c in 'deadbeef'])
+        _set()
+        logger.debug('Client event set, it may resume...')
+
+if __name__ == "__main__":
+    rpyc.ThreadedServer(service=Service, hostname="localhost", port=18812).start()
diff -pruN 5.1.0-1/docs/conf.py 5.2.1-1/docs/conf.py
--- 5.1.0-1/docs/conf.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/docs/conf.py	2022-07-31 00:27:15.000000000 +0000
@@ -42,17 +42,17 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'RPyC'
-copyright = u'%d, Tomer Filiba, licensed under Attribution-ShareAlike 3.0' % (time.gmtime().tm_year,)
+copyright = f'{time.gmtime().tm_year}, Tomer Filiba, licensed under Attribution-ShareAlike 3.0'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-from rpyc.version import version_string, release_date
-version = version_string
+from rpyc.version import __version__, release_date
+version = __version__
 # The full version, including alpha/beta/rc tags.
-release = version_string + "/" + release_date
+release = __version__ + "/" + release_date
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff -pruN 5.1.0-1/docs/docs/advanced-debugging.rst 5.2.1-1/docs/docs/advanced-debugging.rst
--- 5.1.0-1/docs/docs/advanced-debugging.rst	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/docs/docs/advanced-debugging.rst	2022-07-31 00:27:15.000000000 +0000
@@ -20,13 +20,19 @@ Let's use `pyenv` to install Python vers
         printf "${PWD}\n" > "${site}/rpyc.pth"
     done
 
-Each `venv` contains a `.pth` file that appends `rpyc` to `sys.path`.
+Each `venv` contains a `.pth` file that appends `rpyc` to `sys.path`. We can run `rpyc_classic.py` using `pyenv` like so.
 
 .. code-block:: bash
 
-    PYENV_VERSION=3.11-dev pyenv exec python ./bin/rpyc_classic.py --host 127.0.0.1
-    PYENV_VERSION=3.10-dev pyenv exec python -c "import rpyc; conn = rpyc.utils.classic.connect('127.0.0.1'); conn.modules.sys.stderr.write('hello world\n')"
+    PYENV_VERSION=3.10-dev pyenv exec python ./bin/rpyc_classic.py --host 127.0.0.1
+    PYENV_VERSION=3.9-dev pyenv exec python -c "import rpyc; conn = rpyc.utils.classic.connect('127.0.0.1'); conn.modules.sys.stderr.write('hello world\n')"
 
+Unit tests can be run using your desired Python version as well.
+
+.. code-block:: bash
+
+    PYENV_VERSION=3.9-dev pyenv exec python -m unittest discover -s ./tests -k test_affinity
+    PYENV_VERSION=3.8-dev pyenv exec python -m unittest discover -s ./tests
 
 Testing Supported Python Versions via Docker
 --------------------------------------------
@@ -67,8 +73,7 @@ Running the chained-connection unit test
 
 .. code-block:: bash
 
-    cd tests
-    python  -m unittest test_get_id_pack.Test_get_id_pack.test_chained_connect
+    python -m unittest discover -s ./tests -k test_get_id_pack.Test_get_id_pack.test_chained_connect
 
 
 After stopping Wireshark, export specified packets, and open the PCAP. If not already configured, add a custom display column:
diff -pruN 5.1.0-1/docs/docs/rpyc-release-process.rst 5.2.1-1/docs/docs/rpyc-release-process.rst
--- 5.1.0-1/docs/docs/rpyc-release-process.rst	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/docs/docs/rpyc-release-process.rst	2022-07-31 00:27:15.000000000 +0000
@@ -3,17 +3,24 @@ RPyC Release Process
 
 A walkthrough of doing a RPyC Release.
 
-1. Describe commit history within `CHANGELOG.rst` (see `Generate Entry`_)
-2. Update `version` and `release_date` values for `rpyc/version.py` (`Semantic Versioning`_)
-3. Review `git status`, commit changes, and `git push`.
-4. Create an Annotated tag: `git tag -a 5.X.Y -m "Updated CHANGELOG.rst and version for release 5.X.Y"`
-5. Publish release tag: `git push origin 5.X.Y`
-
-6. Clean up any old build artifacts: `pyenv exec python setup.py clean --all`
-7. Create a wheel package: `pyenv exec python setup.py bdist_wheel`
-8. Upload the wheel package: `twine upload --repository-url https://upload.pypi.org/legacy/ dist/rpyc-*-any.whl`
+1. Ensure a clean and current build environment (i.e., ``git pull; git status``)
+2. Describe commit history within ``CHANGELOG.rst`` (see `Generate Entry`_)
+3. Update ``release_date`` in ``rpyc/version.py`` and bump version (`Semantic Versioning`_ and `Versioning using Hatch`_)
+4. Verify changes and run ``git add .``, ``git push``, and ``export ver=$(python -c 'import rpyc; print(rpyc.__version__)')``.
+5. Create an Annotated tag: ``git tag -a ${ver} -m "Updated CHANGELOG.rst and version for release ${ver}"``
+6. Publish release tag: ``git push origin ${ver}``
+7. Install hatch: ``pyenv exec pip install hatch``
+8. Clean up any old build artifacts: ``git clean -Xf -- dist/``
+9. Create a wheel package: ``pyenv exec hatch -v build``
+10. Upload the wheel package: ``pyenv exec hatch -v publish --user=__token__ --auth=${pypi_token} ; history -c && history -w``
+11. Create a new release with notes taken from the ``CHANGELOG.rst`` entry (``%s/`#/#/g`` and ``%s/`_//g``)
+12. Make sure to add the wheel as an attachment to the release and you are done!
 
 .. _Semantic Versioning: https://semver.org/
+.. _Versioning using Hatch: https://hatch.pypa.io/latest/version/
+.. _Build using Hatch: https://hatch.pypa.io/latest/build/
+.. _Publishing to PyPi using Hatch: https://hatch.pypa.io/latest/build/
+
 
 .. _Generate Entry:
 
@@ -21,20 +28,60 @@ Generate CHANGELOG.rst Entry
 ---------------------------------
 To create an initial entry draft, run some shell commands.
 
-.. code-block:: bash
+.. code-block:: bash
+
+    owner="tomerfiliba-org"
+    repo="rpyc"
+    #url="https://github.com/${owner}/${repo}"
+    revisions="$(git rev-list $(hatch version)..HEAD | sed -z 's/\(.*\)\n/\1/;s/\n/|/g')"
+    numbers=( $(git log $(hatch version)..HEAD --no-merges --oneline | sed -nE 's/^.*#([0-9]+).*/\1/p' | sort -nu) )
+    issue_numbers="$(echo "${numbers[@]}" | sed 's/ /|/g')"
+    #
+    api_filter() { 
+        jq -rc ".[] | select( .${1} | . != null) | select(.${1} | tostring | test(\"${2}\"))" "${3}"
+    }
+    url="https://api.github.com/repos/${owner}/${repo}"
+    params="state=closed&accept=application/vnd.github+json"
+    tmp_issues="/tmp/issues.json"
+    tmp_pulls="/tmp/pulls.json"
+    curl "${url}/issues?${params}" > "${tmp_issues}"
+    curl "${url}/pulls?${params}" > "${tmp_pulls}"
+    # Pulls
+    gh_numbers=( )
+    bullets=( )
+    url_refs=( )
+    while IFS= read -r pull; do
+        title="$(echo "${pull}" | jq -r .title)"
+        number="$(echo "${pull}" | jq -r .number)"
+        pull_url="$(echo "${pull}" | jq -r .html_url)"
+        # Add GH number
+        gh_numbers+=( "${number}" )
+        # Add bullet
+        bullets+=( "- \`#${number}\`_ ${title}" )
+        # Add url ref
+        url_ref=".. _#${number}: ${pull_url}"
+        url_refs+=( "${url_ref}" )
+    done <<< "$(api_filter "merge_commit_sha" "${revisions}" "${tmp_pulls}")"
+    # Issues
+    while IFS= read -r issue; do
+        title="$(echo "${issue}" | jq -r .title)"
+        number="$(echo "${issue}" | jq -r .number)"
+        issue_url="$(echo "${issue}" | jq -r .html_url)"
+        # Add bullet
+        bullets+=( "- \`#${number}\`_ ${title}" )
+        # Add url ref
+        url_ref=".. _#${number}: ${issue_url}"
+        url_refs+=( "${url_ref}" )
+    done <<< "$(api_filter "number" "${issue_numbers}" "${tmp_issues}")"
 
-    last_release="1/12/2021"
-    log_since="$(git log --since="${last_release}" --merges --oneline)"
-    pulls=( $(echo "${log_since}" | sed -n 's/^.*request #\([0-9]*\) from .*$/\1/p') )
-    url="https://github.com/tomerfiliba-org/rpyc/pull/"
+    # Header
     printf '5.X.Y\n=====\n'
     printf 'Date: %s\n\n' "$(date --rfc-3339=date)"
-    for pull in ${pulls[@]}; do
-        printf -- '- `#%d`_\n' "${pull}"
+    for bullet in "${bullets[@]}"; do
+        printf '%s\n' "${bullet}"
     done
     printf '\n'
-    for pull in ${pulls[@]}; do
-        printf '.. _#%d: %s%d\n' "${pull}" "${url}" "${pull}"
+    for ref in "${url_refs[@]}"; do
+        printf '%s\n' "${ref}"
     done
 
 Once this entry has been inserted at the top of `CHANGELOG.rst`, review what it looks like with `instant-rst`.
@@ -44,3 +91,8 @@ Once insert this entry at the top of `CH
     instantRst -b chromium -p 8612 -f "CHANGELOG.rst"
 
 
+Misc. References
+================
+- `Wheel file name convention`_
+
+.. _Wheel file name convention: https://peps.python.org/pep-0427/#file-name-convention
diff -pruN 5.1.0-1/docs/docs/services.rst 5.2.1-1/docs/docs/services.rst
--- 5.1.0-1/docs/docs/services.rst	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/docs/docs/services.rst	2022-07-31 00:27:15.000000000 +0000
@@ -44,6 +44,31 @@ that's exposed by the other party. For s
 ``exposed_`` members. For instance, the ``foo`` method above is inaccessible (attempting to
 call it will result in an ``AttributeError``).
 
+Rather than having each method name start with ``exposed_``, you may prefer to use a
+decorator. Let's revisit the calculator service, but this time we'll use decorators. ::
+
+    import rpyc
+
+    @rpyc.service
+    class CalculatorService(rpyc.Service):
+        @rpyc.exposed
+        def add(self, a, b):
+            return a + b
+        @rpyc.exposed
+        def sub(self, a, b):
+            return a - b
+        @rpyc.exposed
+        def mul(self, a, b):
+            return a * b
+        @rpyc.exposed
+        def div(self, a, b):
+            return a / b
+        def foo(self):
+            print("foo")
+
+When implementing services, ``@rpyc.service`` and ``@rpyc.exposed`` can replace the ``exposed_`` naming
+convention.
+
 Implementing Services
 ---------------------
 As previously explained, all ``exposed_`` members of your service class will be available to
@@ -68,9 +93,8 @@ The first name in this list is considere
 are considered aliases. This distinction is meaningless to the protocol and the registry server.
 
 Your service class may also define two special methods: ``on_connect(self, conn)`` and
-``on_disconnect(self, conn)``. These methods are invoked, not surprisingly, when a connection
-has been established, and when it's been disconnected. Note that during ``on_disconnect``,
-the connection is already dead, so you can no longer access any remote objects.
+``on_disconnect(self, conn)``. The ``on_connect`` method is invoked when a connection has been established.
+From the client's perspective, ``on_connect`` is invoked each time the client successfully calls ``rpyc.connect`` or any other function provided by the connection factory module, ``rpyc.utils.factory``. Once the connection is dead, ``on_disconnect`` is invoked (remote objects can no longer be accessed from within ``on_disconnect``).
 
 .. note::
    Try to avoid overriding the ``__init__`` method of the service. Place all initialization-related
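
For reference, a service written with the decorators above is consumed exactly like one using the
``exposed_`` prefix. A minimal, self-contained sketch, assuming rpyc 5.2.1 and an arbitrary port (18861)::

    import time
    import rpyc
    from rpyc.lib import spawn
    from rpyc.utils.server import ThreadedServer

    @rpyc.service
    class CalculatorService(rpyc.Service):
        @rpyc.exposed
        def add(self, a, b):
            return a + b

    if __name__ == "__main__":
        server = ThreadedServer(CalculatorService, port=18861)
        spawn(server.start)          # serve in a daemon thread for the demo
        time.sleep(0.5)              # crude wait for the listener to come up
        conn = rpyc.connect("localhost", 18861)
        print(conn.root.add(2, 3))   # -> 5, no exposed_ prefix needed on the client
        conn.close()
        server.close()
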
diff -pruN 5.1.0-1/docs/docs/zerodeploy.rst 5.2.1-1/docs/docs/zerodeploy.rst
--- 5.1.0-1/docs/docs/zerodeploy.rst	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/docs/docs/zerodeploy.rst	2022-07-31 00:27:15.000000000 +0000
@@ -114,4 +114,9 @@ under the user's permissions. You can co
 ``rm -rf /``. Second, it creates an SSH tunnel for the transport, so everything is kept encrypted on the wire.
 And you get these features for free -- just configuring SSH accounts will do.
 
-
+Timeouts
+--------
+You can pass a ``timeout`` argument, in seconds, to the ``close()`` method.  A ``TimeoutExpired`` is raised if
+any subprocess communication takes longer than the timeout, after the subprocess has been told to terminate.  By
+default, the timeout is ``None``, i.e. infinite.  A timeout value prevents a ``close()`` call from blocking
+indefinitely.
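
A sketch of the new ``timeout`` in use; the host name and the 5-second value are placeholders, and SSH
access with rpyc installed on the remote side is assumed::

    from plumbum import SshMachine
    from rpyc.utils.zerodeploy import DeployedServer

    mach = SshMachine("example-host")      # placeholder host, needs working SSH
    server = DeployedServer(mach)
    conn = server.classic_connect()        # classic rpyc connection over the tunnel
    print(conn.modules.sys.platform)       # run something on the remote side
    # Without a timeout, close() can block forever on a wedged remote process;
    # with one, a TimeoutExpired is raised after 5 seconds instead.
    server.close(timeout=5)
    mach.close()
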
diff -pruN 5.1.0-1/.github/workflows/python-app.yml 5.2.1-1/.github/workflows/python-app.yml
--- 5.1.0-1/.github/workflows/python-app.yml	1970-01-01 00:00:00.000000000 +0000
+++ 5.2.1-1/.github/workflows/python-app.yml	2022-07-31 00:27:15.000000000 +0000
@@ -0,0 +1,45 @@
+# This workflow will install Python dependencies, run tests and lint with a single version of Python
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: RPyC
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  unittest-3-10:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Set up Python 3.10
+      uses: actions/setup-python@v2
+      with:
+        python-version: "3.10"
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip setuptools flake8
+        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+        echo "PYTHONPATH=${PYTHONPATH}:/home/runner/work/rpyc" >> $GITHUB_ENV
+    - name: Lint with flake8
+      run: |
+        # stop the build if there are Python syntax errors or undefined names
+        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+    - name: Init ssh settings
+      run: |
+        mkdir -pv ~/.ssh
+        chmod 700 ~/.ssh
+        echo NoHostAuthenticationForLocalhost yes >> ~/.ssh/config
+        echo StrictHostKeyChecking no >> ~/.ssh/config
+        ssh-keygen -q -f ~/.ssh/id_rsa -N ''
+        cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
+        uname -a
+    - name: Test with unittest
+      run: |
+        python -m unittest discover -s ./rpyc ./tests
diff -pruN 5.1.0-1/.gitignore 5.2.1-1/.gitignore
--- 5.1.0-1/.gitignore	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/.gitignore	2022-07-31 00:27:15.000000000 +0000
@@ -9,4 +9,5 @@ __pycache__
 rpyc.egg-info
 *.egg
 *.class
+*.swp
 build/
diff -pruN 5.1.0-1/.pep8 5.2.1-1/.pep8
--- 5.1.0-1/.pep8	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/.pep8	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-[pep8]
-max-line-length=120
diff -pruN 5.1.0-1/pyproject.toml 5.2.1-1/pyproject.toml
--- 5.1.0-1/pyproject.toml	1970-01-01 00:00:00.000000000 +0000
+++ 5.2.1-1/pyproject.toml	2022-07-31 00:27:15.000000000 +0000
@@ -0,0 +1,58 @@
+[build-system]
+requires = [
+    "hatchling>=1.6.0",
+]
+build-backend = "hatchling.build"
+
+[project]
+name = "rpyc"
+description = "Remote Python Call (RPyC) is a transparent and symmetric distributed computing library"
+readme = "README.rst"
+license = "MIT"
+requires-python = ">=3.7"
+authors = [
+    { name = "Tomer Filiba", email = "tomerfiliba@gmail.com" },
+    { name = "James Stronz", email = "comrumino@archstrike.org" },
+]
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Intended Audience :: Developers",
+    "Intended Audience :: System Administrators",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.7",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Topic :: Internet",
+    "Topic :: Software Development :: Libraries :: Python Modules",
+    "Topic :: Software Development :: Object Brokering",
+    "Topic :: Software Development :: Testing",
+    "Topic :: System :: Clustering",
+    "Topic :: System :: Distributed Computing",
+    "Topic :: System :: Monitoring",
+    "Topic :: System :: Networking",
+    "Topic :: System :: Systems Administration",
+]
+dependencies = [
+    "plumbum",
+]
+dynamic = [
+    "version",
+]
+
+[project.urls]
+Homepage = "https://rpyc.readthedocs.org"
+Source = "https://github.com/tomerfiliba-org/rpyc"
+
+[tool.hatch.version]
+path = "rpyc/version.py"
+
+[tool.hatch.build.targets.sdist]
+only-include = [
+    "/rpyc",
+    "/bin",
+]
+[tool.hatch.build.targets.wheel]
+packages = ["rpyc"]
diff -pruN 5.1.0-1/README.rst 5.2.1-1/README.rst
--- 5.1.0-1/README.rst	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/README.rst	2022-07-31 00:27:15.000000000 +0000
@@ -36,8 +36,8 @@ Documentation can be found at https://rp
    :target:            https://pypi.python.org/pypi/rpyc#downloads
    :alt:               Python Versions
 
-.. |Tests| image::     https://img.shields.io/travis/tomerfiliba-org/rpyc/master.svg?style=flat
-   :target:            https://travis-ci.org/tomerfiliba-org/rpyc
+.. |Tests| image::     https://github.com/tomerfiliba-org/rpyc/actions/workflows/python-app.yml/badge.svg
+   :target:            https://github.com/tomerfiliba-org/rpyc/actions/workflows/python-app.yml
    :alt:               Build Status
 
 .. |License| image::   https://img.shields.io/pypi/l/rpyc.svg?style=flat
diff -pruN 5.1.0-1/rpyc/core/async_.py 5.2.1-1/rpyc/core/async_.py
--- 5.1.0-1/rpyc/core/async_.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/core/async_.py	2022-07-31 00:27:15.000000000 +0000
@@ -13,14 +13,14 @@ class AsyncResult(object):
 
     def __init__(self, conn):
         self._conn = conn
-        self._is_ready = Event()
+        self._is_ready = False
         self._is_exc = None
         self._obj = None
         self._callbacks = []
         self._ttl = Timeout(None)
 
     def __repr__(self):
-        if self._is_ready.is_set():
+        if self._is_ready:
             state = "ready"
         elif self._is_exc:
             state = "error"
@@ -35,7 +35,7 @@ class AsyncResult(object):
             return
         self._is_exc = is_exc
         self._obj = obj
-        self._is_ready.set()
+        self._is_ready = True
         for cb in self._callbacks:
             cb(self)
         del self._callbacks[:]
@@ -44,9 +44,14 @@ class AsyncResult(object):
         """Waits for the result to arrive. If the AsyncResult object has an
         expiry set, and the result did not arrive within that timeout,
         an :class:`AsyncResultTimeout` exception is raised"""
-        while not self._is_ready.is_set() and not self._ttl.expired():
+        while not (self._is_ready or self.expired):
+            # Serve the connection since we are not ready. Suppose
+            # the reply for our seq is served. The callback is this class
+            # so __call__ sets our obj and _is_ready to true.
             self._conn.serve(self._ttl)
-        if not self._is_ready.is_set():
+
+        # Check if we timed out before result was ready
+        if not self._is_ready:
             raise AsyncResultTimeout("result expired")
 
     def add_callback(self, func):
@@ -57,7 +62,7 @@ class AsyncResult(object):
 
         :param func: the callback function to add
         """
-        if self._is_ready.is_set():
+        if self._is_ready:
             func(self)
         else:
             self._callbacks.append(func)
@@ -73,12 +78,12 @@ class AsyncResult(object):
     @property
     def ready(self):
         """Indicates whether the result has arrived"""
-        if self._is_ready.is_set():
+        if self._is_ready:
             return True
-        if self._ttl.expired():
+        if self.expired:
             return False
         self._conn.poll_all()
-        return self._is_ready.is_set()
+        return self._is_ready
 
     @property
     def error(self):
@@ -88,7 +93,7 @@ class AsyncResult(object):
     @property
     def expired(self):
         """Indicates whether the AsyncResult has expired"""
-        return not self._is_ready.is_set() and self._ttl.expired()
+        return not self._is_ready and self._ttl.expired()
 
     @property
     def value(self):
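
The externally visible AsyncResult behaviour is unchanged by the rework above; a short usage sketch,
assuming a classic server is already listening on the default port 18812::

    import rpyc

    conn = rpyc.classic.connect("localhost")            # default classic port 18812
    async_sleep = rpyc.async_(conn.modules.time.sleep)

    res = async_sleep(1)         # returns an AsyncResult immediately
    res.set_expiry(5)            # AsyncResultTimeout if no reply arrives within 5 seconds
    res.wait()                   # serves the connection until the reply arrives
    print(res.ready, res.error)  # True False
    print(res.value)             # None -- time.sleep has no return value
    conn.close()
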
diff -pruN 5.1.0-1/rpyc/core/__init__.py 5.2.1-1/rpyc/core/__init__.py
--- 5.1.0-1/rpyc/core/__init__.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/core/__init__.py	2022-07-31 00:27:15.000000000 +0000
@@ -1,7 +1,7 @@
 # flake8: noqa: F401
 from rpyc.core.stream import SocketStream, TunneledSocketStream, PipeStream
 from rpyc.core.channel import Channel
-from rpyc.core.protocol import Connection
+from rpyc.core.protocol import Connection, DEFAULT_CONFIG
 from rpyc.core.netref import BaseNetref
 from rpyc.core.async_ import AsyncResult, AsyncResultTimeout
 from rpyc.core.service import Service, VoidService, SlaveService, MasterService, ClassicService
diff -pruN 5.1.0-1/rpyc/core/protocol.py 5.2.1-1/rpyc/core/protocol.py
--- 5.1.0-1/rpyc/core/protocol.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/core/protocol.py	2022-07-31 00:27:15.000000000 +0000
@@ -6,8 +6,8 @@ import socket
 import time  # noqa: F401
 import gc  # noqa: F401
 
-from threading import Lock, Condition
-from rpyc.lib import spawn, Timeout, get_methods, get_id_pack
+from threading import Lock, Condition, RLock
+from rpyc.lib import spawn, Timeout, get_methods, get_id_pack, hasattr_static
 from rpyc.lib.compat import pickle, next, maxint, select_error, acquire_lock  # noqa: F401
 from rpyc.lib.colls import WeakValueDict, RefCountingColl
 from rpyc.core import consts, brine, vinegar, netref
@@ -109,7 +109,7 @@ Parameter
 
 ``connid``                               ``None``          **Runtime**: the RPyC connection ID (used
                                                            mainly for debugging purposes)
-``credentials``                          ``None``          **Runtime**: the credentails object that was returned
+``credentials``                          ``None``          **Runtime**: the credentials object that was returned
                                                            by the server's :ref:`authenticator <api-authenticators>`
                                                            or ``None``
 ``endpoints``                            ``None``          **Runtime**: The connection's endpoints. This is a tuple
@@ -145,9 +145,9 @@ class Connection(object):
         self._HANDLERS = self._request_handlers()
         self._channel = channel
         self._seqcounter = itertools.count()
-        self._recvlock = Lock()
+        self._recvlock = RLock()  # AsyncResult implementation means that synchronous requests have multiple acquires
         self._sendlock = Lock()
-        self._recv_event = Condition()
+        self._recv_event = Condition()  # TODO: why not simply timeout? why not associate w/ recvlock? explain/redesign
         self._request_callbacks = {}
         self._local_objects = RefCountingColl()
         self._last_traceback = None
@@ -380,21 +380,35 @@ class Connection(object):
         """
         timeout = Timeout(timeout)
         with self._recv_event:
+            # Exit early if we cannot acquire the recvlock
             if not self._recvlock.acquire(False):
-                return wait_for_lock and self._recv_event.wait(timeout.timeleft())
+                if wait_for_lock:
+                    # Wait condition for recvlock release; recvlock is not underlying lock for condition
+                    return self._recv_event.wait(timeout.timeleft())
+                else:
+                    return False
+        # Assume the receive rlock is acquired and incremented
         try:
+            data = None  # Ensure data is initialized
             data = self._channel.poll(timeout) and self._channel.recv()
-            if not data:
-                return False
-        except EOFError:
-            self.close()
+        except Exception as exc:
+            if isinstance(exc, EOFError):
+                self.close()  # sends close async request
+            self._recvlock.release()
+            with self._recv_event:
+                self._recv_event.notify_all()
             raise
-        finally:
+        # At this point, the recvlock was acquired once, we must release once before exiting the function
+        if data:
+            # Dispatch will unbox, invoke callbacks, etc.
+            self._dispatch(data)
             self._recvlock.release()
             with self._recv_event:
                 self._recv_event.notify_all()
-        self._dispatch(data)
-        return True
+            return True
+        else:
+            self._recvlock.release()
+            return False
 
     def poll(self, timeout=0):  # serving
         """Serves a single transaction, should one arrives in the given
@@ -463,14 +477,17 @@ class Connection(object):
             pass
         return at_least_once
 
-    def sync_request(self, handler, *args):  # serving
+    def sync_request(self, handler, *args):
         """requests, sends a synchronous request (waits for the reply to arrive)
 
         :raises: any exception that the requets may be generated
         :returns: the result of the request
         """
         timeout = self._config["sync_request_timeout"]
-        return self.async_request(handler, *args, timeout=timeout).value
+        _async_res = self.async_request(handler, *args, timeout=timeout)
+        # _async_res is an instance of AsyncResult, the value property invokes Connection.serve via AsyncResult.wait
+        # So, the _recvlock can be acquired multiple times by the owning thread and warrants the use of RLock
+        return _async_res.value
 
     def _async_request(self, handler, args=(), callback=(lambda a, b: None)):  # serving
         seq = self._get_seq_id()
@@ -515,7 +532,7 @@ class Connection(object):
         plain |= config["allow_exposed_attrs"] and name.startswith(prefix)
         plain |= config["allow_safe_attrs"] and name in config["safe_attrs"]
         plain |= config["allow_public_attrs"] and not name.startswith("_")
-        has_exposed = prefix and hasattr(obj, prefix + name)
+        has_exposed = prefix and (hasattr(obj, prefix + name) or hasattr_static(obj, prefix + name))
         if plain and (not has_exposed or hasattr(obj, name)):
             return name
         if has_exposed:
diff -pruN 5.1.0-1/rpyc/core/service.py 5.2.1-1/rpyc/core/service.py
--- 5.1.0-1/rpyc/core/service.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/core/service.py	2022-07-31 00:27:15.000000000 +0000
@@ -189,7 +189,7 @@ class SlaveService(Slave, Service):
             instantiate_custom_exceptions=True,
             instantiate_oldstyle_exceptions=True,
         ))
-        super(SlaveService, self).on_connect(conn)
+        super().on_connect(conn)
 
 
 class FakeSlaveService(VoidService):
@@ -212,7 +212,7 @@ class MasterService(Service):
     __slots__ = ()
 
     def on_connect(self, conn):
-        super(MasterService, self).on_connect(conn)
+        super().on_connect(conn)
         self._install(conn, conn.root)
 
     @staticmethod
diff -pruN 5.1.0-1/rpyc/core/stream.py 5.2.1-1/rpyc/core/stream.py
--- 5.1.0-1/rpyc/core/stream.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/core/stream.py	2022-07-31 00:27:15.000000000 +0000
@@ -12,6 +12,7 @@ from rpyc.core.consts import STREAM_CHUN
 win32file = safe_import("win32file")
 win32pipe = safe_import("win32pipe")
 win32event = safe_import("win32event")
+ssl = safe_import("ssl")
 
 
 retry_errnos = (errno.EAGAIN, errno.EWOULDBLOCK)
@@ -196,8 +197,8 @@ class SocketStream(Stream):
 
         :param host: the host name
         :param port: the TCP port
-        :param ssl_kwargs: a dictionary of keyword arguments to be passed
-                           directly to ``ssl.wrap_socket``
+        :param ssl_kwargs: a dictionary of keyword arguments for
+                           ``ssl.SSLContext`` and ``ssl.SSLContext.wrap_socket``
         :param kwargs: additional keyword arguments: ``family``, ``socktype``,
                        ``proto``, ``timeout``, ``nodelay``, passed directly to
                        the ``socket`` constructor, or ``ipv6``.
@@ -206,12 +207,31 @@ class SocketStream(Stream):
 
         :returns: a :class:`SocketStream`
         """
-        import ssl
         if kwargs.pop("ipv6", False):
             kwargs["family"] = socket.AF_INET6
         s = cls._connect(host, port, **kwargs)
         try:
-            s2 = ssl.wrap_socket(s, **ssl_kwargs)
+            if "ssl_version" in ssl_kwargs:
+                context = ssl.SSLContext(ssl_kwargs.pop("ssl_version"))
+            else:
+                context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH)
+            certfile = ssl_kwargs.pop("certfile", None)
+            keyfile = ssl_kwargs.pop("keyfile", None)
+            if certfile is not None:
+                context.load_cert_chain(certfile, keyfile=keyfile)
+            ca_certs = ssl_kwargs.pop("ca_certs", None)
+            if ca_certs is not None:
+                context.load_verify_locations(ca_certs)
+            ciphers = ssl_kwargs.pop("ciphers", None)
+            if ciphers is not None:
+                context.set_ciphers(ciphers)
+            check_hostname = ssl_kwargs.pop("check_hostname", None)
+            if check_hostname is not None:
+                context.check_hostname = check_hostname
+            cert_reqs = ssl_kwargs.pop("cert_reqs", None)
+            if cert_reqs is not None:
+                context.verify_mode = cert_reqs
+            s2 = context.wrap_socket(s, server_hostname=host, **ssl_kwargs)
             return cls(s2)
         except BaseException:
             s.close()
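
Callers of ``rpyc.ssl_connect`` keep the same keyword arguments; a hedged client sketch, where the host,
port, and file paths are placeholders::

    import rpyc

    conn = rpyc.ssl_connect(
        "server.example.org", 18821,
        keyfile="client.key",      # client key/cert pair presented to the server
        certfile="client.crt",
        ca_certs="ca.crt",         # verify the server against this CA (implies CERT_REQUIRED)
    )
    print(conn.root)               # root object of the remote service
    conn.close()
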
diff -pruN 5.1.0-1/rpyc/core/vinegar.py 5.2.1-1/rpyc/core/vinegar.py
--- 5.1.0-1/rpyc/core/vinegar.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/core/vinegar.py	2022-07-31 00:27:15.000000000 +0000
@@ -76,7 +76,7 @@ def dump(typ, val, tb, include_local_tra
                 attrval = repr(attrval)
             attrs.append((name, attrval))
     if include_local_version:
-        attrs.append(("_remote_version", version.version_string))
+        attrs.append(("_remote_version", version.__version__))
     else:
         attrs.append(("_remote_version", "<version denied>"))
     return (typ.__module__, typ.__name__), tuple(args), tuple(attrs), tbtext
@@ -161,7 +161,7 @@ def load(val, import_custom_exceptions,
     remote_ver = getattr(exc, "_remote_version", "<version denied>")
     if remote_ver != "<version denied>" and remote_ver.split('.')[0] != str(version.version[0]):
         _warn = '\nWARNING: Remote is on RPyC {} and local is on RPyC {}.\n\n'
-        tbtext += _warn.format(remote_ver, version.version_string)
+        tbtext += _warn.format(remote_ver, version.__version__)
 
     exc._remote_tb = tbtext
     return exc
diff -pruN 5.1.0-1/rpyc/__init__.py 5.2.1-1/rpyc/__init__.py
--- 5.1.0-1/rpyc/__init__.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/__init__.py	2022-07-31 00:27:15.000000000 +0000
@@ -49,8 +49,8 @@ from rpyc.utils.factory import (connect_
                                 connect_stdpipes, connect, ssl_connect, list_services, discover, connect_by_service, connect_subproc,
                                 connect_thread, ssh_connect)
 from rpyc.utils.helpers import async_, timed, buffiter, BgServingThread, restricted
-from rpyc.utils import classic
-from rpyc.version import version as __version__
+from rpyc.utils import classic, exposed, service
+from rpyc.version import __version__
 
 from rpyc.lib import setup_logger, spawn
 from rpyc.utils.server import OneShotServer, ThreadedServer, ThreadPoolServer, ForkingServer
diff -pruN 5.1.0-1/rpyc/lib/__init__.py 5.2.1-1/rpyc/lib/__init__.py
--- 5.1.0-1/rpyc/lib/__init__.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/lib/__init__.py	2022-07-31 00:27:15.000000000 +0000
@@ -41,15 +41,20 @@ def safe_import(name):
     return mod
 
 
-def setup_logger(quiet=False, logfile=None):
+def setup_logger(quiet=False, logfile=None, namespace=None):
     opts = {}
     if quiet:
         opts['level'] = logging.ERROR
+        opts['format'] = '%(asctime)s %(levelname)s: %(message)s'
+        opts['datefmt'] = '%b %d %H:%M:%S'
     else:
         opts['level'] = logging.DEBUG
+        opts['format'] = '%(asctime)s %(levelname)s %(name)s[%(threadName)s]: %(message)s'
+        opts['datefmt'] = '%b %d %H:%M:%S'
     if logfile:
         opts['filename'] = logfile
     logging.basicConfig(**opts)
+    return logging.getLogger('rpyc' if namespace is None else f'rpyc.{namespace}')
 
 
 class hybridmethod(object):
@@ -67,6 +72,16 @@ class hybridmethod(object):
         raise AttributeError("Cannot overwrite method")
 
 
+def hasattr_static(obj, attr):
+    """Returns if `inspect.getattr_static` can find an attribute of ``obj``."""
+    try:
+        inspect.getattr_static(obj, attr)
+    except AttributeError:
+        return False
+    else:
+        return True
+
+
 def spawn(*args, **kwargs):
     """Start and return daemon thread. ``spawn(func, *args, **kwargs)``."""
     func, args = args[0], args[1:]
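
The reason for using ``inspect.getattr_static``: a descriptor that raises makes plain ``hasattr`` report
``False``, while the static lookup still finds the attribute without invoking it. A small illustration::

    import inspect
    from rpyc.lib import hasattr_static

    class Flaky:
        @property
        def exposed_value(self):
            raise AttributeError("backing store not ready yet")

    obj = Flaky()
    print(hasattr(obj, "exposed_value"))                 # False -- the property ran and raised
    print(inspect.getattr_static(obj, "exposed_value"))  # <property object ...> -- not invoked
    print(hasattr_static(obj, "exposed_value"))          # True
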
diff -pruN 5.1.0-1/rpyc/utils/authenticators.py 5.2.1-1/rpyc/utils/authenticators.py
--- 5.1.0-1/rpyc/utils/authenticators.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/utils/authenticators.py	2022-07-31 00:27:15.000000000 +0000
@@ -35,7 +35,7 @@ class AuthenticationError(Exception):
 
 class SSLAuthenticator(object):
     """An implementation of the authenticator protocol for ``SSL``. The given
-    socket is wrapped by ``ssl.wrap_socket`` and is validated based on
+    socket is wrapped by ``ssl.SSLContext.wrap_socket`` and is validated based on
     certificates
 
     :param keyfile: the server's key file
@@ -48,7 +48,7 @@ class SSLAuthenticator(object):
                     to restrict the available ciphers. New in Python 2.7/3.2
     :param ssl_version: the SSL version to use
 
-    Refer to `ssl.wrap_socket <http://docs.python.org/dev/library/ssl.html#ssl.wrap_socket>`_
+    Refer to `ssl.SSLContext <http://docs.python.org/dev/library/ssl.html#ssl.SSLContext>`_
     for more info.
 
     Clients can connect to this authenticator using
@@ -70,19 +70,22 @@ class SSLAuthenticator(object):
                 self.cert_reqs = ssl.CERT_NONE
         else:
             self.cert_reqs = cert_reqs
-        if ssl_version is None:
-            self.ssl_version = ssl.PROTOCOL_TLS
-        else:
-            self.ssl_version = ssl_version
+        self.ssl_version = ssl_version
 
     def __call__(self, sock):
-        kwargs = dict(keyfile=self.keyfile, certfile=self.certfile,
-                      server_side=True, ca_certs=self.ca_certs, cert_reqs=self.cert_reqs,
-                      ssl_version=self.ssl_version)
-        if self.ciphers is not None:
-            kwargs["ciphers"] = self.ciphers
         try:
-            sock2 = ssl.wrap_socket(sock, **kwargs)
+            if self.ssl_version is None:
+                context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
+            else:
+                context = ssl.SSLContext(self.ssl_version)
+            context.load_cert_chain(self.certfile, keyfile=self.keyfile)
+            if self.ca_certs is not None:
+                context.load_verify_locations(self.ca_certs)
+            if self.ciphers is not None:
+                context.set_ciphers(self.ciphers)
+            if self.cert_reqs is not None:
+                context.verify_mode = self.cert_reqs
+            sock2 = context.wrap_socket(sock, server_side=True)
         except ssl.SSLError:
             ex = sys.exc_info()[1]
             raise AuthenticationError(str(ex))
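
The server-side counterpart, sketched with placeholder certificate paths and port::

    from rpyc.core.service import SlaveService
    from rpyc.utils.authenticators import SSLAuthenticator
    from rpyc.utils.server import ThreadedServer

    auth = SSLAuthenticator(
        keyfile="server.key",
        certfile="server.crt",
        ca_certs="ca.crt",     # only clients whose certificates were issued by this CA may connect
    )
    server = ThreadedServer(SlaveService, port=18821, authenticator=auth)
    server.start()
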
diff -pruN 5.1.0-1/rpyc/utils/classic.py 5.2.1-1/rpyc/utils/classic.py
--- 5.1.0-1/rpyc/utils/classic.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/utils/classic.py	2022-07-31 00:27:15.000000000 +0000
@@ -97,17 +97,20 @@ def ssl_connect(host, port=DEFAULT_SERVE
     :param port: the TCP port to use
     :param ipv6: whether to create an IPv6 socket or an IPv4 one
 
-    The following arguments are passed directly to
-    `ssl.wrap_socket <http://docs.python.org/dev/library/ssl.html#ssl.wrap_socket>`_:
+    The following arguments are passed to
+    `ssl.SSLContext <http://docs.python.org/dev/library/ssl.html#ssl.SSLContext>`_ and
+    its corresponding methods:
 
-    :param keyfile: see ``ssl.wrap_socket``. May be ``None``
-    :param certfile: see ``ssl.wrap_socket``. May be ``None``
-    :param ca_certs: see ``ssl.wrap_socket``. May be ``None``
-    :param cert_reqs: see ``ssl.wrap_socket``. By default, if ``ca_cert`` is specified,
-                      the requirement is set to ``CERT_REQUIRED``; otherwise it is
-                      set to ``CERT_NONE``
-    :param ssl_version: see ``ssl.wrap_socket``. The default is ``PROTOCOL_TLSv1``
-    :param ciphers: see ``ssl.wrap_socket``. May be ``None``. New in Python 2.7/3.2
+    :param keyfile: see ``ssl.SSLContext.load_cert_chain``. May be ``None``
+    :param certfile: see ``ssl.SSLContext.load_cert_chain``. May be ``None``
+    :param ca_certs: see ``ssl.SSLContext.load_verify_locations``. May be ``None``
+    :param cert_reqs: see ``ssl.SSLContext.verify_mode``. By default, if ``ca_cert`` is
+                      specified, the requirement is set to ``CERT_REQUIRED``; otherwise
+                      it is set to ``CERT_NONE``
+    :param ssl_version: see ``ssl.SSLContext``. The default is defined by
+                        ``ssl.create_default_context``
+    :param ciphers: see ``ssl.SSLContext.set_ciphers``. May be ``None``. New in
+                    Python 2.7/3.2
 
     :returns: an RPyC connection exposing ``SlaveService``
 
diff -pruN 5.1.0-1/rpyc/utils/factory.py 5.2.1-1/rpyc/utils/factory.py
--- 5.1.0-1/rpyc/utils/factory.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/utils/factory.py	2022-07-31 00:27:15.000000000 +0000
@@ -119,7 +119,7 @@ def unix_connect(path, service=VoidServi
 
 def ssl_connect(host, port, keyfile=None, certfile=None, ca_certs=None,
                 cert_reqs=None, ssl_version=None, ciphers=None,
-                service=VoidService, config={}, ipv6=False, keepalive=False):
+                service=VoidService, config={}, ipv6=False, keepalive=False, verify_mode=None):
     """
     creates an SSL-wrapped connection to the given host (encrypted and
     authenticated).
@@ -131,17 +131,17 @@ def ssl_connect(host, port, keyfile=None
     :param ipv6: whether to create an IPv6 socket or an IPv4 one(defaults to ``False``)
     :param keepalive: whether to set TCP keepalive on the socket (defaults to ``False``)
 
-    The following arguments are passed directly to
-    `ssl.wrap_socket <http://docs.python.org/dev/library/ssl.html#ssl.wrap_socket>`_:
-
-    :param keyfile: see ``ssl.wrap_socket``. May be ``None``
-    :param certfile: see ``ssl.wrap_socket``. May be ``None``
-    :param ca_certs: see ``ssl.wrap_socket``. May be ``None``
-    :param cert_reqs: see ``ssl.wrap_socket``. By default, if ``ca_cert`` is specified,
-                      the requirement is set to ``CERT_REQUIRED``; otherwise it is
-                      set to ``CERT_NONE``
-    :param ssl_version: see ``ssl.wrap_socket``. The default is ``PROTOCOL_TLSv1``
-    :param ciphers: see ``ssl.wrap_socket``. May be ``None``. New in Python 2.7/3.2
+    :param keyfile: see ``ssl.SSLContext.load_cert_chain``. May be ``None``
+    :param certfile: see ``ssl.SSLContext.load_cert_chain``. May be ``None``
+    :param ca_certs: see ``ssl.SSLContext.load_verify_locations``. May be ``None``
+    :param cert_reqs: see ``ssl.SSLContext.verify_mode``. By default, if ``ca_cert`` is
+                      specified, the requirement is set to ``CERT_REQUIRED``; otherwise
+                      it is set to ``CERT_NONE``
+    :param ssl_version: see ``ssl.SSLContext``. The default is defined by
+                        ``ssl.create_default_context``
+    :param ciphers: see ``ssl.SSLContext.set_ciphers``. May be ``None``. New in
+                    Python 2.7/3.2
+    :param verify_mode: see ``ssl.SSLContext.verify_mode``
 
     :returns: an RPyC connection
     """
@@ -150,14 +150,18 @@ def ssl_connect(host, port, keyfile=None
         ssl_kwargs["keyfile"] = keyfile
     if certfile is not None:
         ssl_kwargs["certfile"] = certfile
+    if verify_mode is not None:
+        ssl_kwargs["cert_reqs"] = verify_mode
+    else:
+        ssl_kwargs["cert_reqs"] = ssl.CERT_NONE
     if ca_certs is not None:
         ssl_kwargs["ca_certs"] = ca_certs
         ssl_kwargs["cert_reqs"] = ssl.CERT_REQUIRED
     if cert_reqs is not None:
         ssl_kwargs["cert_reqs"] = cert_reqs
-    if ssl_version is None:
-        ssl_kwargs["ssl_version"] = ssl.PROTOCOL_TLSv1
-    else:
+    elif cert_reqs != ssl.CERT_NONE:
+        ssl_kwargs["check_hostname"] = False
+    if ssl_version is not None:
         ssl_kwargs["ssl_version"] = ssl_version
     if ciphers is not None:
         ssl_kwargs["ciphers"] = ciphers
@@ -229,11 +233,11 @@ def discover(service_name, host=None, re
     return addrs
 
 
-def list_services(registrar=None, timeout=2):
+def list_services(registrar=None, filter_host=None, timeout=2):
     services = ()
     if registrar is None:
         registrar = UDPRegistryClient(timeout=timeout)
-    services = registrar.list()
+    services = registrar.list(filter_host)
     if services is None:
         raise ForbiddenError("Registry doesn't allow listing")
     return services
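
For illustration (not part of the upstream patch): a minimal client-side sketch of the reworked ``ssl_connect``, assuming a hypothetical server endpoint and certificate file paths.

    import ssl
    import rpyc

    # Hypothetical host, port and certificate paths -- adjust to the actual deployment.
    conn = rpyc.ssl_connect(
        "server.example.com", 18812,
        keyfile="client.key", certfile="client.crt",
        ca_certs="ca.crt",               # implies CERT_REQUIRED unless overridden
        verify_mode=ssl.CERT_REQUIRED,   # keyword added in this release
    )
    print(conn.root)
    conn.close()
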
diff -pruN 5.1.0-1/rpyc/utils/helpers.py 5.2.1-1/rpyc/utils/helpers.py
--- 5.1.0-1/rpyc/utils/helpers.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/utils/helpers.py	2022-07-31 00:27:15.000000000 +0000
@@ -211,10 +211,12 @@ class BgServingThread(object):
     SERVE_INTERVAL = 0.0
     SLEEP_INTERVAL = 0.1
 
-    def __init__(self, conn, callback=None):
+    def __init__(self, conn, callback=None, serve_interval=SERVE_INTERVAL, sleep_interval=SLEEP_INTERVAL):
         self._conn = conn
         self._active = True
         self._callback = callback
+        self._serve_interval = serve_interval
+        self._sleep_interval = sleep_interval
         self._thread = spawn(self._bg_server)
 
     def __del__(self):
@@ -224,8 +226,8 @@ class BgServingThread(object):
     def _bg_server(self):
         try:
             while self._active:
-                self._conn.serve(self.SERVE_INTERVAL)
-                time.sleep(self.SLEEP_INTERVAL)  # to reduce contention
+                self._conn.serve(self._serve_interval)
+                time.sleep(self._sleep_interval)  # to reduce contention
         except Exception:
             if self._active:
                 self._active = False
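
Not part of the patch: a short sketch of the new tuning knobs, assuming ``conn`` is an already-established RPyC connection.

    from rpyc.utils.helpers import BgServingThread

    # serve_interval / sleep_interval are the new keyword arguments; the
    # class-level defaults (0.0 and 0.1 seconds) are unchanged.
    bg = BgServingThread(conn, serve_interval=0.0, sleep_interval=0.05)
    # ... interact with conn while incoming requests are served in the background ...
    bg.stop()
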
diff -pruN 5.1.0-1/rpyc/utils/__init__.py 5.2.1-1/rpyc/utils/__init__.py
--- 5.1.0-1/rpyc/utils/__init__.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/utils/__init__.py	2022-07-31 00:27:15.000000000 +0000
@@ -1,3 +1,41 @@
 """
 Utilities (not part of the core protocol)
 """
+import functools
+import inspect
+from rpyc.core import DEFAULT_CONFIG
+
+
+def service(cls):
+    """find and rename exposed decorated attributes"""
+    # NOTE: inspect.getmembers invokes getattr for each attribute-name. Descriptors may raise AttributeError.
+    # Only the AttributeError exception is caught when raised. This decorator will fail if a descriptor raises
+    # any exception other than AttributeError when getattr is called.
+    for attr_name, attr_obj in inspect.getmembers(cls):  # rebind exposed decorated attributes
+        exposed_prefix = getattr(attr_obj, '__exposed__', False)
+        if exposed_prefix and not inspect.iscode(attr_obj):  # exclude the implementation
+            renamed = exposed_prefix + attr_name
+            if inspect.isclass(attr_obj):  # recurse exposed objects such as a class
+                attr_obj = service(attr_obj)
+            setattr(cls, attr_name, attr_obj)
+            setattr(cls, renamed, attr_obj)
+    return cls
+
+
+def exposed(arg):
+    """decorator that adds the exposed prefix information to functions which `service` uses to rebind attrs"""
+    exposed_prefix = DEFAULT_CONFIG['exposed_prefix']
+    if isinstance(arg, str):
+        # When the arg is a string (i.e. `@rpyc.exposed("customPrefix_")`) the prefix
+        # is partially evaluated into the wrapper. The function returned is "frozen" and used as a decorator.
+        return functools.partial(_wrapper, arg)
+    elif hasattr(arg, '__call__') or hasattr(arg, '__get__'):
+        # When the arg is callable (i.e. `@rpyc.exposed`) then use default prefix and invoke
+        return _wrapper(exposed_prefix, arg)
+    else:
+        raise TypeError('rpyc.exposed expects a callable object, descriptor, or string')
+
+
+def _wrapper(exposed_prefix, exposed_obj):
+    exposed_obj.__exposed__ = exposed_prefix
+    return exposed_obj
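
For illustration (not part of the patch): how the new decorators are intended to be used on a service class; the service and method names below are made up.

    import rpyc

    @rpyc.service
    class CalculatorService(rpyc.Service):
        @rpyc.exposed
        def add(self, a, b):          # reachable as both add() and exposed_add()
            return a + b

        @rpyc.exposed('prefix_')      # custom prefix, for clients configured
        def mul(self, a, b):          # with exposed_prefix='prefix_'
            return a * b
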
diff -pruN 5.1.0-1/rpyc/utils/registry.py 5.2.1-1/rpyc/utils/registry.py
--- 5.1.0-1/rpyc/utils/registry.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/utils/registry.py	2022-07-31 00:27:15.000000000 +0000
@@ -93,13 +93,21 @@ class RegistryServer(object):
         self.logger.debug(f"replying with {servers!r}")
         return tuple(servers)
 
-    def cmd_list(self, host):
+    def cmd_list(self, host, filter_host):
         """implementation for the ``list`` command"""
         self.logger.debug("querying for services list:")
         if not self.allow_listing:
             self.logger.debug("listing is disabled")
             return None
-        services = tuple(self.services.keys())
+        services = []
+        if filter_host[0]:
+            for serv in self.services.keys():
+                known_hosts = [h[0] for h in self.services[serv].keys()]
+                if filter_host[0] in known_hosts:
+                    services.append(serv)
+            services = tuple(services)
+        else:
+            services = tuple(self.services.keys())
         self.logger.debug(f"replying with {services}")
 
         return services
@@ -274,7 +282,7 @@ class RegistryClient(object):
         """
         raise NotImplementedError()
 
-    def list(self):
+    def list(self, filter_host=None):
         """
         Send a query for the full list of exposed servers
         :returns: a list of `` service_name ``
@@ -349,18 +357,18 @@ class UDPRegistryClient(RegistryClient):
                 servers = brine.load(data)
         return servers
 
-    def list(self):
+    def list(self, filter_host=None):
         sock = socket.socket(self.sock_family, socket.SOCK_DGRAM)
 
         with closing(sock):
             if self.bcast:
                 sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, True)
-            data = brine.dump(("RPYC", "LIST", ()))
+            data = brine.dump(("RPYC", "LIST", ((filter_host,),)))
             sock.sendto(data, (self.ip, self.port))
             sock.settimeout(self.timeout)
 
             try:
-                data, _ = sock.recvfrom(MAX_DGRAM_SIZE)
+                data, _ = sock.recvfrom(MAX_DGRAM_SIZE * 10)
             except (socket.error, socket.timeout):
                 services = ()
             else:
@@ -446,11 +454,11 @@ class TCPRegistryClient(RegistryClient):
                 servers = brine.load(data)
         return servers
 
-    def list(self):
+    def list(self, filter_host=None):
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         with closing(sock):
             sock.settimeout(self.timeout)
-            data = brine.dump(("RPYC", "LIST", ()))
+            data = brine.dump(("RPYC", "LIST", ((filter_host,),)))
             sock.connect((self.ip, self.port))
             sock.send(data)
 
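
Not part of the patch: a client-side sketch of the filtered listing, assuming a registry with listing enabled is reachable via UDP broadcast.

    from rpyc.utils.registry import UDPRegistryClient

    registrar = UDPRegistryClient(timeout=2)
    all_services = registrar.list()                        # unfiltered, as before
    local_only = registrar.list(filter_host="127.0.0.1")   # only services registered from that host
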
diff -pruN 5.1.0-1/rpyc/utils/zerodeploy.py 5.2.1-1/rpyc/utils/zerodeploy.py
--- 5.1.0-1/rpyc/utils/zerodeploy.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/utils/zerodeploy.py	2022-07-31 00:27:15.000000000 +0000
@@ -4,6 +4,7 @@
 Requires [plumbum](http://plumbum.readthedocs.org/)
 """
 from __future__ import with_statement
+from subprocess import TimeoutExpired
 import sys
 import socket  # noqa: F401
 from rpyc.lib.compat import BYTES_LITERAL
@@ -151,27 +152,36 @@ class DeployedServer(object):
     def __exit__(self, t, v, tb):
         self.close()
 
-    def close(self):
+    def close(self, timeout=None):
         if self.proc is not None:
             try:
                 self.proc.terminate()
-                self.proc.communicate()
+                self.proc.communicate(timeout=timeout)
+            except TimeoutExpired:
+                self.proc.kill()
+                raise
             except Exception:
                 pass
             self.proc = None
         if self.tun is not None:
             try:
                 self.tun._session.proc.terminate()
-                self.tun._session.proc.communicate()
+                self.tun._session.proc.communicate(timeout=timeout)
                 self.tun.close()
+            except TimeoutExpired:
+                self.tun._session.proc.kill()
+                raise
             except Exception:
                 pass
             self.tun = None
         if self.remote_machine is not None:
             try:
                 self.remote_machine._session.proc.terminate()
-                self.remote_machine._session.proc.communicate()
+                self.remote_machine._session.proc.communicate(timeout=timeout)
                 self.remote_machine.close()
+            except TimeoutExpired:
+                self.remote_machine._session.proc.kill()
+                raise
             except Exception:
                 pass
             self.remote_machine = None
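
Not part of the patch: a sketch of the new close timeout, assuming plumbum is installed and passwordless SSH to localhost works (mirroring the test further down).

    from subprocess import TimeoutExpired
    from plumbum import SshMachine
    from rpyc.utils.zerodeploy import DeployedServer

    rem = SshMachine("localhost")
    server = DeployedServer(rem)
    conn = server.classic_connect()
    conn.close()
    try:
        server.close(timeout=5)      # wait at most 5 seconds per child process
    except TimeoutExpired:
        pass                         # the child was killed and the error re-raised
    rem.close()
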
diff -pruN 5.1.0-1/rpyc/version.py 5.2.1-1/rpyc/version.py
--- 5.1.0-1/rpyc/version.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/rpyc/version.py	2022-07-31 00:27:15.000000000 +0000
@@ -1,3 +1,3 @@
-version = (5, 1, 0)
-version_string = ".".join(map(str, version))
-release_date = "2022-02-26"
+__version__ = '5.2.1'
+version = tuple(__version__.split('.'))
+release_date = "2022-07-30"
diff -pruN 5.1.0-1/setup.cfg 5.2.1-1/setup.cfg
--- 5.1.0-1/setup.cfg	1970-01-01 00:00:00.000000000 +0000
+++ 5.2.1-1/setup.cfg	2022-07-31 00:27:15.000000000 +0000
@@ -0,0 +1,4 @@
+[pep8]
+max-line-length=120
+[flake8]
+max-line-length=120
diff -pruN 5.1.0-1/setup.py 5.2.1-1/setup.py
--- 5.1.0-1/setup.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/setup.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,69 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-import sys
-import os
-
-try:
-    from setuptools import setup
-except ImportError:
-    from distutils.core import setup
-
-here = os.path.dirname(__file__)
-exec(open(os.path.join(here, 'rpyc', 'version.py')).read())
-
-setup(name="rpyc",
-      version=version_string,  # @UndefinedVariable
-      description="Remote Python Call (RPyC), a transparent and symmetric RPC library",
-      author="Tomer Filiba",
-      author_email="tomerfiliba@gmail.com",
-      maintainer="James Stronz",
-      maintainer_email="james@network-perception.com",
-      license="MIT",
-      url="http://rpyc.readthedocs.org",
-      packages=[
-          'rpyc',
-          'rpyc.core',
-          'rpyc.lib',
-          'rpyc.utils',
-      ],
-      scripts=[
-          os.path.join("bin", "rpyc_classic.py"),
-          os.path.join("bin", "rpyc_registry.py"),
-      ],
-      tests_require=[],
-      test_suite='nose.collector',
-      install_requires=["plumbum"],
-      #    entry_points = dict(
-      #        console_scripts = [
-      #            "rpyc_classic = rpyc.scripts.rpyc_classic:main",
-      #            "rpyc_registry = rpyc.scretips.rpyc_registry:main",
-      #        ],
-      #    ),
-      platforms=["POSIX", "Windows"],
-      python_requires='>=3.6',
-      use_2to3=False,
-      zip_safe=False,
-      long_description=open(os.path.join(here, "README.rst"), "r").read(),
-      classifiers=[
-          "Development Status :: 5 - Production/Stable",
-          "Intended Audience :: Developers",
-          "Intended Audience :: System Administrators",
-          "License :: OSI Approved :: MIT License",
-          "Operating System :: OS Independent",
-          "Programming Language :: Python :: 3",
-          "Programming Language :: Python :: 3.6",
-          "Programming Language :: Python :: 3.7",
-          "Programming Language :: Python :: 3.8",
-          "Programming Language :: Python :: 3.9",
-          "Programming Language :: Python :: 3.10",
-          "Topic :: Internet",
-          "Topic :: Software Development :: Libraries :: Python Modules",
-          "Topic :: Software Development :: Object Brokering",
-          "Topic :: Software Development :: Testing",
-          "Topic :: System :: Clustering",
-          "Topic :: System :: Distributed Computing",
-          "Topic :: System :: Monitoring",
-          "Topic :: System :: Networking",
-          "Topic :: System :: Systems Administration",
-      ],
-      )
diff -pruN 5.1.0-1/tests/support.py 5.2.1-1/tests/support.py
--- 5.1.0-1/tests/support.py	1970-01-01 00:00:00.000000000 +0000
+++ 5.2.1-1/tests/support.py	2022-07-31 00:27:15.000000000 +0000
@@ -0,0 +1,45 @@
+"""Supporting functions for unit tests
+
+The core logic of the functions `_ignore_deprecated_imports` and `import_module` is from the cpython code base:
+- https://github.com/python/cpython/blob/da576e08296490e94924421af71001bcfbccb317/Lib/test/support/import_helper.py
+"""
+import warnings
+import sys
+import contextlib
+import unittest
+
+
+@contextlib.contextmanager
+def _ignore_deprecated_imports(ignore=True):
+    """Context manager to suppress package and module deprecation
+    warnings when importing them.
+    If ignore is False, this context manager has no effect.
+    """
+    if ignore:
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", ".+ (module|package)",
+                                    DeprecationWarning)
+            yield
+    else:
+        yield
+
+
+def import_module(name, deprecated=False, *, required_on=(), fromlist=()):
+    """Import and return the module to be tested, raising SkipTest if
+    it is not available.
+    If deprecated is True, any module or package deprecation messages
+    will be suppressed. If a module is required on a platform but optional for
+    others, set required_on to an iterable of platform prefixes which will be
+    compared against sys.platform.
+    """
+    with _ignore_deprecated_imports(deprecated):
+        try:
+            module = __import__(name, fromlist=fromlist)
+            for a in fromlist:
+                if not hasattr(module, a):
+                    raise ImportError(f"cannot import name '{a}' from '{name}'")
+            return module
+        except ImportError as msg:
+            if sys.platform.startswith(tuple(required_on)):
+                raise
+            raise unittest.SkipTest(str(msg))
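
Not part of the patch: how the helper is used by the affinity test below, importing ``os`` only when the CPU-affinity functions exist.

    import unittest
    import support

    try:
        # Raises unittest.SkipTest when 'os' lacks the requested attributes
        # (e.g. on macOS or Windows).
        os_mod = support.import_module('os', fromlist=('sched_setaffinity', 'sched_getaffinity'))
    except unittest.SkipTest:
        os_mod = None
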
diff -pruN 5.1.0-1/tests/test_affinity.py 5.2.1-1/tests/test_affinity.py
--- 5.1.0-1/tests/test_affinity.py	1970-01-01 00:00:00.000000000 +0000
+++ 5.2.1-1/tests/test_affinity.py	2022-07-31 00:27:15.000000000 +0000
@@ -0,0 +1,77 @@
+import sys
+import time
+import unittest
+import support
+import rpyc
+
+
+class Test_Affinity(unittest.TestCase):
+    """To find race conditions we vary processor affinity (CPU pinning) settings.
+
+    The GIL tends to context switch more frequently when more CPU cores are available. By binding this PID
+    to one CPU core, more ticks will occur between each context switch. Increasing the number of CPU cores we
+    are bound to lets us test RPyC with more frequent context switching. The aim is to find contention between
+    threads for the socket that results in undesired behavior (e.g. a reply never making it to the right thread).
+
+    Python Thread Visualization: http://www.dabeaz.com/GIL/gilvis/fourthread.html
+    """
+    @classmethod
+    def setUpClass(cls):
+        """Construct the a copy of ClassicServer that embeds a sleep(0) into _dispatch and set affinity"""
+        cls._orig_func = rpyc.core.protocol.Connection._dispatch
+
+        def _sleepy_dispatch(self, data):
+            time.sleep(0.0)
+            return cls._orig_func(self, data)
+        setattr(rpyc.core.protocol.Connection, '_dispatch', _sleepy_dispatch)
+        cls.cfg = {'sync_request_timeout': 5}
+        if sys.platform != "linux":
+            print("Running Test_Affinity is less productive on non-linux systems...")
+        try:
+            cls._skip = None
+            cls._os = None
+            cls._supported = True
+            cls._os = support.import_module('os', fromlist=('sched_setaffinity', 'sched_getaffinity'))
+            cls._orig_affinity = cls._os.sched_getaffinity(0)
+        except unittest.SkipTest as skip:
+            cls._skip = skip
+            cls._supported = False
+            cls._orig_affinity = None
+
+    @classmethod
+    def tearDownClass(cls):
+        setattr(rpyc.core.protocol.Connection, '_dispatch', cls._orig_func)
+
+    def setUp(self):
+        self._os.sched_setaffinity(0, {0, })
+        self.conn = rpyc.connect_thread(rpyc.ClassicService, self.cfg, rpyc.ClassicService, self.cfg)
+        self.bg_threads = [rpyc.BgServingThread(self.conn) for i in range(3)]
+
+    def tearDown(self):
+        for t in self.bg_threads:
+            t.stop()
+        self.bg_threads = []
+        self.conn.close()
+        self.conn = None
+        self._reset_affinity()
+
+    def _time_execute_sleep(self):
+        """returns time to execute 0.3s worth of sleeping"""
+        t0 = time.time()
+        self.conn.execute("import time")
+        for p in (0, 0.1, 0.2):
+            self.conn.execute(f"time.sleep({p})")
+        return time.time() - t0
+
+    def _reset_affinity(self):
+        if self._os is not None:
+            return self._os.sched_setaffinity(0, self._orig_affinity)
+
+    def test_pinned_to_0(self):
+        """test behavior with processor affinity set such that this process is pinned to 0"""
+        if self._skip:
+            raise self._skip
+        max_elapsed_time = self.cfg['sync_request_timeout']
+        elapsed_time = self._time_execute_sleep()
+        self.assertLess(elapsed_time, max_elapsed_time)
+        self.assertIn('count=0', repr(self.conn._recvlock))
diff -pruN 5.1.0-1/tests/test_attr_access.py 5.2.1-1/tests/test_attr_access.py
--- 5.1.0-1/tests/test_attr_access.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/tests/test_attr_access.py	2022-07-31 00:27:15.000000000 +0000
@@ -196,25 +196,58 @@ class TestConfigAllows(unittest.TestCase
         self.assertEqual(obj.exposed_foobar(), "Fee Fie Foe Foo")
         self.assertRaises(AttributeError, lambda: obj._privy)
 
-#    def test_type_protector(self):
-#        obj = self.conn.root.get_two()
-#        assert obj.baba() == "baba"
-#        try:
-#            obj.gaga()
-#        except AttributeError:
-#            pass
-#        else:
-#            assert False, "expected an attribute error!"
-#        obj2 = obj.lala()
-#        assert obj2.foo() == "foo"
-#        assert obj2.spam() == "spam"
-#        try:
-#            obj.bar()
-#        except AttributeError:
-#            pass
-#        else:
-#            assert False, "expected an attribute error!"
-#
+
+class MyDescriptor1(object):
+    def __get__(self, instance, owner=None):
+        raise AttributeError("abcd")
+
+
+class MyDescriptor2(object):
+    def __get__(self, instance, owner=None):
+        if instance is None:
+            return self
+        else:
+            raise RuntimeError("efgh")
+
+
+@rpyc.service
+class MyDecoratedService(rpyc.Service):
+    desc_1 = rpyc.exposed(MyDescriptor1())
+    exposed_desc_2 = MyDescriptor2()
+
+
+class TestDescriptorErrors(unittest.TestCase):
+    """Validate stack traces are consistent independent of how exposed attribute is accessed #478 #479"""
+    def setUp(self):
+        self.cfg = copy.copy(rpyc.core.protocol.DEFAULT_CONFIG)
+        self.server = ThreadedServer(MyDecoratedService(), port=0)
+        self.thd = self.server._start_in_thread()
+        self.conn = rpyc.connect("localhost", self.server.port)
+
+    def tearDown(self):
+        self.conn.close()
+        while self.server.clients:
+            pass
+        self.server.close()
+        self.thd.join()
+
+    def test_default_config(self):
+        root = self.conn.root
+        self.assertRaisesRegex(AttributeError, "abcd", lambda: root.exposed_desc_1)
+        self.assertRaisesRegex(AttributeError, "abcd", lambda: root.desc_1)
+        self.assertRaisesRegex(RuntimeError, "efgh", lambda: root.exposed_desc_2)
+        self.assertRaisesRegex(RuntimeError, "efgh", lambda: root.desc_2)
+
+    def test_allow_all(self):
+        self.cfg['allow_all_attrs'] = True
+        self.conn.close()
+        self.server.protocol_config.update(self.cfg)
+        self.conn = rpyc.connect("localhost", self.server.port)
+        root = self.conn.root
+        self.assertRaisesRegex(AttributeError, "abcd", lambda: root.exposed_desc_1)
+        self.assertRaisesRegex(AttributeError, "abcd", lambda: root.desc_1)
+        self.assertRaisesRegex(RuntimeError, "efgh", lambda: root.exposed_desc_2)
+        self.assertRaisesRegex(RuntimeError, "efgh", lambda: root.desc_2)
 
 
 if __name__ == "__main__":
diff -pruN 5.1.0-1/tests/test_custom_service.py 5.2.1-1/tests/test_custom_service.py
--- 5.1.0-1/tests/test_custom_service.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/tests/test_custom_service.py	2022-07-31 00:27:15.000000000 +0000
@@ -19,6 +19,7 @@ if not isinstance(MyMeta, MyMeta):
     MyClass = MyMeta(MyClass.__name__, MyClass.__bases__, dict(MyClass.__dict__))
 
 
+@rpyc.service
 class MyService(rpyc.Service):
     on_connect_called = False
     on_disconnect_called = False
@@ -50,6 +51,24 @@ class MyService(rpyc.Service):
     def exposed_instance(self, inst, cls):
         return isinstance(inst, cls)
 
+    @rpyc.exposed
+    class MyClass(object):
+        def __init__(self, a, b):
+            self.a = a
+            self.b = b
+
+        @rpyc.exposed
+        def foo(self):
+            return self.a + self.b
+
+    @rpyc.exposed
+    def get_decorated(self):
+        return "decorated"
+
+    @rpyc.exposed('prefix_')
+    def get_decorated_prefix(self):
+        return "decorated_prefix"
+
 
 def before_closed(root):
     root.on_about_to_close()
@@ -61,9 +80,13 @@ class TestCustomService(unittest.TestCas
     def setUp(self):
         self.service = MyService()
         client_config = {"before_closed": before_closed, "close_catchall": False}
-        self.conn = rpyc.connect_thread( remote_service=self.service, config=client_config)
-
+        prefixed_client_config = {'exposed_prefix': 'prefix_'}
+        self.conn = rpyc.connect_thread(remote_service=self.service, config=client_config)
+        self.prefixed_conn = rpyc.connect_thread(remote_service=self.service,
+                                                 config=prefixed_client_config,
+                                                 remote_config=prefixed_client_config)
         self.conn.root  # this will block until the service is initialized,
+        self.prefixed_conn.root  # this will block until the service is initialized,
         # so we can be sure on_connect_called is True by that time
         self.assertTrue(self.service.on_connect_called)
 
@@ -92,6 +115,14 @@ class TestCustomService(unittest.TestCas
         self.conn.root.exposed_getlist
         # this is not an exposed attribute:
         self.assertRaises(AttributeError, lambda: self.conn.root.foobar())
+        # methods exposed using decorator
+        self.conn.root.get_decorated
+        self.conn.root.exposed_get_decorated
+        self.prefixed_conn.root.get_decorated_prefix
+        self.prefixed_conn.root.prefix_get_decorated_prefix
+        self.assertFalse(hasattr(self.conn.root, 'get_decorated_prefix'))
+        smc = self.conn.root.MyClass('a', 'b')
+        self.assertEqual(smc.foo(), 'ab')
 
     def test_safeattrs(self):
         x = self.conn.root.getlist()
diff -pruN 5.1.0-1/tests/test_deploy.py 5.2.1-1/tests/test_deploy.py
--- 5.1.0-1/tests/test_deploy.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/tests/test_deploy.py	2022-07-31 00:27:15.000000000 +0000
@@ -1,6 +1,9 @@
 from __future__ import with_statement
+
 import unittest
+import subprocess
 import sys
+
 from plumbum import SshMachine
 from plumbum.machines.paramiko_machine import ParamikoMachine
 from rpyc.utils.zerodeploy import DeployedServer
@@ -11,7 +14,6 @@ except Exception:
     _paramiko_import_failed = True
 
 
-@unittest.skipIf(_paramiko_import_failed, "Paramiko is not available")
 class TestDeploy(unittest.TestCase):
     def test_deploy(self):
         rem = SshMachine("localhost")
@@ -30,6 +32,51 @@ class TestDeploy(unittest.TestCase):
             self.fail("expected an EOFError")
         rem.close()
 
+    def test_close_timeout(self):
+        expected_timeout = 4
+        observed_timeouts = []
+        original_communicate = subprocess.Popen.communicate
+
+        def replacement_communicate(self, input=None, timeout=None):
+            observed_timeouts.append(timeout)
+            return original_communicate(self, input, timeout)
+
+        try:
+            subprocess.Popen.communicate = replacement_communicate
+            rem = SshMachine("localhost")
+            SshMachine.python = rem[sys.executable]
+            dep = DeployedServer(rem)
+            dep.classic_connect()
+            dep.close(timeout=expected_timeout)
+            rem.close()
+        finally:
+            subprocess.Popen.communicate = original_communicate
+        # The last three calls to communicate() happen during close(), so check they
+        # applied the timeout.
+        assert observed_timeouts[-3:] == [expected_timeout] * 3
+
+    def test_close_timeout_default_none(self):
+        observed_timeouts = []
+        original_communicate = subprocess.Popen.communicate
+
+        def replacement_communicate(self, input=None, timeout=None):
+            observed_timeouts.append(timeout)
+            return original_communicate(self, input, timeout)
+
+        try:
+            subprocess.Popen.communicate = replacement_communicate
+            rem = SshMachine("localhost")
+            SshMachine.python = rem[sys.executable]
+            dep = DeployedServer(rem)
+            dep.classic_connect()
+            dep.close()
+            rem.close()
+        finally:
+            subprocess.Popen.communicate = original_communicate
+        # No timeout specified, so Popen.communicate should have been called with timeout None.
+        assert observed_timeouts == [None] * len(observed_timeouts)
+
+    @unittest.skipIf(_paramiko_import_failed, "Paramiko is not available")
     def test_deploy_paramiko(self):
         rem = ParamikoMachine("localhost", missing_host_policy=paramiko.AutoAddPolicy())
         with DeployedServer(rem) as dep:
diff -pruN 5.1.0-1/tests/test_gdb.py 5.2.1-1/tests/test_gdb.py
--- 5.1.0-1/tests/test_gdb.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/tests/test_gdb.py	2022-07-31 00:27:15.000000000 +0000
@@ -1,8 +1,10 @@
 import pathlib
 import rpyc
 import subprocess
+import sys
 import tempfile
 import unittest
+import os
 from rpyc.utils.server import ThreadedServer
 from shutil import which
 
@@ -13,9 +15,12 @@ class ParentGDB(rpyc.Service):
     def on_connect(self, conn):
         tests_path = pathlib.Path(__file__).resolve().parent
         gdb_cmd = ['gdb', '-q', '-x', pathlib.Path(tests_path, 'gdb_service.py')]
-        self._proc = subprocess.Popen(gdb_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        env = os.environ.copy()
+        env['PYTHONPATH'] = ':'.join(sys.path)
+        self._proc = subprocess.Popen(gdb_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
         stdout = self._proc.stdout.readline()
         self._gdb_svc_port = int(stdout.strip().decode())
+        print(self._gdb_svc_port)
         self.gdb_svc_conn = rpyc.connect(host='localhost', port=self._gdb_svc_port)
 
     def on_disconnect(self, conn):
@@ -50,10 +55,15 @@ class Test_GDB(unittest.TestCase):
             pass
 
     def test_gdb(self):
+        print(0)
         parent_gdb_conn = rpyc.connect(host='localhost', port=18878)
+        print(1)
         gdb = parent_gdb_conn.root.get_gdb()
+        print(2)
         gdb.execute('file {}'.format(self.a_out))
+        print(3)
         disasm = gdb.execute('disassemble main', to_string=True)
+        print(4)
         self.assertIn('End of assembler dump', disasm)
         parent_gdb_conn.close()
 
diff -pruN 5.1.0-1/tests/test_registry.py 5.2.1-1/tests/test_registry.py
--- 5.1.0-1/tests/test_registry.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/tests/test_registry.py	2022-07-31 00:27:15.000000000 +0000
@@ -58,6 +58,23 @@ class BaseRegistryTest(object):
         res = c.discover("BAR")
         self.assertEqual(res, ())
 
+    def test_listing(self):
+        c = self._get_client()
+        c.logger.quiet = True
+
+        c.register(("FOO",), 12345)
+        c.register(("BAR", ), 54321, interface='127.0.0.2')
+        host_ip = c.discover("FOO")[0][0]
+
+        # test basic listing
+        res = c.list()
+        expected = ("FOO", "BAR")
+        self.assertEqual(set(p for p in res), set(expected))
+
+        # test listing with filter
+        res = c.list(filter_host=host_ip)
+        expected = ("FOO",)
+        self.assertEqual(set(res), set(expected))
 
 class TestTcpRegistry(BaseRegistryTest, unittest.TestCase):
     def _get_server(self):
diff -pruN 5.1.0-1/tests/test_remote_exception.py 5.2.1-1/tests/test_remote_exception.py
--- 5.1.0-1/tests/test_remote_exception.py	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/tests/test_remote_exception.py	2022-07-31 00:27:15.000000000 +0000
@@ -6,7 +6,7 @@ import unittest
 class MyService(rpyc.Service):
 
     def exposed_set_version(self):
-        rpyc.version.version_string = '1.0.0'
+        rpyc.version.__version__ = '1.0.0'
 
     def exposed_remote_assert(self, val):
         assert val
@@ -17,11 +17,11 @@ class TestRemoteException(unittest.TestC
         self.server = rpyc.utils.server.OneShotServer(MyService, port=0)
         self.server.logger.quiet = False
         self.server._start_in_thread()
-        self.original_version_string = rpyc.version.version_string
+        self.original_version_string = rpyc.version.__version__
         self.conn = rpyc.connect("localhost", port=self.server.port)
 
     def tearDown(self):
-        rpyc.version.version_string = self.original_version_string
+        rpyc.version.__version__ = self.original_version_string
         self.conn.close()
 
     def test_remote_exception(self):
diff -pruN 5.1.0-1/.travis.yml 5.2.1-1/.travis.yml
--- 5.1.0-1/.travis.yml	2022-02-26 23:28:26.000000000 +0000
+++ 5.2.1-1/.travis.yml	1970-01-01 00:00:00.000000000 +0000
@@ -1,37 +0,0 @@
-language: python
-
-dist: bionic
-
-# See: https://docs.travis-ci.com/user/languages/python/
-# and: https://docs.travis-ci.com/user/customizing-the-build/
-matrix:
-  include:
-    - {python: "3.6"}
-    - {python: "3.7"}
-    - {python: "3.8"}
-    - {python: "3.9"}
-    - {python: "3.10-dev"}
-    - {python: "nightly"}
-
-install:
-    - python setup.py install
-    # Install fails and historically has been skipped anyway
-    # - pip install gevent
-    # - pip install paramiko
-
-before_script:
-    - "echo NoHostAuthenticationForLocalhost yes >> ~/.ssh/config"
-    - "echo StrictHostKeyChecking no >> ~/.ssh/config"
-    - "ssh-keygen -q -f ~/.ssh/id_rsa -N ''"
-    - "cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys"
-    - "uname -a"
-    - "hostname"
-    - "cd tests"
-
-script:
-    - python -m unittest discover
-
-notifications:
-    email:
-        on_success: change
-        on_failure: change
